mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;
63
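/// A model that may correspond to an entry (file or directory) in a project's worktree.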
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
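/// Holds weak handles to every open project along with the database used to persist
/// per-project state (see `Project::restore_state` and `Project::persist_state`).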
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
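/// A collection of worktrees open together: tracks open buffers, running language servers,
/// collaborators, and whether the project is hosted locally or joined from a remote peer
/// (see `ProjectClientState`).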
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
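/// How the project holds an open buffer: strongly (kept alive, e.g. while the project is
/// shared), weakly (may be dropped when unused), or still loading, with operations buffered
/// until the buffer arrives (see `register_buffer`).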
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
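/// Whether this project is hosted by the local instance or joined from another peer, along
/// with the state each case needs: registration and online channels locally, the replica id
/// and unshare detection remotely.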
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug)]
212pub struct DocumentHighlight {
213 pub range: Range<language::Anchor>,
214 pub kind: DocumentHighlightKind,
215}
216
217#[derive(Clone, Debug)]
218pub struct Symbol {
219 pub source_worktree_id: WorktreeId,
220 pub worktree_id: WorktreeId,
221 pub language_server_name: LanguageServerName,
222 pub path: PathBuf,
223 pub label: CodeLabel,
224 pub name: String,
225 pub kind: lsp::SymbolKind,
226 pub range: Range<PointUtf16>,
227 pub signature: [u8; 32],
228}
229
230#[derive(Clone, Debug, PartialEq)]
231pub struct HoverBlock {
232 pub text: String,
233 pub language: Option<String>,
234}
235
236impl HoverBlock {
237 fn try_new(marked_string: MarkedString) -> Option<Self> {
238 let result = match marked_string {
239 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
240 text: value,
241 language: Some(language),
242 },
243 MarkedString::String(text) => HoverBlock {
244 text,
245 language: None,
246 },
247 };
248 if result.text.is_empty() {
249 None
250 } else {
251 Some(result)
252 }
253 }
254}
255
256#[derive(Debug)]
257pub struct Hover {
258 pub contents: Vec<HoverBlock>,
259 pub range: Option<Range<language::Anchor>>,
260}
261
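/// A set of buffer transactions keyed by the buffer they apply to, used for example to
/// report the edits produced by a workspace edit (see
/// `last_workspace_edits_by_language_server`).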
262#[derive(Default)]
263pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
264
265impl DiagnosticSummary {
266 fn new<'a, T: 'a>(
267 language_server_id: usize,
268 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
269 ) -> Self {
270 let mut this = Self {
271 language_server_id,
272 error_count: 0,
273 warning_count: 0,
274 };
275
276 for entry in diagnostics {
277 if entry.diagnostic.is_primary {
278 match entry.diagnostic.severity {
279 DiagnosticSeverity::ERROR => this.error_count += 1,
280 DiagnosticSeverity::WARNING => this.warning_count += 1,
281 _ => {}
282 }
283 }
284 }
285
286 this
287 }
288
289 pub fn is_empty(&self) -> bool {
290 self.error_count == 0 && self.warning_count == 0
291 }
292
293 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
294 proto::DiagnosticSummary {
295 path: path.to_string_lossy().to_string(),
296 language_server_id: self.language_server_id as u64,
297 error_count: self.error_count as u32,
298 warning_count: self.warning_count as u32,
299 }
300 }
301}
302
303#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
304pub struct ProjectEntryId(usize);
305
306impl ProjectEntryId {
307 pub const MAX: Self = Self(usize::MAX);
308
309 pub fn new(counter: &AtomicUsize) -> Self {
310 Self(counter.fetch_add(1, SeqCst))
311 }
312
313 pub fn from_proto(id: u64) -> Self {
314 Self(id as usize)
315 }
316
317 pub fn to_proto(&self) -> u64 {
318 self.0 as u64
319 }
320
321 pub fn to_usize(&self) -> usize {
322 self.0
323 }
324}
325
326impl Project {
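/// Registers the RPC message and request handlers that project models respond to.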
327 pub fn init(client: &Arc<Client>) {
328 client.add_model_message_handler(Self::handle_request_join_project);
329 client.add_model_message_handler(Self::handle_add_collaborator);
330 client.add_model_message_handler(Self::handle_buffer_reloaded);
331 client.add_model_message_handler(Self::handle_buffer_saved);
332 client.add_model_message_handler(Self::handle_start_language_server);
333 client.add_model_message_handler(Self::handle_update_language_server);
334 client.add_model_message_handler(Self::handle_remove_collaborator);
335 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
336 client.add_model_message_handler(Self::handle_update_project);
337 client.add_model_message_handler(Self::handle_unregister_project);
338 client.add_model_message_handler(Self::handle_project_unshared);
339 client.add_model_message_handler(Self::handle_update_buffer_file);
340 client.add_model_message_handler(Self::handle_update_buffer);
341 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
342 client.add_model_message_handler(Self::handle_update_worktree);
343 client.add_model_request_handler(Self::handle_create_project_entry);
344 client.add_model_request_handler(Self::handle_rename_project_entry);
345 client.add_model_request_handler(Self::handle_copy_project_entry);
346 client.add_model_request_handler(Self::handle_delete_project_entry);
347 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
348 client.add_model_request_handler(Self::handle_apply_code_action);
349 client.add_model_request_handler(Self::handle_reload_buffers);
350 client.add_model_request_handler(Self::handle_format_buffers);
351 client.add_model_request_handler(Self::handle_get_code_actions);
352 client.add_model_request_handler(Self::handle_get_completions);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
356 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
358 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
359 client.add_model_request_handler(Self::handle_search_project);
360 client.add_model_request_handler(Self::handle_get_project_symbols);
361 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
362 client.add_model_request_handler(Self::handle_open_buffer_by_id);
363 client.add_model_request_handler(Self::handle_open_buffer_by_path);
364 client.add_model_request_handler(Self::handle_save_buffer);
365 }
366
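/// Creates a project hosted by this instance. A background task keeps the project's
/// registration with the server in sync with the client's connection status and the
/// project's online flag.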
367 pub fn local(
368 online: bool,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 project_store: ModelHandle<ProjectStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut MutableAppContext,
375 ) -> ModelHandle<Self> {
376 cx.add_model(|cx: &mut ModelContext<Self>| {
377 let (online_tx, online_rx) = watch::channel_with(online);
378 let (remote_id_tx, remote_id_rx) = watch::channel();
379 let _maintain_remote_id_task = cx.spawn_weak({
380 let status_rx = client.clone().status();
381 let online_rx = online_rx.clone();
382 move |this, mut cx| async move {
383 let mut stream = Stream::map(status_rx.clone(), drop)
384 .merge(Stream::map(online_rx.clone(), drop));
385 while stream.recv().await.is_some() {
386 let this = this.upgrade(&cx)?;
387 if status_rx.borrow().is_connected() && *online_rx.borrow() {
388 this.update(&mut cx, |this, cx| this.register(cx))
389 .await
390 .log_err()?;
391 } else {
392 this.update(&mut cx, |this, cx| this.unregister(cx))
393 .await
394 .log_err();
395 }
396 }
397 None
398 }
399 });
400
401 let handle = cx.weak_handle();
402 project_store.update(cx, |store, cx| store.add_project(handle, cx));
403
404 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
405 Self {
406 worktrees: Default::default(),
407 collaborators: Default::default(),
408 opened_buffers: Default::default(),
409 shared_buffers: Default::default(),
410 loading_buffers: Default::default(),
411 loading_local_worktrees: Default::default(),
412 buffer_snapshots: Default::default(),
413 client_state: ProjectClientState::Local {
414 is_shared: false,
415 remote_id_tx,
416 remote_id_rx,
417 online_tx,
418 online_rx,
419 _maintain_remote_id_task,
420 },
421 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
422 client_subscriptions: Vec::new(),
423 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
424 active_entry: None,
425 languages,
426 client,
427 user_store,
428 project_store,
429 fs,
430 next_entry_id: Default::default(),
431 next_diagnostic_group_id: Default::default(),
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_statuses: Default::default(),
435 last_workspace_edits_by_language_server: Default::default(),
436 language_server_settings: Default::default(),
437 next_language_server_id: 0,
438 nonce: StdRng::from_entropy().gen(),
439 initialized_persistent_state: false,
440 }
441 })
442 }
443
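/// Joins a project hosted by another peer: connects, sends a `JoinProject` request, maps any
/// decline reason to a `JoinProjectError`, and then builds the remote worktrees and the
/// collaborator list from the response.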
444 pub async fn remote(
445 remote_id: u64,
446 client: Arc<Client>,
447 user_store: ModelHandle<UserStore>,
448 project_store: ModelHandle<ProjectStore>,
449 languages: Arc<LanguageRegistry>,
450 fs: Arc<dyn Fs>,
451 mut cx: AsyncAppContext,
452 ) -> Result<ModelHandle<Self>, JoinProjectError> {
453 client.authenticate_and_connect(true, &cx).await?;
454
455 let response = client
456 .request(proto::JoinProject {
457 project_id: remote_id,
458 })
459 .await?;
460
461 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
462 proto::join_project_response::Variant::Accept(response) => response,
463 proto::join_project_response::Variant::Decline(decline) => {
464 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
465 Some(proto::join_project_response::decline::Reason::Declined) => {
466 Err(JoinProjectError::HostDeclined)?
467 }
468 Some(proto::join_project_response::decline::Reason::Closed) => {
469 Err(JoinProjectError::HostClosedProject)?
470 }
471 Some(proto::join_project_response::decline::Reason::WentOffline) => {
472 Err(JoinProjectError::HostWentOffline)?
473 }
474 None => Err(anyhow!("missing decline reason"))?,
475 }
476 }
477 };
478
479 let replica_id = response.replica_id as ReplicaId;
480
481 let mut worktrees = Vec::new();
482 for worktree in response.worktrees {
483 let (worktree, load_task) = cx
484 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
485 worktrees.push(worktree);
486 load_task.detach();
487 }
488
489 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
490 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
491 let handle = cx.weak_handle();
492 project_store.update(cx, |store, cx| store.add_project(handle, cx));
493
494 let mut this = Self {
495 worktrees: Vec::new(),
496 loading_buffers: Default::default(),
497 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
498 shared_buffers: Default::default(),
499 loading_local_worktrees: Default::default(),
500 active_entry: None,
501 collaborators: Default::default(),
502 languages,
503 user_store: user_store.clone(),
504 project_store,
505 fs,
506 next_entry_id: Default::default(),
507 next_diagnostic_group_id: Default::default(),
508 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
509 _subscriptions: Default::default(),
510 client: client.clone(),
511 client_state: ProjectClientState::Remote {
512 sharing_has_stopped: false,
513 remote_id,
514 replica_id,
515 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
516 async move {
517 let mut status = client.status();
518 let is_connected =
519 status.next().await.map_or(false, |s| s.is_connected());
// Even if we're initially connected, any later status change means we were momentarily disconnected.
521 if !is_connected || status.next().await.is_some() {
522 if let Some(this) = this.upgrade(&cx) {
523 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
524 }
525 }
526 Ok(())
527 }
528 .log_err()
529 }),
530 },
531 language_servers: Default::default(),
532 started_language_servers: Default::default(),
533 language_server_settings: Default::default(),
534 language_server_statuses: response
535 .language_servers
536 .into_iter()
537 .map(|server| {
538 (
539 server.id as usize,
540 LanguageServerStatus {
541 name: server.name,
542 pending_work: Default::default(),
543 pending_diagnostic_updates: 0,
544 },
545 )
546 })
547 .collect(),
548 last_workspace_edits_by_language_server: Default::default(),
549 next_language_server_id: 0,
550 opened_buffers: Default::default(),
551 buffer_snapshots: Default::default(),
552 nonce: StdRng::from_entropy().gen(),
553 initialized_persistent_state: false,
554 };
555 for worktree in worktrees {
556 this.add_worktree(&worktree, cx);
557 }
558 this
559 });
560
561 let user_ids = response
562 .collaborators
563 .iter()
564 .map(|peer| peer.user_id)
565 .collect();
566 user_store
567 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
568 .await?;
569 let mut collaborators = HashMap::default();
570 for message in response.collaborators {
571 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
572 collaborators.insert(collaborator.peer_id, collaborator);
573 }
574
575 this.update(&mut cx, |this, _| {
576 this.collaborators = collaborators;
577 });
578
579 Ok(this)
580 }
581
582 #[cfg(any(test, feature = "test-support"))]
583 pub async fn test(
584 fs: Arc<dyn Fs>,
585 root_paths: impl IntoIterator<Item = &Path>,
586 cx: &mut gpui::TestAppContext,
587 ) -> ModelHandle<Project> {
588 if !cx.read(|cx| cx.has_global::<Settings>()) {
589 cx.update(|cx| cx.set_global(Settings::test(cx)));
590 }
591
592 let languages = Arc::new(LanguageRegistry::test());
593 let http_client = client::test::FakeHttpClient::with_404_response();
594 let client = client::Client::new(http_client.clone());
595 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
596 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
597 let project = cx.update(|cx| {
598 Project::local(true, client, user_store, project_store, languages, fs, cx)
599 });
600 for path in root_paths {
601 let (tree, _) = project
602 .update(cx, |project, cx| {
603 project.find_or_create_local_worktree(path, true, cx)
604 })
605 .await
606 .unwrap();
607 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
608 .await;
609 }
610 project
611 }
612
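/// Restores the persisted online state of a local project by reading this project's worktree
/// keys from the database, falling back to the `projects_online_by_default` setting when no
/// value has been stored.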
613 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
614 if self.is_remote() {
615 return Task::ready(Ok(()));
616 }
617
618 let db = self.project_store.read(cx).db.clone();
619 let keys = self.db_keys_for_online_state(cx);
620 let online_by_default = cx.global::<Settings>().projects_online_by_default;
621 let read_online = cx.background().spawn(async move {
622 let values = db.read(keys)?;
623 anyhow::Ok(
624 values
625 .into_iter()
626 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
627 )
628 });
629 cx.spawn(|this, mut cx| async move {
630 let online = read_online.await.log_err().unwrap_or(false);
631 this.update(&mut cx, |this, cx| {
632 this.initialized_persistent_state = true;
633 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
634 let mut online_tx = online_tx.borrow_mut();
635 if *online_tx != online {
636 *online_tx = online;
637 drop(online_tx);
638 this.metadata_changed(false, cx);
639 }
640 }
641 });
642 Ok(())
643 })
644 }
645
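/// Persists the current online flag under each visible worktree's database key. Does nothing
/// for remote projects or before `restore_state` has run.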
646 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
647 if self.is_remote() || !self.initialized_persistent_state {
648 return Task::ready(Ok(()));
649 }
650
651 let db = self.project_store.read(cx).db.clone();
652 let keys = self.db_keys_for_online_state(cx);
653 let is_online = self.is_online();
654 cx.background().spawn(async move {
655 let value = &[is_online as u8];
656 db.write(keys.into_iter().map(|key| (key, value)))
657 })
658 }
659
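/// Reconciles running language servers with the current settings: servers for languages that
/// were disabled are stopped, and servers for open buffers whose language is now enabled are
/// started.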
660 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
661 let settings = cx.global::<Settings>();
662
663 let mut language_servers_to_start = Vec::new();
664 for buffer in self.opened_buffers.values() {
665 if let Some(buffer) = buffer.upgrade(cx) {
666 let buffer = buffer.read(cx);
667 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
668 {
669 if settings.enable_language_server(Some(&language.name())) {
670 let worktree = file.worktree.read(cx);
671 language_servers_to_start.push((
672 worktree.id(),
673 worktree.as_local().unwrap().abs_path().clone(),
674 language.clone(),
675 ));
676 }
677 }
678 }
679 }
680
681 let mut language_servers_to_stop = Vec::new();
682 for language in self.languages.to_vec() {
683 if let Some(lsp_adapter) = language.lsp_adapter() {
684 if !settings.enable_language_server(Some(&language.name())) {
685 let lsp_name = lsp_adapter.name();
686 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
687 if lsp_name == *started_lsp_name {
688 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
689 }
690 }
691 }
692 }
693 }
694
695 // Stop all newly-disabled language servers.
696 for (worktree_id, adapter_name) in language_servers_to_stop {
697 self.stop_language_server(worktree_id, adapter_name, cx)
698 .detach();
699 }
700
701 // Start all the newly-enabled language servers.
702 for (worktree_id, worktree_path, language) in language_servers_to_start {
703 self.start_language_server(worktree_id, worktree_path, language, cx);
704 }
705
706 cx.notify();
707 }
708
709 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
710 self.opened_buffers
711 .get(&remote_id)
712 .and_then(|buffer| buffer.upgrade(cx))
713 }
714
715 pub fn languages(&self) -> &Arc<LanguageRegistry> {
716 &self.languages
717 }
718
719 pub fn client(&self) -> Arc<Client> {
720 self.client.clone()
721 }
722
723 pub fn user_store(&self) -> ModelHandle<UserStore> {
724 self.user_store.clone()
725 }
726
727 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
728 self.project_store.clone()
729 }
730
731 #[cfg(any(test, feature = "test-support"))]
732 pub fn check_invariants(&self, cx: &AppContext) {
733 if self.is_local() {
734 let mut worktree_root_paths = HashMap::default();
735 for worktree in self.worktrees(cx) {
736 let worktree = worktree.read(cx);
737 let abs_path = worktree.as_local().unwrap().abs_path().clone();
738 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
739 assert_eq!(
740 prev_worktree_id,
741 None,
742 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
743 abs_path,
744 worktree.id(),
745 prev_worktree_id
746 )
747 }
748 } else {
749 let replica_id = self.replica_id();
750 for buffer in self.opened_buffers.values() {
751 if let Some(buffer) = buffer.upgrade(cx) {
752 let buffer = buffer.read(cx);
753 assert_eq!(
754 buffer.deferred_ops_len(),
755 0,
756 "replica {}, buffer {} has deferred operations",
757 replica_id,
758 buffer.remote_id()
759 );
760 }
761 }
762 }
763 }
764
765 #[cfg(any(test, feature = "test-support"))]
766 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
767 let path = path.into();
768 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
769 self.opened_buffers.iter().any(|(_, buffer)| {
770 if let Some(buffer) = buffer.upgrade(cx) {
771 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
772 if file.worktree == worktree && file.path() == &path.path {
773 return true;
774 }
775 }
776 }
777 false
778 })
779 } else {
780 false
781 }
782 }
783
784 pub fn fs(&self) -> &Arc<dyn Fs> {
785 &self.fs
786 }
787
788 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
789 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
790 let mut online_tx = online_tx.borrow_mut();
791 if *online_tx != online {
792 *online_tx = online;
793 drop(online_tx);
794 self.metadata_changed(true, cx);
795 }
796 }
797 }
798
799 pub fn is_online(&self) -> bool {
800 match &self.client_state {
801 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
802 ProjectClientState::Remote { .. } => true,
803 }
804 }
805
806 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
807 self.unshared(cx);
808 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
809 if let Some(remote_id) = *remote_id_rx.borrow() {
810 let request = self.client.request(proto::UnregisterProject {
811 project_id: remote_id,
812 });
813 return cx.spawn(|this, mut cx| async move {
814 let response = request.await;
815
816 // Unregistering the project causes the server to send out a
817 // contact update removing this project from the host's list
818 // of online projects. Wait until this contact update has been
819 // processed before clearing out this project's remote id, so
820 // that there is no moment where this project appears in the
821 // contact metadata and *also* has no remote id.
822 this.update(&mut cx, |this, cx| {
823 this.user_store()
824 .update(cx, |store, _| store.contact_updates_done())
825 })
826 .await;
827
828 this.update(&mut cx, |this, cx| {
829 if let ProjectClientState::Local { remote_id_tx, .. } =
830 &mut this.client_state
831 {
832 *remote_id_tx.borrow_mut() = None;
833 }
834 this.client_subscriptions.clear();
835 this.metadata_changed(false, cx);
836 });
837 response.map(drop)
838 });
839 }
840 }
841 Task::ready(Ok(()))
842 }
843
844 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
845 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
846 if remote_id_rx.borrow().is_some() {
847 return Task::ready(Ok(()));
848 }
849 }
850
851 let response = self.client.request(proto::RegisterProject {});
852 cx.spawn(|this, mut cx| async move {
853 let remote_id = response.await?.project_id;
854 this.update(&mut cx, |this, cx| {
855 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
856 *remote_id_tx.borrow_mut() = Some(remote_id);
857 }
858
859 this.metadata_changed(false, cx);
860 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
861 this.client_subscriptions
862 .push(this.client.add_model_for_remote_entity(remote_id, cx));
863 Ok(())
864 })
865 })
866 }
867
868 pub fn remote_id(&self) -> Option<u64> {
869 match &self.client_state {
870 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
871 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
872 }
873 }
874
875 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
876 let mut id = None;
877 let mut watch = None;
878 match &self.client_state {
879 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
880 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
881 }
882
883 async move {
884 if let Some(id) = id {
885 return id;
886 }
887 let mut watch = watch.unwrap();
888 loop {
889 let id = *watch.borrow();
890 if let Some(id) = id {
891 return id;
892 }
893 watch.next().await;
894 }
895 }
896 }
897
898 pub fn shared_remote_id(&self) -> Option<u64> {
899 match &self.client_state {
900 ProjectClientState::Local {
901 remote_id_rx,
902 is_shared,
903 ..
904 } => {
905 if *is_shared {
906 *remote_id_rx.borrow()
907 } else {
908 None
909 }
910 }
911 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
912 }
913 }
914
915 pub fn replica_id(&self) -> ReplicaId {
916 match &self.client_state {
917 ProjectClientState::Local { .. } => 0,
918 ProjectClientState::Remote { replica_id, .. } => *replica_id,
919 }
920 }
921
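/// For local projects, sends an `UpdateProject` message when the project is registered and
/// online, notifies the project store, and optionally persists the online state.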
922 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
923 if let ProjectClientState::Local {
924 remote_id_rx,
925 online_rx,
926 ..
927 } = &self.client_state
928 {
929 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
930 self.client
931 .send(proto::UpdateProject {
932 project_id,
933 worktrees: self
934 .worktrees
935 .iter()
936 .filter_map(|worktree| {
937 worktree.upgrade(&cx).map(|worktree| {
938 worktree.read(cx).as_local().unwrap().metadata_proto()
939 })
940 })
941 .collect(),
942 })
943 .log_err();
944 }
945
946 self.project_store.update(cx, |_, cx| cx.notify());
947 if persist {
948 self.persist_state(cx).detach_and_log_err(cx);
949 }
950 cx.notify();
951 }
952 }
953
954 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
955 &self.collaborators
956 }
957
958 pub fn worktrees<'a>(
959 &'a self,
960 cx: &'a AppContext,
961 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
962 self.worktrees
963 .iter()
964 .filter_map(move |worktree| worktree.upgrade(cx))
965 }
966
967 pub fn visible_worktrees<'a>(
968 &'a self,
969 cx: &'a AppContext,
970 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
971 self.worktrees.iter().filter_map(|worktree| {
972 worktree.upgrade(cx).and_then(|worktree| {
973 if worktree.read(cx).is_visible() {
974 Some(worktree)
975 } else {
976 None
977 }
978 })
979 })
980 }
981
982 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
983 self.visible_worktrees(cx)
984 .map(|tree| tree.read(cx).root_name())
985 }
986
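/// Returns the database keys (one per visible worktree, derived from its absolute path) under
/// which this project's online state is stored.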
987 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
988 self.worktrees
989 .iter()
990 .filter_map(|worktree| {
991 let worktree = worktree.upgrade(&cx)?.read(cx);
992 if worktree.is_visible() {
993 Some(format!(
994 "project-path-online:{}",
995 worktree.as_local().unwrap().abs_path().to_string_lossy()
996 ))
997 } else {
998 None
999 }
1000 })
1001 .collect::<Vec<_>>()
1002 }
1003
1004 pub fn worktree_for_id(
1005 &self,
1006 id: WorktreeId,
1007 cx: &AppContext,
1008 ) -> Option<ModelHandle<Worktree>> {
1009 self.worktrees(cx)
1010 .find(|worktree| worktree.read(cx).id() == id)
1011 }
1012
1013 pub fn worktree_for_entry(
1014 &self,
1015 entry_id: ProjectEntryId,
1016 cx: &AppContext,
1017 ) -> Option<ModelHandle<Worktree>> {
1018 self.worktrees(cx)
1019 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1020 }
1021
1022 pub fn worktree_id_for_entry(
1023 &self,
1024 entry_id: ProjectEntryId,
1025 cx: &AppContext,
1026 ) -> Option<WorktreeId> {
1027 self.worktree_for_entry(entry_id, cx)
1028 .map(|worktree| worktree.read(cx).id())
1029 }
1030
1031 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1032 paths.iter().all(|path| self.contains_path(&path, cx))
1033 }
1034
1035 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1036 for worktree in self.worktrees(cx) {
1037 let worktree = worktree.read(cx).as_local();
1038 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1039 return true;
1040 }
1041 }
1042 false
1043 }
1044
1045 pub fn create_entry(
1046 &mut self,
1047 project_path: impl Into<ProjectPath>,
1048 is_directory: bool,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<Task<Result<Entry>>> {
1051 let project_path = project_path.into();
1052 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1053 if self.is_local() {
1054 Some(worktree.update(cx, |worktree, cx| {
1055 worktree
1056 .as_local_mut()
1057 .unwrap()
1058 .create_entry(project_path.path, is_directory, cx)
1059 }))
1060 } else {
1061 let client = self.client.clone();
1062 let project_id = self.remote_id().unwrap();
1063 Some(cx.spawn_weak(|_, mut cx| async move {
1064 let response = client
1065 .request(proto::CreateProjectEntry {
1066 worktree_id: project_path.worktree_id.to_proto(),
1067 project_id,
1068 path: project_path.path.as_os_str().as_bytes().to_vec(),
1069 is_directory,
1070 })
1071 .await?;
1072 let entry = response
1073 .entry
1074 .ok_or_else(|| anyhow!("missing entry in response"))?;
1075 worktree
1076 .update(&mut cx, |worktree, cx| {
1077 worktree.as_remote().unwrap().insert_entry(
1078 entry,
1079 response.worktree_scan_id as usize,
1080 cx,
1081 )
1082 })
1083 .await
1084 }))
1085 }
1086 }
1087
1088 pub fn copy_entry(
1089 &mut self,
1090 entry_id: ProjectEntryId,
1091 new_path: impl Into<Arc<Path>>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Option<Task<Result<Entry>>> {
1094 let worktree = self.worktree_for_entry(entry_id, cx)?;
1095 let new_path = new_path.into();
1096 if self.is_local() {
1097 worktree.update(cx, |worktree, cx| {
1098 worktree
1099 .as_local_mut()
1100 .unwrap()
1101 .copy_entry(entry_id, new_path, cx)
1102 })
1103 } else {
1104 let client = self.client.clone();
1105 let project_id = self.remote_id().unwrap();
1106
1107 Some(cx.spawn_weak(|_, mut cx| async move {
1108 let response = client
1109 .request(proto::CopyProjectEntry {
1110 project_id,
1111 entry_id: entry_id.to_proto(),
1112 new_path: new_path.as_os_str().as_bytes().to_vec(),
1113 })
1114 .await?;
1115 let entry = response
1116 .entry
1117 .ok_or_else(|| anyhow!("missing entry in response"))?;
1118 worktree
1119 .update(&mut cx, |worktree, cx| {
1120 worktree.as_remote().unwrap().insert_entry(
1121 entry,
1122 response.worktree_scan_id as usize,
1123 cx,
1124 )
1125 })
1126 .await
1127 }))
1128 }
1129 }
1130
1131 pub fn rename_entry(
1132 &mut self,
1133 entry_id: ProjectEntryId,
1134 new_path: impl Into<Arc<Path>>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Option<Task<Result<Entry>>> {
1137 let worktree = self.worktree_for_entry(entry_id, cx)?;
1138 let new_path = new_path.into();
1139 if self.is_local() {
1140 worktree.update(cx, |worktree, cx| {
1141 worktree
1142 .as_local_mut()
1143 .unwrap()
1144 .rename_entry(entry_id, new_path, cx)
1145 })
1146 } else {
1147 let client = self.client.clone();
1148 let project_id = self.remote_id().unwrap();
1149
1150 Some(cx.spawn_weak(|_, mut cx| async move {
1151 let response = client
1152 .request(proto::RenameProjectEntry {
1153 project_id,
1154 entry_id: entry_id.to_proto(),
1155 new_path: new_path.as_os_str().as_bytes().to_vec(),
1156 })
1157 .await?;
1158 let entry = response
1159 .entry
1160 .ok_or_else(|| anyhow!("missing entry in response"))?;
1161 worktree
1162 .update(&mut cx, |worktree, cx| {
1163 worktree.as_remote().unwrap().insert_entry(
1164 entry,
1165 response.worktree_scan_id as usize,
1166 cx,
1167 )
1168 })
1169 .await
1170 }))
1171 }
1172 }
1173
1174 pub fn delete_entry(
1175 &mut self,
1176 entry_id: ProjectEntryId,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<Task<Result<()>>> {
1179 let worktree = self.worktree_for_entry(entry_id, cx)?;
1180 if self.is_local() {
1181 worktree.update(cx, |worktree, cx| {
1182 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::DeleteProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 })
1193 .await?;
1194 worktree
1195 .update(&mut cx, move |worktree, cx| {
1196 worktree.as_remote().unwrap().delete_entry(
1197 entry_id,
1198 response.worktree_scan_id as usize,
1199 cx,
1200 )
1201 })
1202 .await
1203 }))
1204 }
1205 }
1206
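/// Starts sharing a registered local project: buffer and worktree handles are upgraded to
/// strong handles, each worktree is shared, and the current language server statuses are
/// re-sent to the server.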
1207 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1208 let project_id;
1209 if let ProjectClientState::Local {
1210 remote_id_rx,
1211 is_shared,
1212 ..
1213 } = &mut self.client_state
1214 {
1215 if *is_shared {
1216 return Task::ready(Ok(()));
1217 }
1218 *is_shared = true;
1219 if let Some(id) = *remote_id_rx.borrow() {
1220 project_id = id;
1221 } else {
1222 return Task::ready(Err(anyhow!("project hasn't been registered")));
1223 }
1224 } else {
1225 return Task::ready(Err(anyhow!("can't share a remote project")));
1226 };
1227
1228 for open_buffer in self.opened_buffers.values_mut() {
1229 match open_buffer {
1230 OpenBuffer::Strong(_) => {}
1231 OpenBuffer::Weak(buffer) => {
1232 if let Some(buffer) = buffer.upgrade(cx) {
1233 *open_buffer = OpenBuffer::Strong(buffer);
1234 }
1235 }
1236 OpenBuffer::Loading(_) => unreachable!(),
1237 }
1238 }
1239
1240 for worktree_handle in self.worktrees.iter_mut() {
1241 match worktree_handle {
1242 WorktreeHandle::Strong(_) => {}
1243 WorktreeHandle::Weak(worktree) => {
1244 if let Some(worktree) = worktree.upgrade(cx) {
1245 *worktree_handle = WorktreeHandle::Strong(worktree);
1246 }
1247 }
1248 }
1249 }
1250
1251 let mut tasks = Vec::new();
1252 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1253 worktree.update(cx, |worktree, cx| {
1254 let worktree = worktree.as_local_mut().unwrap();
1255 tasks.push(worktree.share(project_id, cx));
1256 });
1257 }
1258
1259 for (server_id, status) in &self.language_server_statuses {
1260 self.client
1261 .send(proto::StartLanguageServer {
1262 project_id,
1263 server: Some(proto::LanguageServer {
1264 id: *server_id as u64,
1265 name: status.name.clone(),
1266 }),
1267 })
1268 .log_err();
1269 }
1270
1271 cx.spawn(|this, mut cx| async move {
1272 for task in tasks {
1273 task.await?;
1274 }
1275 this.update(&mut cx, |_, cx| cx.notify());
1276 Ok(())
1277 })
1278 }
1279
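/// Reverses `share`: clears collaborators and shared buffers, and downgrades buffer handles
/// and non-visible worktree handles back to weak handles.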
1280 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1281 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1282 if !*is_shared {
1283 return;
1284 }
1285
1286 *is_shared = false;
1287 self.collaborators.clear();
1288 self.shared_buffers.clear();
1289 for worktree_handle in self.worktrees.iter_mut() {
1290 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1291 let is_visible = worktree.update(cx, |worktree, _| {
1292 worktree.as_local_mut().unwrap().unshare();
1293 worktree.is_visible()
1294 });
1295 if !is_visible {
1296 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1297 }
1298 }
1299 }
1300
for open_buffer in self.opened_buffers.values_mut() {
    if let OpenBuffer::Strong(buffer) = open_buffer {
        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
    }
}
1309
1310 cx.notify();
1311 } else {
1312 log::error!("attempted to unshare a remote project");
1313 }
1314 }
1315
1316 pub fn respond_to_join_request(
1317 &mut self,
1318 requester_id: u64,
1319 allow: bool,
1320 cx: &mut ModelContext<Self>,
1321 ) {
1322 if let Some(project_id) = self.remote_id() {
1323 let share = self.share(cx);
1324 let client = self.client.clone();
1325 cx.foreground()
1326 .spawn(async move {
1327 share.await?;
1328 client.send(proto::RespondToJoinProjectRequest {
1329 requester_id,
1330 project_id,
1331 allow,
1332 })
1333 })
1334 .detach_and_log_err(cx);
1335 }
1336 }
1337
1338 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1339 if let ProjectClientState::Remote {
1340 sharing_has_stopped,
1341 ..
1342 } = &mut self.client_state
1343 {
1344 *sharing_has_stopped = true;
1345 self.collaborators.clear();
1346 cx.notify();
1347 }
1348 }
1349
1350 pub fn is_read_only(&self) -> bool {
1351 match &self.client_state {
1352 ProjectClientState::Local { .. } => false,
1353 ProjectClientState::Remote {
1354 sharing_has_stopped,
1355 ..
1356 } => *sharing_has_stopped,
1357 }
1358 }
1359
1360 pub fn is_local(&self) -> bool {
1361 match &self.client_state {
1362 ProjectClientState::Local { .. } => true,
1363 ProjectClientState::Remote { .. } => false,
1364 }
1365 }
1366
1367 pub fn is_remote(&self) -> bool {
1368 !self.is_local()
1369 }
1370
1371 pub fn create_buffer(
1372 &mut self,
1373 text: &str,
1374 language: Option<Arc<Language>>,
1375 cx: &mut ModelContext<Self>,
1376 ) -> Result<ModelHandle<Buffer>> {
1377 if self.is_remote() {
1378 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1379 }
1380
1381 let buffer = cx.add_model(|cx| {
1382 Buffer::new(self.replica_id(), text, cx)
1383 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1384 });
1385 self.register_buffer(&buffer, cx)?;
1386 Ok(buffer)
1387 }
1388
1389 pub fn open_path(
1390 &mut self,
1391 path: impl Into<ProjectPath>,
1392 cx: &mut ModelContext<Self>,
1393 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1394 let task = self.open_buffer(path, cx);
1395 cx.spawn_weak(|_, cx| async move {
1396 let buffer = task.await?;
1397 let project_entry_id = buffer
1398 .read_with(&cx, |buffer, cx| {
1399 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1400 })
1401 .ok_or_else(|| anyhow!("no project entry"))?;
1402 Ok((project_entry_id, buffer.into()))
1403 })
1404 }
1405
1406 pub fn open_local_buffer(
1407 &mut self,
1408 abs_path: impl AsRef<Path>,
1409 cx: &mut ModelContext<Self>,
1410 ) -> Task<Result<ModelHandle<Buffer>>> {
1411 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1412 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1413 } else {
1414 Task::ready(Err(anyhow!("no such path")))
1415 }
1416 }
1417
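/// Opens the buffer for the given project path, returning an existing buffer when the path is
/// already open and deduplicating concurrent loads of the same path through the
/// `loading_buffers` map.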
1418 pub fn open_buffer(
1419 &mut self,
1420 path: impl Into<ProjectPath>,
1421 cx: &mut ModelContext<Self>,
1422 ) -> Task<Result<ModelHandle<Buffer>>> {
1423 let project_path = path.into();
1424 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1425 worktree
1426 } else {
1427 return Task::ready(Err(anyhow!("no such worktree")));
1428 };
1429
1430 // If there is already a buffer for the given path, then return it.
1431 let existing_buffer = self.get_open_buffer(&project_path, cx);
1432 if let Some(existing_buffer) = existing_buffer {
1433 return Task::ready(Ok(existing_buffer));
1434 }
1435
1436 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1437 // If the given path is already being loaded, then wait for that existing
1438 // task to complete and return the same buffer.
1439 hash_map::Entry::Occupied(e) => e.get().clone(),
1440
1441 // Otherwise, record the fact that this path is now being loaded.
1442 hash_map::Entry::Vacant(entry) => {
1443 let (mut tx, rx) = postage::watch::channel();
1444 entry.insert(rx.clone());
1445
1446 let load_buffer = if worktree.read(cx).is_local() {
1447 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1448 } else {
1449 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1450 };
1451
1452 cx.spawn(move |this, mut cx| async move {
1453 let load_result = load_buffer.await;
1454 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1455 // Record the fact that the buffer is no longer loading.
1456 this.loading_buffers.remove(&project_path);
1457 let buffer = load_result.map_err(Arc::new)?;
1458 Ok(buffer)
1459 }));
1460 })
1461 .detach();
1462 rx
1463 }
1464 };
1465
1466 cx.foreground().spawn(async move {
1467 loop {
1468 if let Some(result) = loading_watch.borrow().as_ref() {
1469 match result {
1470 Ok(buffer) => return Ok(buffer.clone()),
1471 Err(error) => return Err(anyhow!("{}", error)),
1472 }
1473 }
1474 loading_watch.next().await;
1475 }
1476 })
1477 }
1478
1479 fn open_local_buffer_internal(
1480 &mut self,
1481 path: &Arc<Path>,
1482 worktree: &ModelHandle<Worktree>,
1483 cx: &mut ModelContext<Self>,
1484 ) -> Task<Result<ModelHandle<Buffer>>> {
1485 let load_buffer = worktree.update(cx, |worktree, cx| {
1486 let worktree = worktree.as_local_mut().unwrap();
1487 worktree.load_buffer(path, cx)
1488 });
1489 cx.spawn(|this, mut cx| async move {
1490 let buffer = load_buffer.await?;
1491 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1492 Ok(buffer)
1493 })
1494 }
1495
1496 fn open_remote_buffer_internal(
1497 &mut self,
1498 path: &Arc<Path>,
1499 worktree: &ModelHandle<Worktree>,
1500 cx: &mut ModelContext<Self>,
1501 ) -> Task<Result<ModelHandle<Buffer>>> {
1502 let rpc = self.client.clone();
1503 let project_id = self.remote_id().unwrap();
1504 let remote_worktree_id = worktree.read(cx).id();
1505 let path = path.clone();
1506 let path_string = path.to_string_lossy().to_string();
1507 cx.spawn(|this, mut cx| async move {
1508 let response = rpc
1509 .request(proto::OpenBufferByPath {
1510 project_id,
1511 worktree_id: remote_worktree_id.to_proto(),
1512 path: path_string,
1513 })
1514 .await?;
1515 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1516 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1517 .await
1518 })
1519 }
1520
1521 fn open_local_buffer_via_lsp(
1522 &mut self,
1523 abs_path: lsp::Url,
1524 lsp_adapter: Arc<dyn LspAdapter>,
1525 lsp_server: Arc<LanguageServer>,
1526 cx: &mut ModelContext<Self>,
1527 ) -> Task<Result<ModelHandle<Buffer>>> {
1528 cx.spawn(|this, mut cx| async move {
1529 let abs_path = abs_path
1530 .to_file_path()
1531 .map_err(|_| anyhow!("can't convert URI to path"))?;
1532 let (worktree, relative_path) = if let Some(result) =
1533 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1534 {
1535 result
1536 } else {
1537 let worktree = this
1538 .update(&mut cx, |this, cx| {
1539 this.create_local_worktree(&abs_path, false, cx)
1540 })
1541 .await?;
1542 this.update(&mut cx, |this, cx| {
1543 this.language_servers.insert(
1544 (worktree.read(cx).id(), lsp_adapter.name()),
1545 (lsp_adapter, lsp_server),
1546 );
1547 });
1548 (worktree, PathBuf::new())
1549 };
1550
1551 let project_path = ProjectPath {
1552 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1553 path: relative_path.into(),
1554 };
1555 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1556 .await
1557 })
1558 }
1559
1560 pub fn open_buffer_by_id(
1561 &mut self,
1562 id: u64,
1563 cx: &mut ModelContext<Self>,
1564 ) -> Task<Result<ModelHandle<Buffer>>> {
1565 if let Some(buffer) = self.buffer_for_id(id, cx) {
1566 Task::ready(Ok(buffer))
1567 } else if self.is_local() {
1568 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1569 } else if let Some(project_id) = self.remote_id() {
1570 let request = self
1571 .client
1572 .request(proto::OpenBufferById { project_id, id });
1573 cx.spawn(|this, mut cx| async move {
1574 let buffer = request
1575 .await?
1576 .buffer
1577 .ok_or_else(|| anyhow!("invalid buffer"))?;
1578 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1579 .await
1580 })
1581 } else {
1582 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1583 }
1584 }
1585
1586 pub fn save_buffer_as(
1587 &mut self,
1588 buffer: ModelHandle<Buffer>,
1589 abs_path: PathBuf,
1590 cx: &mut ModelContext<Project>,
1591 ) -> Task<Result<()>> {
1592 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1593 let old_path =
1594 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1595 cx.spawn(|this, mut cx| async move {
1596 if let Some(old_path) = old_path {
1597 this.update(&mut cx, |this, cx| {
1598 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1599 });
1600 }
1601 let (worktree, path) = worktree_task.await?;
1602 worktree
1603 .update(&mut cx, |worktree, cx| {
1604 worktree
1605 .as_local_mut()
1606 .unwrap()
1607 .save_buffer_as(buffer.clone(), path, cx)
1608 })
1609 .await?;
1610 this.update(&mut cx, |this, cx| {
1611 this.assign_language_to_buffer(&buffer, cx);
1612 this.register_buffer_with_language_server(&buffer, cx);
1613 });
1614 Ok(())
1615 })
1616 }
1617
1618 pub fn get_open_buffer(
1619 &mut self,
1620 path: &ProjectPath,
1621 cx: &mut ModelContext<Self>,
1622 ) -> Option<ModelHandle<Buffer>> {
1623 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1624 self.opened_buffers.values().find_map(|buffer| {
1625 let buffer = buffer.upgrade(cx)?;
1626 let file = File::from_dyn(buffer.read(cx).file())?;
1627 if file.worktree == worktree && file.path() == &path.path {
1628 Some(buffer)
1629 } else {
1630 None
1631 }
1632 })
1633 }
1634
1635 fn register_buffer(
1636 &mut self,
1637 buffer: &ModelHandle<Buffer>,
1638 cx: &mut ModelContext<Self>,
1639 ) -> Result<()> {
1640 let remote_id = buffer.read(cx).remote_id();
1641 let open_buffer = if self.is_remote() || self.is_shared() {
1642 OpenBuffer::Strong(buffer.clone())
1643 } else {
1644 OpenBuffer::Weak(buffer.downgrade())
1645 };
1646
1647 match self.opened_buffers.insert(remote_id, open_buffer) {
1648 None => {}
1649 Some(OpenBuffer::Loading(operations)) => {
1650 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1651 }
1652 Some(OpenBuffer::Weak(existing_handle)) => {
1653 if existing_handle.upgrade(cx).is_some() {
1654 Err(anyhow!(
1655 "already registered buffer with remote id {}",
1656 remote_id
1657 ))?
1658 }
1659 }
1660 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1661 "already registered buffer with remote id {}",
1662 remote_id
1663 ))?,
1664 }
1665 cx.subscribe(buffer, |this, buffer, event, cx| {
1666 this.on_buffer_event(buffer, event, cx);
1667 })
1668 .detach();
1669
1670 self.assign_language_to_buffer(buffer, cx);
1671 self.register_buffer_with_language_server(buffer, cx);
1672 cx.observe_release(buffer, |this, buffer, cx| {
1673 if let Some(file) = File::from_dyn(buffer.file()) {
1674 if file.is_local() {
1675 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1676 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1677 server
1678 .notify::<lsp::notification::DidCloseTextDocument>(
1679 lsp::DidCloseTextDocumentParams {
1680 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1681 },
1682 )
1683 .log_err();
1684 }
1685 }
1686 }
1687 })
1688 .detach();
1689
1690 Ok(())
1691 }
1692
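/// Notifies the buffer's language server that the file was opened, applies any diagnostics
/// already known for the path, seeds the buffer's completion triggers, and records the initial
/// snapshot used for incremental `didChange` notifications.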
1693 fn register_buffer_with_language_server(
1694 &mut self,
1695 buffer_handle: &ModelHandle<Buffer>,
1696 cx: &mut ModelContext<Self>,
1697 ) {
1698 let buffer = buffer_handle.read(cx);
1699 let buffer_id = buffer.remote_id();
1700 if let Some(file) = File::from_dyn(buffer.file()) {
1701 if file.is_local() {
1702 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1703 let initial_snapshot = buffer.text_snapshot();
1704
1705 let mut language_server = None;
1706 let mut language_id = None;
1707 if let Some(language) = buffer.language() {
1708 let worktree_id = file.worktree_id(cx);
1709 if let Some(adapter) = language.lsp_adapter() {
1710 language_id = adapter.id_for_language(language.name().as_ref());
1711 language_server = self
1712 .language_servers
1713 .get(&(worktree_id, adapter.name()))
1714 .cloned();
1715 }
1716 }
1717
1718 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1719 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1720 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1721 .log_err();
1722 }
1723 }
1724
1725 if let Some((_, server)) = language_server {
1726 server
1727 .notify::<lsp::notification::DidOpenTextDocument>(
1728 lsp::DidOpenTextDocumentParams {
1729 text_document: lsp::TextDocumentItem::new(
1730 uri,
1731 language_id.unwrap_or_default(),
1732 0,
1733 initial_snapshot.text(),
1734 ),
},
1737 )
1738 .log_err();
1739 buffer_handle.update(cx, |buffer, cx| {
1740 buffer.set_completion_triggers(
1741 server
1742 .capabilities()
1743 .completion_provider
1744 .as_ref()
1745 .and_then(|provider| provider.trigger_characters.clone())
.unwrap_or_default(),
1747 cx,
1748 )
1749 });
1750 self.buffer_snapshots
1751 .insert(buffer_id, vec![(0, initial_snapshot)]);
1752 }
1753 }
1754 }
1755 }
1756
1757 fn unregister_buffer_from_language_server(
1758 &mut self,
1759 buffer: &ModelHandle<Buffer>,
1760 old_path: PathBuf,
1761 cx: &mut ModelContext<Self>,
1762 ) {
1763 buffer.update(cx, |buffer, cx| {
1764 buffer.update_diagnostics(Default::default(), cx);
1765 self.buffer_snapshots.remove(&buffer.remote_id());
1766 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1767 language_server
1768 .notify::<lsp::notification::DidCloseTextDocument>(
1769 lsp::DidCloseTextDocumentParams {
1770 text_document: lsp::TextDocumentIdentifier::new(
1771 lsp::Url::from_file_path(old_path).unwrap(),
1772 ),
1773 },
1774 )
1775 .log_err();
1776 }
1777 });
1778 }
1779
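/// Reacts to buffer events: operations are forwarded to collaborators (or reported as project
/// activity), edits are converted into LSP `didChange` notifications, and saves trigger
/// `didSave` plus a simulated disk-based-diagnostics completion where needed.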
1780 fn on_buffer_event(
1781 &mut self,
1782 buffer: ModelHandle<Buffer>,
1783 event: &BufferEvent,
1784 cx: &mut ModelContext<Self>,
1785 ) -> Option<()> {
1786 match event {
1787 BufferEvent::Operation(operation) => {
1788 if let Some(project_id) = self.shared_remote_id() {
1789 let request = self.client.request(proto::UpdateBuffer {
1790 project_id,
1791 buffer_id: buffer.read(cx).remote_id(),
1792 operations: vec![language::proto::serialize_operation(&operation)],
1793 });
1794 cx.background().spawn(request).detach_and_log_err(cx);
1795 } else if let Some(project_id) = self.remote_id() {
1796 let _ = self
1797 .client
1798 .send(proto::RegisterProjectActivity { project_id });
1799 }
1800 }
1801 BufferEvent::Edited { .. } => {
1802 let (_, language_server) = self
1803 .language_server_for_buffer(buffer.read(cx), cx)?
1804 .clone();
1805 let buffer = buffer.read(cx);
1806 let file = File::from_dyn(buffer.file())?;
1807 let abs_path = file.as_local()?.abs_path(cx);
1808 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1809 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1810 let (version, prev_snapshot) = buffer_snapshots.last()?;
1811 let next_snapshot = buffer.text_snapshot();
1812 let next_version = version + 1;
1813
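// Translate the edits made since the previously sent snapshot into LSP incremental
// content changes, pairing each replaced range with the new text from the latest snapshot.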
1814 let content_changes = buffer
1815 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1816 .map(|edit| {
1817 let edit_start = edit.new.start.0;
1818 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1819 let new_text = next_snapshot
1820 .text_for_range(edit.new.start.1..edit.new.end.1)
1821 .collect();
1822 lsp::TextDocumentContentChangeEvent {
1823 range: Some(lsp::Range::new(
1824 point_to_lsp(edit_start),
1825 point_to_lsp(edit_end),
1826 )),
1827 range_length: None,
1828 text: new_text,
1829 }
1830 })
1831 .collect();
1832
1833 buffer_snapshots.push((next_version, next_snapshot));
1834
1835 language_server
1836 .notify::<lsp::notification::DidChangeTextDocument>(
1837 lsp::DidChangeTextDocumentParams {
1838 text_document: lsp::VersionedTextDocumentIdentifier::new(
1839 uri,
1840 next_version,
1841 ),
1842 content_changes,
1843 },
1844 )
1845 .log_err();
1846 }
1847 BufferEvent::Saved => {
1848 let file = File::from_dyn(buffer.read(cx).file())?;
1849 let worktree_id = file.worktree_id(cx);
1850 let abs_path = file.as_local()?.abs_path(cx);
1851 let text_document = lsp::TextDocumentIdentifier {
1852 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1853 };
1854
1855 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1856 server
1857 .notify::<lsp::notification::DidSaveTextDocument>(
1858 lsp::DidSaveTextDocumentParams {
1859 text_document: text_document.clone(),
1860 text: None,
1861 },
1862 )
1863 .log_err();
1864 }
1865
1866 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1867 // that don't support a disk-based progress token.
1868 let (lsp_adapter, language_server) =
1869 self.language_server_for_buffer(buffer.read(cx), cx)?;
1870 if lsp_adapter
1871 .disk_based_diagnostics_progress_token()
1872 .is_none()
1873 {
1874 let server_id = language_server.server_id();
1875 self.disk_based_diagnostics_finished(server_id, cx);
1876 self.broadcast_language_server_update(
1877 server_id,
1878 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1879 proto::LspDiskBasedDiagnosticsUpdated {},
1880 ),
1881 );
1882 }
1883 }
1884 _ => {}
1885 }
1886
1887 None
1888 }
1889
1890 fn language_servers_for_worktree(
1891 &self,
1892 worktree_id: WorktreeId,
1893 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1894 self.language_servers.iter().filter_map(
1895 move |((language_server_worktree_id, _), server)| {
1896 if *language_server_worktree_id == worktree_id {
1897 Some(server)
1898 } else {
1899 None
1900 }
1901 },
1902 )
1903 }
1904
1905 fn assign_language_to_buffer(
1906 &mut self,
1907 buffer: &ModelHandle<Buffer>,
1908 cx: &mut ModelContext<Self>,
1909 ) -> Option<()> {
1910 // If the buffer has a language, set it and start the language server if we haven't already.
1911 let full_path = buffer.read(cx).file()?.full_path(cx);
1912 let language = self.languages.select_language(&full_path)?;
1913 buffer.update(cx, |buffer, cx| {
1914 buffer.set_language(Some(language.clone()), cx);
1915 });
1916
1917 let file = File::from_dyn(buffer.read(cx).file())?;
1918 let worktree = file.worktree.read(cx).as_local()?;
1919 let worktree_id = worktree.id();
1920 let worktree_abs_path = worktree.abs_path().clone();
1921 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1922
1923 None
1924 }
1925
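// Starts (or reuses) the language server for this language in the given worktree, after
// checking the user's settings. The entry in `started_language_servers` ensures a server
// is only started once per (worktree, adapter) pair; once the server is initialized, its
// notification/request handlers are wired up and every already-open buffer that matches
// the worktree and language is reported to it via `textDocument/didOpen`.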
1926 fn start_language_server(
1927 &mut self,
1928 worktree_id: WorktreeId,
1929 worktree_path: Arc<Path>,
1930 language: Arc<Language>,
1931 cx: &mut ModelContext<Self>,
1932 ) {
1933 if !cx
1934 .global::<Settings>()
1935 .enable_language_server(Some(&language.name()))
1936 {
1937 return;
1938 }
1939
1940 let adapter = if let Some(adapter) = language.lsp_adapter() {
1941 adapter
1942 } else {
1943 return;
1944 };
1945 let key = (worktree_id, adapter.name());
1946 self.started_language_servers
1947 .entry(key.clone())
1948 .or_insert_with(|| {
1949 let server_id = post_inc(&mut self.next_language_server_id);
1950 let language_server = self.languages.start_language_server(
1951 server_id,
1952 language.clone(),
1953 worktree_path,
1954 self.client.http_client(),
1955 cx,
1956 );
1957 cx.spawn_weak(|this, mut cx| async move {
1958 let language_server = language_server?.await.log_err()?;
1959 let language_server = language_server
1960 .initialize(adapter.initialization_options())
1961 .await
1962 .log_err()?;
1963 let this = this.upgrade(&cx)?;
1964 let disk_based_diagnostics_progress_token =
1965 adapter.disk_based_diagnostics_progress_token();
1966
1967 language_server
1968 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1969 let this = this.downgrade();
1970 let adapter = adapter.clone();
1971 move |params, mut cx| {
1972 if let Some(this) = this.upgrade(&cx) {
1973 this.update(&mut cx, |this, cx| {
1974 this.on_lsp_diagnostics_published(
1975 server_id, params, &adapter, cx,
1976 );
1977 });
1978 }
1979 }
1980 })
1981 .detach();
1982
1983 language_server
1984 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1985 let settings = this
1986 .read_with(&cx, |this, _| this.language_server_settings.clone());
1987 move |params, _| {
1988 let settings = settings.lock().clone();
1989 async move {
1990 Ok(params
1991 .items
1992 .into_iter()
1993 .map(|item| {
1994 if let Some(section) = &item.section {
1995 settings
1996 .get(section)
1997 .cloned()
1998 .unwrap_or(serde_json::Value::Null)
1999 } else {
2000 settings.clone()
2001 }
2002 })
2003 .collect())
2004 }
2005 }
2006 })
2007 .detach();
2008
2009 // Even though we don't have handling for these requests, respond to them to
2010 // avoid stalling any language server like `gopls`, which waits for a response
2011 // to these requests when initializing.
2012 language_server
2013 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(|_, _| async {
2014 Ok(())
2015 })
2016 .detach();
2017 language_server
2018 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2019 Ok(())
2020 })
2021 .detach();
2022
2023 language_server
2024 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2025 let this = this.downgrade();
2026 let adapter = adapter.clone();
2027 let language_server = language_server.clone();
2028 move |params, cx| {
2029 Self::on_lsp_workspace_edit(
2030 this,
2031 params,
2032 server_id,
2033 adapter.clone(),
2034 language_server.clone(),
2035 cx,
2036 )
2037 }
2038 })
2039 .detach();
2040
2041 language_server
2042 .on_notification::<lsp::notification::Progress, _>({
2043 let this = this.downgrade();
2044 move |params, mut cx| {
2045 if let Some(this) = this.upgrade(&cx) {
2046 this.update(&mut cx, |this, cx| {
2047 this.on_lsp_progress(
2048 params,
2049 server_id,
2050 disk_based_diagnostics_progress_token,
2051 cx,
2052 );
2053 });
2054 }
2055 }
2056 })
2057 .detach();
2058
2059 this.update(&mut cx, |this, cx| {
2060 this.language_servers
2061 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2062 this.language_server_statuses.insert(
2063 server_id,
2064 LanguageServerStatus {
2065 name: language_server.name().to_string(),
2066 pending_work: Default::default(),
2067 pending_diagnostic_updates: 0,
2068 },
2069 );
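// Send the current language-server settings to the newly started server.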
2070 language_server
2071 .notify::<lsp::notification::DidChangeConfiguration>(
2072 lsp::DidChangeConfigurationParams {
2073 settings: this.language_server_settings.lock().clone(),
2074 },
2075 )
2076 .ok();
2077
2078 if let Some(project_id) = this.shared_remote_id() {
2079 this.client
2080 .send(proto::StartLanguageServer {
2081 project_id,
2082 server: Some(proto::LanguageServer {
2083 id: server_id as u64,
2084 name: language_server.name().to_string(),
2085 }),
2086 })
2087 .log_err();
2088 }
2089
2090 // Tell the language server about every open buffer in the worktree that matches the language.
2091 for buffer in this.opened_buffers.values() {
2092 if let Some(buffer_handle) = buffer.upgrade(cx) {
2093 let buffer = buffer_handle.read(cx);
2094 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2095 file
2096 } else {
2097 continue;
2098 };
2099 let language = if let Some(language) = buffer.language() {
2100 language
2101 } else {
2102 continue;
2103 };
2104 if file.worktree.read(cx).id() != key.0
2105 || language.lsp_adapter().map(|a| a.name())
2106 != Some(key.1.clone())
2107 {
2108 continue;
2109 }
2110
2111 let file = file.as_local()?;
2112 let versions = this
2113 .buffer_snapshots
2114 .entry(buffer.remote_id())
2115 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2116 let (version, initial_snapshot) = versions.last().unwrap();
2117 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2118 let language_id = adapter.id_for_language(language.name().as_ref());
2119 language_server
2120 .notify::<lsp::notification::DidOpenTextDocument>(
2121 lsp::DidOpenTextDocumentParams {
2122 text_document: lsp::TextDocumentItem::new(
2123 uri,
2124 language_id.unwrap_or_default(),
2125 *version,
2126 initial_snapshot.text(),
2127 ),
2128 },
2129 )
2130 .log_err()?;
2131 buffer_handle.update(cx, |buffer, cx| {
2132 buffer.set_completion_triggers(
2133 language_server
2134 .capabilities()
2135 .completion_provider
2136 .as_ref()
2137 .and_then(|provider| {
2138 provider.trigger_characters.clone()
2139 })
2140 .unwrap_or_default(),
2141 cx,
2142 )
2143 });
2144 }
2145 }
2146
2147 cx.notify();
2148 Some(())
2149 });
2150
2151 Some(language_server)
2152 })
2153 });
2154 }
2155
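// Stops the server registered for this worktree/adapter pair: its entry and status are
// removed immediately, and any started server is shut down asynchronously. The returned
// task resolves once shutdown has finished.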
2156 fn stop_language_server(
2157 &mut self,
2158 worktree_id: WorktreeId,
2159 adapter_name: LanguageServerName,
2160 cx: &mut ModelContext<Self>,
2161 ) -> Task<()> {
2162 let key = (worktree_id, adapter_name);
2163 if let Some((_, language_server)) = self.language_servers.remove(&key) {
2164 self.language_server_statuses
2165 .remove(&language_server.server_id());
2166 cx.notify();
2167 }
2168
2169 if let Some(started_language_server) = self.started_language_servers.remove(&key) {
2170 cx.spawn_weak(|this, mut cx| async move {
2171 if let Some(language_server) = started_language_server.await {
2172 if let Some(shutdown) = language_server.shutdown() {
2173 shutdown.await;
2174 }
2175
2176 if let Some(this) = this.upgrade(&cx) {
2177 this.update(&mut cx, |this, cx| {
2178 this.language_server_statuses
2179 .remove(&language_server.server_id());
2180 cx.notify();
2181 });
2182 }
2183 }
2184 })
2185 } else {
2186 Task::ready(())
2187 }
2188 }
2189
2190 pub fn restart_language_servers_for_buffers(
2191 &mut self,
2192 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2193 cx: &mut ModelContext<Self>,
2194 ) -> Option<()> {
2195 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2196 .into_iter()
2197 .filter_map(|buffer| {
2198 let file = File::from_dyn(buffer.read(cx).file())?;
2199 let worktree = file.worktree.read(cx).as_local()?;
2200 let worktree_id = worktree.id();
2201 let worktree_abs_path = worktree.abs_path().clone();
2202 let full_path = file.full_path(cx);
2203 Some((worktree_id, worktree_abs_path, full_path))
2204 })
2205 .collect();
2206 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2207 let language = self.languages.select_language(&full_path)?;
2208 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2209 }
2210
2211 None
2212 }
2213
2214 fn restart_language_server(
2215 &mut self,
2216 worktree_id: WorktreeId,
2217 worktree_path: Arc<Path>,
2218 language: Arc<Language>,
2219 cx: &mut ModelContext<Self>,
2220 ) {
2221 let adapter = if let Some(adapter) = language.lsp_adapter() {
2222 adapter
2223 } else {
2224 return;
2225 };
2226
2227 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2228 cx.spawn_weak(|this, mut cx| async move {
2229 stop.await;
2230 if let Some(this) = this.upgrade(&cx) {
2231 this.update(&mut cx, |this, cx| {
2232 this.start_language_server(worktree_id, worktree_path, language, cx);
2233 });
2234 }
2235 })
2236 .detach();
2237 }
2238
2239 fn on_lsp_diagnostics_published(
2240 &mut self,
2241 server_id: usize,
2242 mut params: lsp::PublishDiagnosticsParams,
2243 adapter: &Arc<dyn LspAdapter>,
2244 cx: &mut ModelContext<Self>,
2245 ) {
2246 adapter.process_diagnostics(&mut params);
2247 self.update_diagnostics(
2248 server_id,
2249 params,
2250 adapter.disk_based_diagnostic_sources(),
2251 cx,
2252 )
2253 .log_err();
2254 }
2255
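// Handles `$/progress` notifications. Progress reported under the adapter's disk-based
// diagnostics token drives the project-wide "diagnostics updating" state; all other
// tokens are tracked as pending work on the server's status and broadcast to guests.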
2256 fn on_lsp_progress(
2257 &mut self,
2258 progress: lsp::ProgressParams,
2259 server_id: usize,
2260 disk_based_diagnostics_progress_token: Option<&str>,
2261 cx: &mut ModelContext<Self>,
2262 ) {
2263 let token = match progress.token {
2264 lsp::NumberOrString::String(token) => token,
2265 lsp::NumberOrString::Number(token) => {
2266 log::info!("skipping numeric progress token {}", token);
2267 return;
2268 }
2269 };
2270 let progress = match progress.value {
2271 lsp::ProgressParamsValue::WorkDone(value) => value,
2272 };
2273 let language_server_status =
2274 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2275 status
2276 } else {
2277 return;
2278 };
2279 match progress {
2280 lsp::WorkDoneProgress::Begin(report) => {
2281 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2282 language_server_status.pending_diagnostic_updates += 1;
2283 if language_server_status.pending_diagnostic_updates == 1 {
2284 self.disk_based_diagnostics_started(server_id, cx);
2285 self.broadcast_language_server_update(
2286 server_id,
2287 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2288 proto::LspDiskBasedDiagnosticsUpdating {},
2289 ),
2290 );
2291 }
2292 } else {
2293 self.on_lsp_work_start(
2294 server_id,
2295 token.clone(),
2296 LanguageServerProgress {
2297 message: report.message.clone(),
2298 percentage: report.percentage.map(|p| p as usize),
2299 last_update_at: Instant::now(),
2300 },
2301 cx,
2302 );
2303 self.broadcast_language_server_update(
2304 server_id,
2305 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2306 token,
2307 message: report.message,
2308 percentage: report.percentage.map(|p| p as u32),
2309 }),
2310 );
2311 }
2312 }
2313 lsp::WorkDoneProgress::Report(report) => {
2314 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2315 self.on_lsp_work_progress(
2316 server_id,
2317 token.clone(),
2318 LanguageServerProgress {
2319 message: report.message.clone(),
2320 percentage: report.percentage.map(|p| p as usize),
2321 last_update_at: Instant::now(),
2322 },
2323 cx,
2324 );
2325 self.broadcast_language_server_update(
2326 server_id,
2327 proto::update_language_server::Variant::WorkProgress(
2328 proto::LspWorkProgress {
2329 token,
2330 message: report.message,
2331 percentage: report.percentage.map(|p| p as u32),
2332 },
2333 ),
2334 );
2335 }
2336 }
2337 lsp::WorkDoneProgress::End(_) => {
2338 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2339 language_server_status.pending_diagnostic_updates -= 1;
2340 if language_server_status.pending_diagnostic_updates == 0 {
2341 self.disk_based_diagnostics_finished(server_id, cx);
2342 self.broadcast_language_server_update(
2343 server_id,
2344 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2345 proto::LspDiskBasedDiagnosticsUpdated {},
2346 ),
2347 );
2348 }
2349 } else {
2350 self.on_lsp_work_end(server_id, token.clone(), cx);
2351 self.broadcast_language_server_update(
2352 server_id,
2353 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2354 token,
2355 }),
2356 );
2357 }
2358 }
2359 }
2360 }
2361
2362 fn on_lsp_work_start(
2363 &mut self,
2364 language_server_id: usize,
2365 token: String,
2366 progress: LanguageServerProgress,
2367 cx: &mut ModelContext<Self>,
2368 ) {
2369 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2370 status.pending_work.insert(token, progress);
2371 cx.notify();
2372 }
2373 }
2374
2375 fn on_lsp_work_progress(
2376 &mut self,
2377 language_server_id: usize,
2378 token: String,
2379 progress: LanguageServerProgress,
2380 cx: &mut ModelContext<Self>,
2381 ) {
2382 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2383 let entry = status
2384 .pending_work
2385 .entry(token)
2386 .or_insert(LanguageServerProgress {
2387 message: Default::default(),
2388 percentage: Default::default(),
2389 last_update_at: progress.last_update_at,
2390 });
2391 if progress.message.is_some() {
2392 entry.message = progress.message;
2393 }
2394 if progress.percentage.is_some() {
2395 entry.percentage = progress.percentage;
2396 }
2397 entry.last_update_at = progress.last_update_at;
2398 cx.notify();
2399 }
2400 }
2401
2402 fn on_lsp_work_end(
2403 &mut self,
2404 language_server_id: usize,
2405 token: String,
2406 cx: &mut ModelContext<Self>,
2407 ) {
2408 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2409 status.pending_work.remove(&token);
2410 cx.notify();
2411 }
2412 }
2413
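// Handles a server-initiated `workspace/applyEdit` request by applying the edit locally
// and stashing the resulting transaction in `last_workspace_edits_by_language_server`,
// so the caller of the originating command (see `apply_code_action`) can return it.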
2414 async fn on_lsp_workspace_edit(
2415 this: WeakModelHandle<Self>,
2416 params: lsp::ApplyWorkspaceEditParams,
2417 server_id: usize,
2418 adapter: Arc<dyn LspAdapter>,
2419 language_server: Arc<LanguageServer>,
2420 mut cx: AsyncAppContext,
2421 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2422 let this = this
2423 .upgrade(&cx)
2424 .ok_or_else(|| anyhow!("project closed"))?;
2425 let transaction = Self::deserialize_workspace_edit(
2426 this.clone(),
2427 params.edit,
2428 true,
2429 adapter.clone(),
2430 language_server.clone(),
2431 &mut cx,
2432 )
2433 .await
2434 .log_err();
2435 this.update(&mut cx, |this, _| {
2436 if let Some(transaction) = transaction {
2437 this.last_workspace_edits_by_language_server
2438 .insert(server_id, transaction);
2439 }
2440 });
2441 Ok(lsp::ApplyWorkspaceEditResponse {
2442 applied: true,
2443 failed_change: None,
2444 failure_reason: None,
2445 })
2446 }
2447
2448 fn broadcast_language_server_update(
2449 &self,
2450 language_server_id: usize,
2451 event: proto::update_language_server::Variant,
2452 ) {
2453 if let Some(project_id) = self.shared_remote_id() {
2454 self.client
2455 .send(proto::UpdateLanguageServer {
2456 project_id,
2457 language_server_id: language_server_id as u64,
2458 variant: Some(event),
2459 })
2460 .log_err();
2461 }
2462 }
2463
2464 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2465 for (_, server) in self.language_servers.values() {
2466 server
2467 .notify::<lsp::notification::DidChangeConfiguration>(
2468 lsp::DidChangeConfigurationParams {
2469 settings: settings.clone(),
2470 },
2471 )
2472 .ok();
2473 }
2474 *self.language_server_settings.lock() = settings;
2475 }
2476
2477 pub fn language_server_statuses(
2478 &self,
2479 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2480 self.language_server_statuses.values()
2481 }
2482
2483 pub fn update_diagnostics(
2484 &mut self,
2485 language_server_id: usize,
2486 params: lsp::PublishDiagnosticsParams,
2487 disk_based_sources: &[&str],
2488 cx: &mut ModelContext<Self>,
2489 ) -> Result<()> {
2490 let abs_path = params
2491 .uri
2492 .to_file_path()
2493 .map_err(|_| anyhow!("URI is not a file"))?;
2494 let mut diagnostics = Vec::default();
2495 let mut primary_diagnostic_group_ids = HashMap::default();
2496 let mut sources_by_group_id = HashMap::default();
2497 let mut supporting_diagnostics = HashMap::default();
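// First pass: classify each LSP diagnostic. A diagnostic whose related information
// points back at an already-recorded primary (same source, code, and range) is treated
// as a supporting entry; everything else starts a new group as its primary diagnostic,
// and any related information within the same file is added as non-primary entries of
// that group.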
2498 for diagnostic in &params.diagnostics {
2499 let source = diagnostic.source.as_ref();
2500 let code = diagnostic.code.as_ref().map(|code| match code {
2501 lsp::NumberOrString::Number(code) => code.to_string(),
2502 lsp::NumberOrString::String(code) => code.clone(),
2503 });
2504 let range = range_from_lsp(diagnostic.range);
2505 let is_supporting = diagnostic
2506 .related_information
2507 .as_ref()
2508 .map_or(false, |infos| {
2509 infos.iter().any(|info| {
2510 primary_diagnostic_group_ids.contains_key(&(
2511 source,
2512 code.clone(),
2513 range_from_lsp(info.location.range),
2514 ))
2515 })
2516 });
2517
2518 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2519 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2520 });
2521
2522 if is_supporting {
2523 supporting_diagnostics.insert(
2524 (source, code.clone(), range),
2525 (diagnostic.severity, is_unnecessary),
2526 );
2527 } else {
2528 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2529 let is_disk_based = source.map_or(false, |source| {
2530 disk_based_sources.contains(&source.as_str())
2531 });
2532
2533 sources_by_group_id.insert(group_id, source);
2534 primary_diagnostic_group_ids
2535 .insert((source, code.clone(), range.clone()), group_id);
2536
2537 diagnostics.push(DiagnosticEntry {
2538 range,
2539 diagnostic: Diagnostic {
2540 code: code.clone(),
2541 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2542 message: diagnostic.message.clone(),
2543 group_id,
2544 is_primary: true,
2545 is_valid: true,
2546 is_disk_based,
2547 is_unnecessary,
2548 },
2549 });
2550 if let Some(infos) = &diagnostic.related_information {
2551 for info in infos {
2552 if info.location.uri == params.uri && !info.message.is_empty() {
2553 let range = range_from_lsp(info.location.range);
2554 diagnostics.push(DiagnosticEntry {
2555 range,
2556 diagnostic: Diagnostic {
2557 code: code.clone(),
2558 severity: DiagnosticSeverity::INFORMATION,
2559 message: info.message.clone(),
2560 group_id,
2561 is_primary: false,
2562 is_valid: true,
2563 is_disk_based,
2564 is_unnecessary: false,
2565 },
2566 });
2567 }
2568 }
2569 }
2570 }
2571 }
2572
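// Second pass: supporting diagnostics only carried a severity and an "unnecessary"
// flag; copy those onto the matching non-primary entries generated above.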
2573 for entry in &mut diagnostics {
2574 let diagnostic = &mut entry.diagnostic;
2575 if !diagnostic.is_primary {
2576 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2577 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2578 source,
2579 diagnostic.code.clone(),
2580 entry.range.clone(),
2581 )) {
2582 if let Some(severity) = severity {
2583 diagnostic.severity = severity;
2584 }
2585 diagnostic.is_unnecessary = is_unnecessary;
2586 }
2587 }
2588 }
2589
2590 self.update_diagnostic_entries(
2591 language_server_id,
2592 abs_path,
2593 params.version,
2594 diagnostics,
2595 cx,
2596 )?;
2597 Ok(())
2598 }
2599
2600 pub fn update_diagnostic_entries(
2601 &mut self,
2602 language_server_id: usize,
2603 abs_path: PathBuf,
2604 version: Option<i32>,
2605 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2606 cx: &mut ModelContext<Project>,
2607 ) -> Result<(), anyhow::Error> {
2608 let (worktree, relative_path) = self
2609 .find_local_worktree(&abs_path, cx)
2610 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2611 if !worktree.read(cx).is_visible() {
2612 return Ok(());
2613 }
2614
2615 let project_path = ProjectPath {
2616 worktree_id: worktree.read(cx).id(),
2617 path: relative_path.into(),
2618 };
2619 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2620 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2621 }
2622
2623 let updated = worktree.update(cx, |worktree, cx| {
2624 worktree
2625 .as_local_mut()
2626 .ok_or_else(|| anyhow!("not a local worktree"))?
2627 .update_diagnostics(
2628 language_server_id,
2629 project_path.path.clone(),
2630 diagnostics,
2631 cx,
2632 )
2633 })?;
2634 if updated {
2635 cx.emit(Event::DiagnosticsUpdated {
2636 language_server_id,
2637 path: project_path,
2638 });
2639 }
2640 Ok(())
2641 }
2642
2643 fn update_buffer_diagnostics(
2644 &mut self,
2645 buffer: &ModelHandle<Buffer>,
2646 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2647 version: Option<i32>,
2648 cx: &mut ModelContext<Self>,
2649 ) -> Result<()> {
2650 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2651 Ordering::Equal
2652 .then_with(|| b.is_primary.cmp(&a.is_primary))
2653 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2654 .then_with(|| a.severity.cmp(&b.severity))
2655 .then_with(|| a.message.cmp(&b.message))
2656 }
2657
2658 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2659
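// Sort by range start (ascending) and range end (descending), breaking ties so that
// primary diagnostics sort before their supporting entries.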
2660 diagnostics.sort_unstable_by(|a, b| {
2661 Ordering::Equal
2662 .then_with(|| a.range.start.cmp(&b.range.start))
2663 .then_with(|| b.range.end.cmp(&a.range.end))
2664 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2665 });
2666
2667 let mut sanitized_diagnostics = Vec::new();
2668 let edits_since_save = Patch::new(
2669 snapshot
2670 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2671 .collect(),
2672 );
2673 for entry in diagnostics {
2674 let start;
2675 let end;
2676 if entry.diagnostic.is_disk_based {
2677 // Some diagnostics are based on files on disk instead of buffers'
2678 // current contents. Adjust these diagnostics' ranges to reflect
2679 // any unsaved edits.
2680 start = edits_since_save.old_to_new(entry.range.start);
2681 end = edits_since_save.old_to_new(entry.range.end);
2682 } else {
2683 start = entry.range.start;
2684 end = entry.range.end;
2685 }
2686
2687 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2688 ..snapshot.clip_point_utf16(end, Bias::Right);
2689
2690 // Expand empty ranges by one character
2691 if range.start == range.end {
2692 range.end.column += 1;
2693 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2694 if range.start == range.end && range.end.column > 0 {
2695 range.start.column -= 1;
2696 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2697 }
2698 }
2699
2700 sanitized_diagnostics.push(DiagnosticEntry {
2701 range,
2702 diagnostic: entry.diagnostic,
2703 });
2704 }
2705 drop(edits_since_save);
2706
2707 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2708 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2709 Ok(())
2710 }
2711
2712 pub fn reload_buffers(
2713 &self,
2714 buffers: HashSet<ModelHandle<Buffer>>,
2715 push_to_history: bool,
2716 cx: &mut ModelContext<Self>,
2717 ) -> Task<Result<ProjectTransaction>> {
2718 let mut local_buffers = Vec::new();
2719 let mut remote_buffers = None;
2720 for buffer_handle in buffers {
2721 let buffer = buffer_handle.read(cx);
2722 if buffer.is_dirty() {
2723 if let Some(file) = File::from_dyn(buffer.file()) {
2724 if file.is_local() {
2725 local_buffers.push(buffer_handle);
2726 } else {
2727 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2728 }
2729 }
2730 }
2731 }
2732
2733 let remote_buffers = self.remote_id().zip(remote_buffers);
2734 let client = self.client.clone();
2735
2736 cx.spawn(|this, mut cx| async move {
2737 let mut project_transaction = ProjectTransaction::default();
2738
2739 if let Some((project_id, remote_buffers)) = remote_buffers {
2740 let response = client
2741 .request(proto::ReloadBuffers {
2742 project_id,
2743 buffer_ids: remote_buffers
2744 .iter()
2745 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2746 .collect(),
2747 })
2748 .await?
2749 .transaction
2750 .ok_or_else(|| anyhow!("missing transaction"))?;
2751 project_transaction = this
2752 .update(&mut cx, |this, cx| {
2753 this.deserialize_project_transaction(response, push_to_history, cx)
2754 })
2755 .await?;
2756 }
2757
2758 for buffer in local_buffers {
2759 let transaction = buffer
2760 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2761 .await?;
2762 buffer.update(&mut cx, |buffer, cx| {
2763 if let Some(transaction) = transaction {
2764 if !push_to_history {
2765 buffer.forget_transaction(transaction.id);
2766 }
2767 project_transaction.0.insert(cx.handle(), transaction);
2768 }
2769 });
2770 }
2771
2772 Ok(project_transaction)
2773 })
2774 }
2775
2776 pub fn format(
2777 &self,
2778 buffers: HashSet<ModelHandle<Buffer>>,
2779 push_to_history: bool,
2780 cx: &mut ModelContext<Project>,
2781 ) -> Task<Result<ProjectTransaction>> {
2782 let mut local_buffers = Vec::new();
2783 let mut remote_buffers = None;
2784 for buffer_handle in buffers {
2785 let buffer = buffer_handle.read(cx);
2786 if let Some(file) = File::from_dyn(buffer.file()) {
2787 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2788 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2789 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2790 }
2791 } else {
2792 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2793 }
2794 } else {
2795 return Task::ready(Ok(Default::default()));
2796 }
2797 }
2798
2799 let remote_buffers = self.remote_id().zip(remote_buffers);
2800 let client = self.client.clone();
2801
2802 cx.spawn(|this, mut cx| async move {
2803 let mut project_transaction = ProjectTransaction::default();
2804
2805 if let Some((project_id, remote_buffers)) = remote_buffers {
2806 let response = client
2807 .request(proto::FormatBuffers {
2808 project_id,
2809 buffer_ids: remote_buffers
2810 .iter()
2811 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2812 .collect(),
2813 })
2814 .await?
2815 .transaction
2816 .ok_or_else(|| anyhow!("missing transaction"))?;
2817 project_transaction = this
2818 .update(&mut cx, |this, cx| {
2819 this.deserialize_project_transaction(response, push_to_history, cx)
2820 })
2821 .await?;
2822 }
2823
2824 for (buffer, buffer_abs_path, language_server) in local_buffers {
2825 let text_document = lsp::TextDocumentIdentifier::new(
2826 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2827 );
2828 let capabilities = &language_server.capabilities();
2829 let tab_size = cx.update(|cx| {
2830 let language_name = buffer.read(cx).language().map(|language| language.name());
2831 cx.global::<Settings>().tab_size(language_name.as_deref())
2832 });
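// Prefer whole-document formatting when the server supports it, fall back to
// formatting the entire buffer as a range, and skip the buffer otherwise.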
2833 let lsp_edits = if capabilities
2834 .document_formatting_provider
2835 .as_ref()
2836 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2837 {
2838 language_server
2839 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2840 text_document,
2841 options: lsp::FormattingOptions {
2842 tab_size,
2843 insert_spaces: true,
2844 insert_final_newline: Some(true),
2845 ..Default::default()
2846 },
2847 work_done_progress_params: Default::default(),
2848 })
2849 .await?
2850 } else if capabilities
2851 .document_range_formatting_provider
2852 .as_ref()
2853 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2854 {
2855 let buffer_start = lsp::Position::new(0, 0);
2856 let buffer_end =
2857 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2858 language_server
2859 .request::<lsp::request::RangeFormatting>(
2860 lsp::DocumentRangeFormattingParams {
2861 text_document,
2862 range: lsp::Range::new(buffer_start, buffer_end),
2863 options: lsp::FormattingOptions {
2864 tab_size,
2865 insert_spaces: true,
2866 insert_final_newline: Some(true),
2867 ..Default::default()
2868 },
2869 work_done_progress_params: Default::default(),
2870 },
2871 )
2872 .await?
2873 } else {
2874 continue;
2875 };
2876
2877 if let Some(lsp_edits) = lsp_edits {
2878 let edits = this
2879 .update(&mut cx, |this, cx| {
2880 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2881 })
2882 .await?;
2883 buffer.update(&mut cx, |buffer, cx| {
2884 buffer.finalize_last_transaction();
2885 buffer.start_transaction();
2886 for (range, text) in edits {
2887 buffer.edit([(range, text)], cx);
2888 }
2889 if buffer.end_transaction(cx).is_some() {
2890 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2891 if !push_to_history {
2892 buffer.forget_transaction(transaction.id);
2893 }
2894 project_transaction.0.insert(cx.handle(), transaction);
2895 }
2896 });
2897 }
2898 }
2899
2900 Ok(project_transaction)
2901 })
2902 }
2903
2904 pub fn definition<T: ToPointUtf16>(
2905 &self,
2906 buffer: &ModelHandle<Buffer>,
2907 position: T,
2908 cx: &mut ModelContext<Self>,
2909 ) -> Task<Result<Vec<Location>>> {
2910 let position = position.to_point_utf16(buffer.read(cx));
2911 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2912 }
2913
2914 pub fn references<T: ToPointUtf16>(
2915 &self,
2916 buffer: &ModelHandle<Buffer>,
2917 position: T,
2918 cx: &mut ModelContext<Self>,
2919 ) -> Task<Result<Vec<Location>>> {
2920 let position = position.to_point_utf16(buffer.read(cx));
2921 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2922 }
2923
2924 pub fn document_highlights<T: ToPointUtf16>(
2925 &self,
2926 buffer: &ModelHandle<Buffer>,
2927 position: T,
2928 cx: &mut ModelContext<Self>,
2929 ) -> Task<Result<Vec<DocumentHighlight>>> {
2930 let position = position.to_point_utf16(buffer.read(cx));
2931
2932 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2933 }
2934
2935 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2936 if self.is_local() {
2937 let mut requests = Vec::new();
2938 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2939 let worktree_id = *worktree_id;
2940 if let Some(worktree) = self
2941 .worktree_for_id(worktree_id, cx)
2942 .and_then(|worktree| worktree.read(cx).as_local())
2943 {
2944 let lsp_adapter = lsp_adapter.clone();
2945 let worktree_abs_path = worktree.abs_path().clone();
2946 requests.push(
2947 language_server
2948 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2949 query: query.to_string(),
2950 ..Default::default()
2951 })
2952 .log_err()
2953 .map(move |response| {
2954 (
2955 lsp_adapter,
2956 worktree_id,
2957 worktree_abs_path,
2958 response.unwrap_or_default(),
2959 )
2960 }),
2961 );
2962 }
2963 }
2964
2965 cx.spawn_weak(|this, cx| async move {
2966 let responses = futures::future::join_all(requests).await;
2967 let this = if let Some(this) = this.upgrade(&cx) {
2968 this
2969 } else {
2970 return Ok(Default::default());
2971 };
2972 this.read_with(&cx, |this, cx| {
2973 let mut symbols = Vec::new();
2974 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2975 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2976 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2977 let mut worktree_id = source_worktree_id;
2978 let path;
2979 if let Some((worktree, rel_path)) =
2980 this.find_local_worktree(&abs_path, cx)
2981 {
2982 worktree_id = worktree.read(cx).id();
2983 path = rel_path;
2984 } else {
2985 path = relativize_path(&worktree_abs_path, &abs_path);
2986 }
2987
2988 let label = this
2989 .languages
2990 .select_language(&path)
2991 .and_then(|language| {
2992 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2993 })
2994 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2995 let signature = this.symbol_signature(worktree_id, &path);
2996
2997 Some(Symbol {
2998 source_worktree_id,
2999 worktree_id,
3000 language_server_name: adapter.name(),
3001 name: lsp_symbol.name,
3002 kind: lsp_symbol.kind,
3003 label,
3004 path,
3005 range: range_from_lsp(lsp_symbol.location.range),
3006 signature,
3007 })
3008 }));
3009 }
3010 Ok(symbols)
3011 })
3012 })
3013 } else if let Some(project_id) = self.remote_id() {
3014 let request = self.client.request(proto::GetProjectSymbols {
3015 project_id,
3016 query: query.to_string(),
3017 });
3018 cx.spawn_weak(|this, cx| async move {
3019 let response = request.await?;
3020 let mut symbols = Vec::new();
3021 if let Some(this) = this.upgrade(&cx) {
3022 this.read_with(&cx, |this, _| {
3023 symbols.extend(
3024 response
3025 .symbols
3026 .into_iter()
3027 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3028 );
3029 })
3030 }
3031 Ok(symbols)
3032 })
3033 } else {
3034 Task::ready(Ok(Default::default()))
3035 }
3036 }
3037
3038 pub fn open_buffer_for_symbol(
3039 &mut self,
3040 symbol: &Symbol,
3041 cx: &mut ModelContext<Self>,
3042 ) -> Task<Result<ModelHandle<Buffer>>> {
3043 if self.is_local() {
3044 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
3045 symbol.source_worktree_id,
3046 symbol.language_server_name.clone(),
3047 )) {
3048 server.clone()
3049 } else {
3050 return Task::ready(Err(anyhow!(
3051 "language server for worktree and language not found"
3052 )));
3053 };
3054
3055 let worktree_abs_path = if let Some(worktree_abs_path) = self
3056 .worktree_for_id(symbol.worktree_id, cx)
3057 .and_then(|worktree| worktree.read(cx).as_local())
3058 .map(|local_worktree| local_worktree.abs_path())
3059 {
3060 worktree_abs_path
3061 } else {
3062 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3063 };
3064 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3065 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3066 uri
3067 } else {
3068 return Task::ready(Err(anyhow!("invalid symbol path")));
3069 };
3070
3071 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3072 } else if let Some(project_id) = self.remote_id() {
3073 let request = self.client.request(proto::OpenBufferForSymbol {
3074 project_id,
3075 symbol: Some(serialize_symbol(symbol)),
3076 });
3077 cx.spawn(|this, mut cx| async move {
3078 let response = request.await?;
3079 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3080 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3081 .await
3082 })
3083 } else {
3084 Task::ready(Err(anyhow!("project does not have a remote id")))
3085 }
3086 }
3087
3088 pub fn hover<T: ToPointUtf16>(
3089 &self,
3090 buffer: &ModelHandle<Buffer>,
3091 position: T,
3092 cx: &mut ModelContext<Self>,
3093 ) -> Task<Result<Option<Hover>>> {
3094 let position = position.to_point_utf16(buffer.read(cx));
3095 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3096 }
3097
3098 pub fn completions<T: ToPointUtf16>(
3099 &self,
3100 source_buffer_handle: &ModelHandle<Buffer>,
3101 position: T,
3102 cx: &mut ModelContext<Self>,
3103 ) -> Task<Result<Vec<Completion>>> {
3104 let source_buffer_handle = source_buffer_handle.clone();
3105 let source_buffer = source_buffer_handle.read(cx);
3106 let buffer_id = source_buffer.remote_id();
3107 let language = source_buffer.language().cloned();
3108 let worktree;
3109 let buffer_abs_path;
3110 if let Some(file) = File::from_dyn(source_buffer.file()) {
3111 worktree = file.worktree.clone();
3112 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3113 } else {
3114 return Task::ready(Ok(Default::default()));
3115 };
3116
3117 let position = position.to_point_utf16(source_buffer);
3118 let anchor = source_buffer.anchor_after(position);
3119
3120 if worktree.read(cx).as_local().is_some() {
3121 let buffer_abs_path = buffer_abs_path.unwrap();
3122 let (_, lang_server) =
3123 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3124 server.clone()
3125 } else {
3126 return Task::ready(Ok(Default::default()));
3127 };
3128
3129 cx.spawn(|_, cx| async move {
3130 let completions = lang_server
3131 .request::<lsp::request::Completion>(lsp::CompletionParams {
3132 text_document_position: lsp::TextDocumentPositionParams::new(
3133 lsp::TextDocumentIdentifier::new(
3134 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3135 ),
3136 point_to_lsp(position),
3137 ),
3138 context: Default::default(),
3139 work_done_progress_params: Default::default(),
3140 partial_result_params: Default::default(),
3141 })
3142 .await
3143 .context("lsp completion request failed")?;
3144
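// The server may answer with either a flat array or a `CompletionList`; normalize
// both into a plain vector of items.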
3145 let completions = if let Some(completions) = completions {
3146 match completions {
3147 lsp::CompletionResponse::Array(completions) => completions,
3148 lsp::CompletionResponse::List(list) => list.items,
3149 }
3150 } else {
3151 Default::default()
3152 };
3153
3154 source_buffer_handle.read_with(&cx, |this, _| {
3155 let snapshot = this.snapshot();
3156 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3157 let mut range_for_token = None;
3158 Ok(completions
3159 .into_iter()
3160 .filter_map(|lsp_completion| {
3161 // For now, we can only handle additional edits if they are returned
3162 // when resolving the completion, not if they are present initially.
3163 if lsp_completion
3164 .additional_text_edits
3165 .as_ref()
3166 .map_or(false, |edits| !edits.is_empty())
3167 {
3168 return None;
3169 }
3170
3171 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3172 // If the language server provides a range to overwrite, then
3173 // check that the range is valid.
3174 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3175 let range = range_from_lsp(edit.range);
3176 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3177 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3178 if start != range.start || end != range.end {
3179 log::info!("completion out of expected range");
3180 return None;
3181 }
3182 (
3183 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3184 edit.new_text.clone(),
3185 )
3186 }
3187 // If the language server does not provide a range, then infer
3188 // the range from the word surrounding the cursor position.
3189 None => {
3190 if position != clipped_position {
3191 log::info!("completion out of expected range");
3192 return None;
3193 }
3194 let Range { start, end } = range_for_token
3195 .get_or_insert_with(|| {
3196 let offset = position.to_offset(&snapshot);
3197 let (range, kind) = snapshot.surrounding_word(offset);
3198 if kind == Some(CharKind::Word) {
3199 range
3200 } else {
3201 offset..offset
3202 }
3203 })
3204 .clone();
3205 let text = lsp_completion
3206 .insert_text
3207 .as_ref()
3208 .unwrap_or(&lsp_completion.label)
3209 .clone();
3210 (
3211 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3212 text,
3213 )
3214 }
3215 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3216 log::info!("unsupported insert/replace completion");
3217 return None;
3218 }
3219 };
3220
3221 Some(Completion {
3222 old_range,
3223 new_text,
3224 label: language
3225 .as_ref()
3226 .and_then(|l| l.label_for_completion(&lsp_completion))
3227 .unwrap_or_else(|| {
3228 CodeLabel::plain(
3229 lsp_completion.label.clone(),
3230 lsp_completion.filter_text.as_deref(),
3231 )
3232 }),
3233 lsp_completion,
3234 })
3235 })
3236 .collect())
3237 })
3238 })
3239 } else if let Some(project_id) = self.remote_id() {
3240 let rpc = self.client.clone();
3241 let message = proto::GetCompletions {
3242 project_id,
3243 buffer_id,
3244 position: Some(language::proto::serialize_anchor(&anchor)),
3245 version: serialize_version(&source_buffer.version()),
3246 };
3247 cx.spawn_weak(|_, mut cx| async move {
3248 let response = rpc.request(message).await?;
3249
3250 source_buffer_handle
3251 .update(&mut cx, |buffer, _| {
3252 buffer.wait_for_version(deserialize_version(response.version))
3253 })
3254 .await;
3255
3256 response
3257 .completions
3258 .into_iter()
3259 .map(|completion| {
3260 language::proto::deserialize_completion(completion, language.as_ref())
3261 })
3262 .collect()
3263 })
3264 } else {
3265 Task::ready(Ok(Default::default()))
3266 }
3267 }
3268
3269 pub fn apply_additional_edits_for_completion(
3270 &self,
3271 buffer_handle: ModelHandle<Buffer>,
3272 completion: Completion,
3273 push_to_history: bool,
3274 cx: &mut ModelContext<Self>,
3275 ) -> Task<Result<Option<Transaction>>> {
3276 let buffer = buffer_handle.read(cx);
3277 let buffer_id = buffer.remote_id();
3278
3279 if self.is_local() {
3280 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3281 {
3282 server.clone()
3283 } else {
3284 return Task::ready(Ok(Default::default()));
3285 };
3286
3287 cx.spawn(|this, mut cx| async move {
3288 let resolved_completion = lang_server
3289 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3290 .await?;
3291 if let Some(edits) = resolved_completion.additional_text_edits {
3292 let edits = this
3293 .update(&mut cx, |this, cx| {
3294 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3295 })
3296 .await?;
3297 buffer_handle.update(&mut cx, |buffer, cx| {
3298 buffer.finalize_last_transaction();
3299 buffer.start_transaction();
3300 for (range, text) in edits {
3301 buffer.edit([(range, text)], cx);
3302 }
3303 let transaction = if buffer.end_transaction(cx).is_some() {
3304 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3305 if !push_to_history {
3306 buffer.forget_transaction(transaction.id);
3307 }
3308 Some(transaction)
3309 } else {
3310 None
3311 };
3312 Ok(transaction)
3313 })
3314 } else {
3315 Ok(None)
3316 }
3317 })
3318 } else if let Some(project_id) = self.remote_id() {
3319 let client = self.client.clone();
3320 cx.spawn(|_, mut cx| async move {
3321 let response = client
3322 .request(proto::ApplyCompletionAdditionalEdits {
3323 project_id,
3324 buffer_id,
3325 completion: Some(language::proto::serialize_completion(&completion)),
3326 })
3327 .await?;
3328
3329 if let Some(transaction) = response.transaction {
3330 let transaction = language::proto::deserialize_transaction(transaction)?;
3331 buffer_handle
3332 .update(&mut cx, |buffer, _| {
3333 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3334 })
3335 .await;
3336 if push_to_history {
3337 buffer_handle.update(&mut cx, |buffer, _| {
3338 buffer.push_transaction(transaction.clone(), Instant::now());
3339 });
3340 }
3341 Ok(Some(transaction))
3342 } else {
3343 Ok(None)
3344 }
3345 })
3346 } else {
3347 Task::ready(Err(anyhow!("project does not have a remote id")))
3348 }
3349 }
3350
3351 pub fn code_actions<T: Clone + ToOffset>(
3352 &self,
3353 buffer_handle: &ModelHandle<Buffer>,
3354 range: Range<T>,
3355 cx: &mut ModelContext<Self>,
3356 ) -> Task<Result<Vec<CodeAction>>> {
3357 let buffer_handle = buffer_handle.clone();
3358 let buffer = buffer_handle.read(cx);
3359 let snapshot = buffer.snapshot();
3360 let relevant_diagnostics = snapshot
3361 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3362 .map(|entry| entry.to_lsp_diagnostic_stub())
3363 .collect();
3364 let buffer_id = buffer.remote_id();
3365 let worktree;
3366 let buffer_abs_path;
3367 if let Some(file) = File::from_dyn(buffer.file()) {
3368 worktree = file.worktree.clone();
3369 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3370 } else {
3371 return Task::ready(Ok(Default::default()));
3372 };
3373 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3374
3375 if worktree.read(cx).as_local().is_some() {
3376 let buffer_abs_path = buffer_abs_path.unwrap();
3377 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3378 {
3379 server.clone()
3380 } else {
3381 return Task::ready(Ok(Default::default()));
3382 };
3383
3384 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3385 cx.foreground().spawn(async move {
3386 if lang_server.capabilities().code_action_provider.is_none() {
3387 return Ok(Default::default());
3388 }
3389
3390 Ok(lang_server
3391 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3392 text_document: lsp::TextDocumentIdentifier::new(
3393 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3394 ),
3395 range: lsp_range,
3396 work_done_progress_params: Default::default(),
3397 partial_result_params: Default::default(),
3398 context: lsp::CodeActionContext {
3399 diagnostics: relevant_diagnostics,
3400 only: Some(vec![
3401 lsp::CodeActionKind::QUICKFIX,
3402 lsp::CodeActionKind::REFACTOR,
3403 lsp::CodeActionKind::REFACTOR_EXTRACT,
3404 lsp::CodeActionKind::SOURCE,
3405 ]),
3406 },
3407 })
3408 .await?
3409 .unwrap_or_default()
3410 .into_iter()
3411 .filter_map(|entry| {
3412 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3413 Some(CodeAction {
3414 range: range.clone(),
3415 lsp_action,
3416 })
3417 } else {
3418 None
3419 }
3420 })
3421 .collect())
3422 })
3423 } else if let Some(project_id) = self.remote_id() {
3424 let rpc = self.client.clone();
3425 let version = buffer.version();
3426 cx.spawn_weak(|_, mut cx| async move {
3427 let response = rpc
3428 .request(proto::GetCodeActions {
3429 project_id,
3430 buffer_id,
3431 start: Some(language::proto::serialize_anchor(&range.start)),
3432 end: Some(language::proto::serialize_anchor(&range.end)),
3433 version: serialize_version(&version),
3434 })
3435 .await?;
3436
3437 buffer_handle
3438 .update(&mut cx, |buffer, _| {
3439 buffer.wait_for_version(deserialize_version(response.version))
3440 })
3441 .await;
3442
3443 response
3444 .actions
3445 .into_iter()
3446 .map(language::proto::deserialize_code_action)
3447 .collect()
3448 })
3449 } else {
3450 Task::ready(Ok(Default::default()))
3451 }
3452 }
3453
3454 pub fn apply_code_action(
3455 &self,
3456 buffer_handle: ModelHandle<Buffer>,
3457 mut action: CodeAction,
3458 push_to_history: bool,
3459 cx: &mut ModelContext<Self>,
3460 ) -> Task<Result<ProjectTransaction>> {
3461 if self.is_local() {
3462 let buffer = buffer_handle.read(cx);
3463 let (lsp_adapter, lang_server) =
3464 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3465 server.clone()
3466 } else {
3467 return Task::ready(Ok(Default::default()));
3468 };
3469 let range = action.range.to_point_utf16(buffer);
3470
3471 cx.spawn(|this, mut cx| async move {
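// If the action supports lazy resolution, patch the range in its `data` payload and
// resolve it; otherwise re-request code actions for the range and pick the one with a
// matching title, since the original action may have become stale.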
3472 if let Some(lsp_range) = action
3473 .lsp_action
3474 .data
3475 .as_mut()
3476 .and_then(|d| d.get_mut("codeActionParams"))
3477 .and_then(|d| d.get_mut("range"))
3478 {
3479 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3480 action.lsp_action = lang_server
3481 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3482 .await?;
3483 } else {
3484 let actions = this
3485 .update(&mut cx, |this, cx| {
3486 this.code_actions(&buffer_handle, action.range, cx)
3487 })
3488 .await?;
3489 action.lsp_action = actions
3490 .into_iter()
3491 .find(|a| a.lsp_action.title == action.lsp_action.title)
3492 .ok_or_else(|| anyhow!("code action is outdated"))?
3493 .lsp_action;
3494 }
3495
3496 if let Some(edit) = action.lsp_action.edit {
3497 Self::deserialize_workspace_edit(
3498 this,
3499 edit,
3500 push_to_history,
3501 lsp_adapter,
3502 lang_server,
3503 &mut cx,
3504 )
3505 .await
3506 } else if let Some(command) = action.lsp_action.command {
3507 this.update(&mut cx, |this, _| {
3508 this.last_workspace_edits_by_language_server
3509 .remove(&lang_server.server_id());
3510 });
3511 lang_server
3512 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3513 command: command.command,
3514 arguments: command.arguments.unwrap_or_default(),
3515 ..Default::default()
3516 })
3517 .await?;
3518 Ok(this.update(&mut cx, |this, _| {
3519 this.last_workspace_edits_by_language_server
3520 .remove(&lang_server.server_id())
3521 .unwrap_or_default()
3522 }))
3523 } else {
3524 Ok(ProjectTransaction::default())
3525 }
3526 })
3527 } else if let Some(project_id) = self.remote_id() {
3528 let client = self.client.clone();
3529 let request = proto::ApplyCodeAction {
3530 project_id,
3531 buffer_id: buffer_handle.read(cx).remote_id(),
3532 action: Some(language::proto::serialize_code_action(&action)),
3533 };
3534 cx.spawn(|this, mut cx| async move {
3535 let response = client
3536 .request(request)
3537 .await?
3538 .transaction
3539 .ok_or_else(|| anyhow!("missing transaction"))?;
3540 this.update(&mut cx, |this, cx| {
3541 this.deserialize_project_transaction(response, push_to_history, cx)
3542 })
3543 .await
3544 })
3545 } else {
3546 Task::ready(Err(anyhow!("project does not have a remote id")))
3547 }
3548 }
3549
3550 async fn deserialize_workspace_edit(
3551 this: ModelHandle<Self>,
3552 edit: lsp::WorkspaceEdit,
3553 push_to_history: bool,
3554 lsp_adapter: Arc<dyn LspAdapter>,
3555 language_server: Arc<LanguageServer>,
3556 cx: &mut AsyncAppContext,
3557 ) -> Result<ProjectTransaction> {
3558 let fs = this.read_with(cx, |this, _| this.fs.clone());
3559 let mut operations = Vec::new();
3560 if let Some(document_changes) = edit.document_changes {
3561 match document_changes {
3562 lsp::DocumentChanges::Edits(edits) => {
3563 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3564 }
3565 lsp::DocumentChanges::Operations(ops) => operations = ops,
3566 }
3567 } else if let Some(changes) = edit.changes {
3568 operations.extend(changes.into_iter().map(|(uri, edits)| {
3569 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3570 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3571 uri,
3572 version: None,
3573 },
3574 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3575 })
3576 }));
3577 }
3578
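// Apply each operation in order: resource operations (create/rename/delete) go through
// the filesystem, while text edits are applied to buffers and the resulting transactions
// are collected into the returned ProjectTransaction.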
3579 let mut project_transaction = ProjectTransaction::default();
3580 for operation in operations {
3581 match operation {
3582 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3583 let abs_path = op
3584 .uri
3585 .to_file_path()
3586 .map_err(|_| anyhow!("can't convert URI to path"))?;
3587
3588 if let Some(parent_path) = abs_path.parent() {
3589 fs.create_dir(parent_path).await?;
3590 }
3591 if abs_path.ends_with("/") {
3592 fs.create_dir(&abs_path).await?;
3593 } else {
3594 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3595 .await?;
3596 }
3597 }
3598 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3599 let source_abs_path = op
3600 .old_uri
3601 .to_file_path()
3602 .map_err(|_| anyhow!("can't convert URI to path"))?;
3603 let target_abs_path = op
3604 .new_uri
3605 .to_file_path()
3606 .map_err(|_| anyhow!("can't convert URI to path"))?;
3607 fs.rename(
3608 &source_abs_path,
3609 &target_abs_path,
3610 op.options.map(Into::into).unwrap_or_default(),
3611 )
3612 .await?;
3613 }
3614 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3615 let abs_path = op
3616 .uri
3617 .to_file_path()
3618 .map_err(|_| anyhow!("can't convert URI to path"))?;
3619 let options = op.options.map(Into::into).unwrap_or_default();
3620 if abs_path.ends_with("/") {
3621 fs.remove_dir(&abs_path, options).await?;
3622 } else {
3623 fs.remove_file(&abs_path, options).await?;
3624 }
3625 }
3626 lsp::DocumentChangeOperation::Edit(op) => {
3627 let buffer_to_edit = this
3628 .update(cx, |this, cx| {
3629 this.open_local_buffer_via_lsp(
3630 op.text_document.uri,
3631 lsp_adapter.clone(),
3632 language_server.clone(),
3633 cx,
3634 )
3635 })
3636 .await?;
3637
3638 let edits = this
3639 .update(cx, |this, cx| {
3640 let edits = op.edits.into_iter().map(|edit| match edit {
3641 lsp::OneOf::Left(edit) => edit,
3642 lsp::OneOf::Right(edit) => edit.text_edit,
3643 });
3644 this.edits_from_lsp(
3645 &buffer_to_edit,
3646 edits,
3647 op.text_document.version,
3648 cx,
3649 )
3650 })
3651 .await?;
3652
3653 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3654 buffer.finalize_last_transaction();
3655 buffer.start_transaction();
3656 for (range, text) in edits {
3657 buffer.edit([(range, text)], cx);
3658 }
3659 let transaction = if buffer.end_transaction(cx).is_some() {
3660 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3661 if !push_to_history {
3662 buffer.forget_transaction(transaction.id);
3663 }
3664 Some(transaction)
3665 } else {
3666 None
3667 };
3668
3669 transaction
3670 });
3671 if let Some(transaction) = transaction {
3672 project_transaction.0.insert(buffer_to_edit, transaction);
3673 }
3674 }
3675 }
3676 }
3677
3678 Ok(project_transaction)
3679 }
3680
3681 pub fn prepare_rename<T: ToPointUtf16>(
3682 &self,
3683 buffer: ModelHandle<Buffer>,
3684 position: T,
3685 cx: &mut ModelContext<Self>,
3686 ) -> Task<Result<Option<Range<Anchor>>>> {
3687 let position = position.to_point_utf16(buffer.read(cx));
3688 self.request_lsp(buffer, PrepareRename { position }, cx)
3689 }
3690
3691 pub fn perform_rename<T: ToPointUtf16>(
3692 &self,
3693 buffer: ModelHandle<Buffer>,
3694 position: T,
3695 new_name: String,
3696 push_to_history: bool,
3697 cx: &mut ModelContext<Self>,
3698 ) -> Task<Result<ProjectTransaction>> {
3699 let position = position.to_point_utf16(buffer.read(cx));
3700 self.request_lsp(
3701 buffer,
3702 PerformRename {
3703 position,
3704 new_name,
3705 push_to_history,
3706 },
3707 cx,
3708 )
3709 }
3710
3711 pub fn search(
3712 &self,
3713 query: SearchQuery,
3714 cx: &mut ModelContext<Self>,
3715 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3716 if self.is_local() {
3717 let snapshots = self
3718 .visible_worktrees(cx)
3719 .filter_map(|tree| {
3720 let tree = tree.read(cx).as_local()?;
3721 Some(tree.snapshot())
3722 })
3723 .collect::<Vec<_>>();
3724
3725 let background = cx.background().clone();
3726 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3727 if path_count == 0 {
3728 return Task::ready(Ok(Default::default()));
3729 }
3730 let workers = background.num_cpus().min(path_count);
3731 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
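// Stage 1: split the visible files of all worktree snapshots into contiguous chunks,
// one per worker, and scan each file on disk for a potential match, streaming matching
// paths into the channel above.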
3732 cx.background()
3733 .spawn({
3734 let fs = self.fs.clone();
3735 let background = cx.background().clone();
3736 let query = query.clone();
3737 async move {
3738 let fs = &fs;
3739 let query = &query;
3740 let matching_paths_tx = &matching_paths_tx;
3741 let paths_per_worker = (path_count + workers - 1) / workers;
3742 let snapshots = &snapshots;
3743 background
3744 .scoped(|scope| {
3745 for worker_ix in 0..workers {
3746 let worker_start_ix = worker_ix * paths_per_worker;
3747 let worker_end_ix = worker_start_ix + paths_per_worker;
3748 scope.spawn(async move {
3749 let mut snapshot_start_ix = 0;
3750 let mut abs_path = PathBuf::new();
3751 for snapshot in snapshots {
3752 let snapshot_end_ix =
3753 snapshot_start_ix + snapshot.visible_file_count();
3754 if worker_end_ix <= snapshot_start_ix {
3755 break;
3756 } else if worker_start_ix > snapshot_end_ix {
3757 snapshot_start_ix = snapshot_end_ix;
3758 continue;
3759 } else {
3760 let start_in_snapshot = worker_start_ix
3761 .saturating_sub(snapshot_start_ix);
3762 let end_in_snapshot =
3763 cmp::min(worker_end_ix, snapshot_end_ix)
3764 - snapshot_start_ix;
3765
3766 for entry in snapshot
3767 .files(false, start_in_snapshot)
3768 .take(end_in_snapshot - start_in_snapshot)
3769 {
3770 if matching_paths_tx.is_closed() {
3771 break;
3772 }
3773
3774 abs_path.clear();
3775 abs_path.push(&snapshot.abs_path());
3776 abs_path.push(&entry.path);
3777 let matches = if let Some(file) =
3778 fs.open_sync(&abs_path).await.log_err()
3779 {
3780 query.detect(file).unwrap_or(false)
3781 } else {
3782 false
3783 };
3784
3785 if matches {
3786 let project_path =
3787 (snapshot.id(), entry.path.clone());
3788 if matching_paths_tx
3789 .send(project_path)
3790 .await
3791 .is_err()
3792 {
3793 break;
3794 }
3795 }
3796 }
3797
3798 snapshot_start_ix = snapshot_end_ix;
3799 }
3800 }
3801 });
3802 }
3803 })
3804 .await;
3805 }
3806 })
3807 .detach();
3808
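// Stage 2: feed already-open buffers directly into the search, then open a buffer for
// each matching path produced by stage 1.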
3809 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3810 let open_buffers = self
3811 .opened_buffers
3812 .values()
3813 .filter_map(|b| b.upgrade(cx))
3814 .collect::<HashSet<_>>();
3815 cx.spawn(|this, cx| async move {
3816 for buffer in &open_buffers {
3817 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3818 buffers_tx.send((buffer.clone(), snapshot)).await?;
3819 }
3820
3821 let open_buffers = Rc::new(RefCell::new(open_buffers));
3822 while let Some(project_path) = matching_paths_rx.next().await {
3823 if buffers_tx.is_closed() {
3824 break;
3825 }
3826
3827 let this = this.clone();
3828 let open_buffers = open_buffers.clone();
3829 let buffers_tx = buffers_tx.clone();
3830 cx.spawn(|mut cx| async move {
3831 if let Some(buffer) = this
3832 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3833 .await
3834 .log_err()
3835 {
3836 if open_buffers.borrow_mut().insert(buffer.clone()) {
3837 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3838 buffers_tx.send((buffer, snapshot)).await?;
3839 }
3840 }
3841
3842 Ok::<_, anyhow::Error>(())
3843 })
3844 .detach();
3845 }
3846
3847 Ok::<_, anyhow::Error>(())
3848 })
3849 .detach_and_log_err(cx);
3850
3851 let background = cx.background().clone();
3852 cx.background().spawn(async move {
3853 let query = &query;
3854 let mut matched_buffers = Vec::new();
3855 for _ in 0..workers {
3856 matched_buffers.push(HashMap::default());
3857 }
3858 background
3859 .scoped(|scope| {
3860 for worker_matched_buffers in matched_buffers.iter_mut() {
3861 let mut buffers_rx = buffers_rx.clone();
3862 scope.spawn(async move {
3863 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3864 let buffer_matches = query
3865 .search(snapshot.as_rope())
3866 .await
3867 .iter()
3868 .map(|range| {
3869 snapshot.anchor_before(range.start)
3870 ..snapshot.anchor_after(range.end)
3871 })
3872 .collect::<Vec<_>>();
3873 if !buffer_matches.is_empty() {
3874 worker_matched_buffers
3875 .insert(buffer.clone(), buffer_matches);
3876 }
3877 }
3878 });
3879 }
3880 })
3881 .await;
3882 Ok(matched_buffers.into_iter().flatten().collect())
3883 })
3884 } else if let Some(project_id) = self.remote_id() {
3885 let request = self.client.request(query.to_proto(project_id));
3886 cx.spawn(|this, mut cx| async move {
3887 let response = request.await?;
3888 let mut result = HashMap::default();
3889 for location in response.locations {
3890 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3891 let target_buffer = this
3892 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3893 .await?;
3894 let start = location
3895 .start
3896 .and_then(deserialize_anchor)
3897 .ok_or_else(|| anyhow!("missing target start"))?;
3898 let end = location
3899 .end
3900 .and_then(deserialize_anchor)
3901 .ok_or_else(|| anyhow!("missing target end"))?;
3902 result
3903 .entry(target_buffer)
3905                        .or_insert_with(Vec::new)
3906                        .push(start..end);
3906 }
3907 Ok(result)
3908 })
3909 } else {
3910 Task::ready(Ok(Default::default()))
3911 }
3912 }
3913
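    /// Issue a request to the language server associated with the buffer, or forward it
    /// to the host when the project is remote. Yields a default response if no server is
    /// available or the server does not advertise the required capability.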
3914 fn request_lsp<R: LspCommand>(
3915 &self,
3916 buffer_handle: ModelHandle<Buffer>,
3917 request: R,
3918 cx: &mut ModelContext<Self>,
3919 ) -> Task<Result<R::Response>>
3920 where
3921 <R::LspRequest as lsp::request::Request>::Result: Send,
3922 {
3923 let buffer = buffer_handle.read(cx);
3924 if self.is_local() {
3925 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3926 if let Some((file, (_, language_server))) =
3927 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3928 {
3929 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3930 return cx.spawn(|this, cx| async move {
3931 if !request.check_capabilities(&language_server.capabilities()) {
3932 return Ok(Default::default());
3933 }
3934
3935 let response = language_server
3936 .request::<R::LspRequest>(lsp_params)
3937 .await
3938 .context("lsp request failed")?;
3939 request
3940 .response_from_lsp(response, this, buffer_handle, cx)
3941 .await
3942 });
3943 }
3944 } else if let Some(project_id) = self.remote_id() {
3945 let rpc = self.client.clone();
3946 let message = request.to_proto(project_id, buffer);
3947 return cx.spawn(|this, cx| async move {
3948 let response = rpc.request(message).await?;
3949 request
3950 .response_from_proto(response, this, buffer_handle, cx)
3951 .await
3952 });
3953 }
3954 Task::ready(Ok(Default::default()))
3955 }
3956
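    /// Return the worktree containing `abs_path` together with the path relative to that
    /// worktree's root, creating a new local worktree when no existing one contains it.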
3957 pub fn find_or_create_local_worktree(
3958 &mut self,
3959 abs_path: impl AsRef<Path>,
3960 visible: bool,
3961 cx: &mut ModelContext<Self>,
3962 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3963 let abs_path = abs_path.as_ref();
3964 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3965 Task::ready(Ok((tree.clone(), relative_path.into())))
3966 } else {
3967 let worktree = self.create_local_worktree(abs_path, visible, cx);
3968 cx.foreground()
3969 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3970 }
3971 }
3972
3973 pub fn find_local_worktree(
3974 &self,
3975 abs_path: &Path,
3976 cx: &AppContext,
3977 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3978 for tree in self.worktrees(cx) {
3979 if let Some(relative_path) = tree
3980 .read(cx)
3981 .as_local()
3982 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3983 {
3984 return Some((tree.clone(), relative_path.into()));
3985 }
3986 }
3987 None
3988 }
3989
3990 pub fn is_shared(&self) -> bool {
3991 match &self.client_state {
3992 ProjectClientState::Local { is_shared, .. } => *is_shared,
3993 ProjectClientState::Remote { .. } => false,
3994 }
3995 }
3996
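    /// Begin loading a local worktree for `abs_path`, deduplicating concurrent requests
    /// for the same path and sharing the new worktree when the project is already shared.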
3997 fn create_local_worktree(
3998 &mut self,
3999 abs_path: impl AsRef<Path>,
4000 visible: bool,
4001 cx: &mut ModelContext<Self>,
4002 ) -> Task<Result<ModelHandle<Worktree>>> {
4003 let fs = self.fs.clone();
4004 let client = self.client.clone();
4005 let next_entry_id = self.next_entry_id.clone();
4006 let path: Arc<Path> = abs_path.as_ref().into();
4007 let task = self
4008 .loading_local_worktrees
4009 .entry(path.clone())
4010 .or_insert_with(|| {
4011 cx.spawn(|project, mut cx| {
4012 async move {
4013 let worktree = Worktree::local(
4014 client.clone(),
4015 path.clone(),
4016 visible,
4017 fs,
4018 next_entry_id,
4019 &mut cx,
4020 )
4021 .await;
4022 project.update(&mut cx, |project, _| {
4023 project.loading_local_worktrees.remove(&path);
4024 });
4025 let worktree = worktree?;
4026
4027 let project_id = project.update(&mut cx, |project, cx| {
4028 project.add_worktree(&worktree, cx);
4029 project.shared_remote_id()
4030 });
4031
4032 if let Some(project_id) = project_id {
4033 worktree
4034 .update(&mut cx, |worktree, cx| {
4035 worktree.as_local_mut().unwrap().share(project_id, cx)
4036 })
4037 .await
4038 .log_err();
4039 }
4040
4041 Ok(worktree)
4042 }
4043                    .map_err(Arc::new)
4044 })
4045 .shared()
4046 })
4047 .clone();
4048 cx.foreground().spawn(async move {
4049 match task.await {
4050 Ok(worktree) => Ok(worktree),
4051 Err(err) => Err(anyhow!("{}", err)),
4052 }
4053 })
4054 }
4055
4056 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4057 self.worktrees.retain(|worktree| {
4058 if let Some(worktree) = worktree.upgrade(cx) {
4059 let id = worktree.read(cx).id();
4060 if id == id_to_remove {
4061 cx.emit(Event::WorktreeRemoved(id));
4062 false
4063 } else {
4064 true
4065 }
4066 } else {
4067 false
4068 }
4069 });
4070 self.metadata_changed(true, cx);
4071 cx.notify();
4072 }
4073
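    /// Register a worktree with this project. The handle is held strongly when the
    /// project is shared or the worktree is visible or remote; otherwise it is held
    /// weakly so that it can be released once no longer in use.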
4074 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4075 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4076 if worktree.read(cx).is_local() {
4077 cx.subscribe(&worktree, |this, worktree, _, cx| {
4078 this.update_local_worktree_buffers(worktree, cx);
4079 })
4080 .detach();
4081 }
4082
4083 let push_strong_handle = {
4084 let worktree = worktree.read(cx);
4085 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4086 };
4087 if push_strong_handle {
4088 self.worktrees
4089 .push(WorktreeHandle::Strong(worktree.clone()));
4090 } else {
4091 cx.observe_release(&worktree, |this, _, cx| {
4092 this.worktrees
4093 .retain(|worktree| worktree.upgrade(cx).is_some());
4094 cx.notify();
4095 })
4096 .detach();
4097 self.worktrees
4098 .push(WorktreeHandle::Weak(worktree.downgrade()));
4099 }
4100 self.metadata_changed(true, cx);
4101 cx.emit(Event::WorktreeAdded);
4102 cx.notify();
4103 }
4104
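    /// Reconcile open buffers with the latest snapshot of a local worktree, refreshing
    /// each buffer's file metadata and re-registering renamed buffers with their
    /// language servers.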
4105 fn update_local_worktree_buffers(
4106 &mut self,
4107 worktree_handle: ModelHandle<Worktree>,
4108 cx: &mut ModelContext<Self>,
4109 ) {
4110 let snapshot = worktree_handle.read(cx).snapshot();
4111 let mut buffers_to_delete = Vec::new();
4112 let mut renamed_buffers = Vec::new();
4113 for (buffer_id, buffer) in &self.opened_buffers {
4114 if let Some(buffer) = buffer.upgrade(cx) {
4115 buffer.update(cx, |buffer, cx| {
4116 if let Some(old_file) = File::from_dyn(buffer.file()) {
4117 if old_file.worktree != worktree_handle {
4118 return;
4119 }
4120
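                        // Resolve the buffer's file against the new snapshot: prefer its
                        // previous entry id, fall back to its old path, and otherwise treat
                        // the file as deleted while retaining the old path and mtime.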
4121 let new_file = if let Some(entry) = old_file
4122 .entry_id
4123 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4124 {
4125 File {
4126 is_local: true,
4127 entry_id: Some(entry.id),
4128 mtime: entry.mtime,
4129 path: entry.path.clone(),
4130 worktree: worktree_handle.clone(),
4131 }
4132 } else if let Some(entry) =
4133 snapshot.entry_for_path(old_file.path().as_ref())
4134 {
4135 File {
4136 is_local: true,
4137 entry_id: Some(entry.id),
4138 mtime: entry.mtime,
4139 path: entry.path.clone(),
4140 worktree: worktree_handle.clone(),
4141 }
4142 } else {
4143 File {
4144 is_local: true,
4145 entry_id: None,
4146 path: old_file.path().clone(),
4147 mtime: old_file.mtime(),
4148 worktree: worktree_handle.clone(),
4149 }
4150 };
4151
4152 let old_path = old_file.abs_path(cx);
4153 if new_file.abs_path(cx) != old_path {
4154 renamed_buffers.push((cx.handle(), old_path));
4155 }
4156
4157 if let Some(project_id) = self.shared_remote_id() {
4158 self.client
4159 .send(proto::UpdateBufferFile {
4160 project_id,
4161 buffer_id: *buffer_id as u64,
4162 file: Some(new_file.to_proto()),
4163 })
4164 .log_err();
4165 }
4166 buffer.file_updated(Arc::new(new_file), cx).detach();
4167 }
4168 });
4169 } else {
4170 buffers_to_delete.push(*buffer_id);
4171 }
4172 }
4173
4174 for buffer_id in buffers_to_delete {
4175 self.opened_buffers.remove(&buffer_id);
4176 }
4177
4178 for (buffer, old_path) in renamed_buffers {
4179 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4180 self.assign_language_to_buffer(&buffer, cx);
4181 self.register_buffer_with_language_server(&buffer, cx);
4182 }
4183 }
4184
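    /// Record which project entry is currently active, emitting
    /// `Event::ActiveEntryChanged` whenever it changes.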
4185 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4186 let new_active_entry = entry.and_then(|project_path| {
4187 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4188 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4189 Some(entry.id)
4190 });
4191 if new_active_entry != self.active_entry {
4192 self.active_entry = new_active_entry;
4193 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4194 }
4195 }
4196
4197 pub fn language_servers_running_disk_based_diagnostics<'a>(
4198 &'a self,
4199 ) -> impl 'a + Iterator<Item = usize> {
4200 self.language_server_statuses
4201 .iter()
4202 .filter_map(|(id, status)| {
4203 if status.pending_diagnostic_updates > 0 {
4204 Some(*id)
4205 } else {
4206 None
4207 }
4208 })
4209 }
4210
4211 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4212 let mut summary = DiagnosticSummary::default();
4213 for (_, path_summary) in self.diagnostic_summaries(cx) {
4214 summary.error_count += path_summary.error_count;
4215 summary.warning_count += path_summary.warning_count;
4216 }
4217 summary
4218 }
4219
4220 pub fn diagnostic_summaries<'a>(
4221 &'a self,
4222 cx: &'a AppContext,
4223 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4224 self.worktrees(cx).flat_map(move |worktree| {
4225 let worktree = worktree.read(cx);
4226 let worktree_id = worktree.id();
4227 worktree
4228 .diagnostic_summaries()
4229 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4230 })
4231 }
4232
4233 pub fn disk_based_diagnostics_started(
4234 &mut self,
4235 language_server_id: usize,
4236 cx: &mut ModelContext<Self>,
4237 ) {
4238 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4239 }
4240
4241 pub fn disk_based_diagnostics_finished(
4242 &mut self,
4243 language_server_id: usize,
4244 cx: &mut ModelContext<Self>,
4245 ) {
4246 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4247 }
4248
4249 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4250 self.active_entry
4251 }
4252
4253 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4254 self.worktree_for_id(path.worktree_id, cx)?
4255 .read(cx)
4256 .entry_for_path(&path.path)
4257 .map(|entry| entry.id)
4258 }
4259
4260 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4261 let worktree = self.worktree_for_entry(entry_id, cx)?;
4262 let worktree = worktree.read(cx);
4263 let worktree_id = worktree.id();
4264 let path = worktree.entry_for_id(entry_id)?.path.clone();
4265 Some(ProjectPath { worktree_id, path })
4266 }
4267
4268 // RPC message handlers
4269
4270 async fn handle_request_join_project(
4271 this: ModelHandle<Self>,
4272 message: TypedEnvelope<proto::RequestJoinProject>,
4273 _: Arc<Client>,
4274 mut cx: AsyncAppContext,
4275 ) -> Result<()> {
4276 let user_id = message.payload.requester_id;
4277 if this.read_with(&cx, |project, _| {
4278 project.collaborators.values().any(|c| c.user.id == user_id)
4279 }) {
4280 this.update(&mut cx, |this, cx| {
4281 this.respond_to_join_request(user_id, true, cx)
4282 });
4283 } else {
4284 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4285 let user = user_store
4286 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4287 .await?;
4288 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4289 }
4290 Ok(())
4291 }
4292
4293 async fn handle_unregister_project(
4294 this: ModelHandle<Self>,
4295 _: TypedEnvelope<proto::UnregisterProject>,
4296 _: Arc<Client>,
4297 mut cx: AsyncAppContext,
4298 ) -> Result<()> {
4299 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4300 Ok(())
4301 }
4302
4303 async fn handle_project_unshared(
4304 this: ModelHandle<Self>,
4305 _: TypedEnvelope<proto::ProjectUnshared>,
4306 _: Arc<Client>,
4307 mut cx: AsyncAppContext,
4308 ) -> Result<()> {
4309 this.update(&mut cx, |this, cx| this.unshared(cx));
4310 Ok(())
4311 }
4312
4313 async fn handle_add_collaborator(
4314 this: ModelHandle<Self>,
4315 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4316 _: Arc<Client>,
4317 mut cx: AsyncAppContext,
4318 ) -> Result<()> {
4319 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4320 let collaborator = envelope
4321 .payload
4322 .collaborator
4323 .take()
4324 .ok_or_else(|| anyhow!("empty collaborator"))?;
4325
4326 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4327 this.update(&mut cx, |this, cx| {
4328 this.collaborators
4329 .insert(collaborator.peer_id, collaborator);
4330 cx.notify();
4331 });
4332
4333 Ok(())
4334 }
4335
4336 async fn handle_remove_collaborator(
4337 this: ModelHandle<Self>,
4338 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4339 _: Arc<Client>,
4340 mut cx: AsyncAppContext,
4341 ) -> Result<()> {
4342 this.update(&mut cx, |this, cx| {
4343 let peer_id = PeerId(envelope.payload.peer_id);
4344 let replica_id = this
4345 .collaborators
4346 .remove(&peer_id)
4347 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4348 .replica_id;
4349 for (_, buffer) in &this.opened_buffers {
4350 if let Some(buffer) = buffer.upgrade(cx) {
4351 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4352 }
4353 }
4354
4355 cx.emit(Event::CollaboratorLeft(peer_id));
4356 cx.notify();
4357 Ok(())
4358 })
4359 }
4360
4361 async fn handle_join_project_request_cancelled(
4362 this: ModelHandle<Self>,
4363 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4364 _: Arc<Client>,
4365 mut cx: AsyncAppContext,
4366 ) -> Result<()> {
4367 let user = this
4368 .update(&mut cx, |this, cx| {
4369 this.user_store.update(cx, |user_store, cx| {
4370 user_store.fetch_user(envelope.payload.requester_id, cx)
4371 })
4372 })
4373 .await?;
4374
4375 this.update(&mut cx, |_, cx| {
4376 cx.emit(Event::ContactCancelledJoinRequest(user));
4377 });
4378
4379 Ok(())
4380 }
4381
4382 async fn handle_update_project(
4383 this: ModelHandle<Self>,
4384 envelope: TypedEnvelope<proto::UpdateProject>,
4385 client: Arc<Client>,
4386 mut cx: AsyncAppContext,
4387 ) -> Result<()> {
4388 this.update(&mut cx, |this, cx| {
4389 let replica_id = this.replica_id();
4390 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4391
4392 let mut old_worktrees_by_id = this
4393 .worktrees
4394 .drain(..)
4395 .filter_map(|worktree| {
4396 let worktree = worktree.upgrade(cx)?;
4397 Some((worktree.read(cx).id(), worktree))
4398 })
4399 .collect::<HashMap<_, _>>();
4400
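            // Keep the handles of worktrees that are still part of the project, and
            // create remote worktrees for any that are new.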
4401 for worktree in envelope.payload.worktrees {
4402 if let Some(old_worktree) =
4403 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4404 {
4405 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4406 } else {
4407 let worktree = proto::Worktree {
4408 id: worktree.id,
4409 root_name: worktree.root_name,
4410 entries: Default::default(),
4411 diagnostic_summaries: Default::default(),
4412 visible: worktree.visible,
4413 scan_id: 0,
4414 };
4415 let (worktree, load_task) =
4416 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4417 this.add_worktree(&worktree, cx);
4418 load_task.detach();
4419 }
4420 }
4421
4422 this.metadata_changed(true, cx);
4423 for (id, _) in old_worktrees_by_id {
4424 cx.emit(Event::WorktreeRemoved(id));
4425 }
4426
4427 Ok(())
4428 })
4429 }
4430
4431 async fn handle_update_worktree(
4432 this: ModelHandle<Self>,
4433 envelope: TypedEnvelope<proto::UpdateWorktree>,
4434 _: Arc<Client>,
4435 mut cx: AsyncAppContext,
4436 ) -> Result<()> {
4437 this.update(&mut cx, |this, cx| {
4438 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4439 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4440 worktree.update(cx, |worktree, _| {
4441 let worktree = worktree.as_remote_mut().unwrap();
4442 worktree.update_from_remote(envelope)
4443 })?;
4444 }
4445 Ok(())
4446 })
4447 }
4448
4449 async fn handle_create_project_entry(
4450 this: ModelHandle<Self>,
4451 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4452 _: Arc<Client>,
4453 mut cx: AsyncAppContext,
4454 ) -> Result<proto::ProjectEntryResponse> {
4455 let worktree = this.update(&mut cx, |this, cx| {
4456 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4457 this.worktree_for_id(worktree_id, cx)
4458 .ok_or_else(|| anyhow!("worktree not found"))
4459 })?;
4460 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4461 let entry = worktree
4462 .update(&mut cx, |worktree, cx| {
4463 let worktree = worktree.as_local_mut().unwrap();
4464 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4465 worktree.create_entry(path, envelope.payload.is_directory, cx)
4466 })
4467 .await?;
4468 Ok(proto::ProjectEntryResponse {
4469 entry: Some((&entry).into()),
4470 worktree_scan_id: worktree_scan_id as u64,
4471 })
4472 }
4473
4474 async fn handle_rename_project_entry(
4475 this: ModelHandle<Self>,
4476 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4477 _: Arc<Client>,
4478 mut cx: AsyncAppContext,
4479 ) -> Result<proto::ProjectEntryResponse> {
4480 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4481 let worktree = this.read_with(&cx, |this, cx| {
4482 this.worktree_for_entry(entry_id, cx)
4483 .ok_or_else(|| anyhow!("worktree not found"))
4484 })?;
4485 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4486 let entry = worktree
4487 .update(&mut cx, |worktree, cx| {
4488 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4489 worktree
4490 .as_local_mut()
4491 .unwrap()
4492 .rename_entry(entry_id, new_path, cx)
4493 .ok_or_else(|| anyhow!("invalid entry"))
4494 })?
4495 .await?;
4496 Ok(proto::ProjectEntryResponse {
4497 entry: Some((&entry).into()),
4498 worktree_scan_id: worktree_scan_id as u64,
4499 })
4500 }
4501
4502 async fn handle_copy_project_entry(
4503 this: ModelHandle<Self>,
4504 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4505 _: Arc<Client>,
4506 mut cx: AsyncAppContext,
4507 ) -> Result<proto::ProjectEntryResponse> {
4508 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4509 let worktree = this.read_with(&cx, |this, cx| {
4510 this.worktree_for_entry(entry_id, cx)
4511 .ok_or_else(|| anyhow!("worktree not found"))
4512 })?;
4513 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4514 let entry = worktree
4515 .update(&mut cx, |worktree, cx| {
4516 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4517 worktree
4518 .as_local_mut()
4519 .unwrap()
4520 .copy_entry(entry_id, new_path, cx)
4521 .ok_or_else(|| anyhow!("invalid entry"))
4522 })?
4523 .await?;
4524 Ok(proto::ProjectEntryResponse {
4525 entry: Some((&entry).into()),
4526 worktree_scan_id: worktree_scan_id as u64,
4527 })
4528 }
4529
4530 async fn handle_delete_project_entry(
4531 this: ModelHandle<Self>,
4532 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4533 _: Arc<Client>,
4534 mut cx: AsyncAppContext,
4535 ) -> Result<proto::ProjectEntryResponse> {
4536 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4537 let worktree = this.read_with(&cx, |this, cx| {
4538 this.worktree_for_entry(entry_id, cx)
4539 .ok_or_else(|| anyhow!("worktree not found"))
4540 })?;
4541 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4542 worktree
4543 .update(&mut cx, |worktree, cx| {
4544 worktree
4545 .as_local_mut()
4546 .unwrap()
4547 .delete_entry(entry_id, cx)
4548 .ok_or_else(|| anyhow!("invalid entry"))
4549 })?
4550 .await?;
4551 Ok(proto::ProjectEntryResponse {
4552 entry: None,
4553 worktree_scan_id: worktree_scan_id as u64,
4554 })
4555 }
4556
4557 async fn handle_update_diagnostic_summary(
4558 this: ModelHandle<Self>,
4559 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4560 _: Arc<Client>,
4561 mut cx: AsyncAppContext,
4562 ) -> Result<()> {
4563 this.update(&mut cx, |this, cx| {
4564 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4565 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4566 if let Some(summary) = envelope.payload.summary {
4567 let project_path = ProjectPath {
4568 worktree_id,
4569 path: Path::new(&summary.path).into(),
4570 };
4571 worktree.update(cx, |worktree, _| {
4572 worktree
4573 .as_remote_mut()
4574 .unwrap()
4575 .update_diagnostic_summary(project_path.path.clone(), &summary);
4576 });
4577 cx.emit(Event::DiagnosticsUpdated {
4578 language_server_id: summary.language_server_id as usize,
4579 path: project_path,
4580 });
4581 }
4582 }
4583 Ok(())
4584 })
4585 }
4586
4587 async fn handle_start_language_server(
4588 this: ModelHandle<Self>,
4589 envelope: TypedEnvelope<proto::StartLanguageServer>,
4590 _: Arc<Client>,
4591 mut cx: AsyncAppContext,
4592 ) -> Result<()> {
4593 let server = envelope
4594 .payload
4595 .server
4596 .ok_or_else(|| anyhow!("invalid server"))?;
4597 this.update(&mut cx, |this, cx| {
4598 this.language_server_statuses.insert(
4599 server.id as usize,
4600 LanguageServerStatus {
4601 name: server.name,
4602 pending_work: Default::default(),
4603 pending_diagnostic_updates: 0,
4604 },
4605 );
4606 cx.notify();
4607 });
4608 Ok(())
4609 }
4610
4611 async fn handle_update_language_server(
4612 this: ModelHandle<Self>,
4613 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4614 _: Arc<Client>,
4615 mut cx: AsyncAppContext,
4616 ) -> Result<()> {
4617 let language_server_id = envelope.payload.language_server_id as usize;
4618 match envelope
4619 .payload
4620 .variant
4621 .ok_or_else(|| anyhow!("invalid variant"))?
4622 {
4623 proto::update_language_server::Variant::WorkStart(payload) => {
4624 this.update(&mut cx, |this, cx| {
4625 this.on_lsp_work_start(
4626 language_server_id,
4627 payload.token,
4628 LanguageServerProgress {
4629 message: payload.message,
4630 percentage: payload.percentage.map(|p| p as usize),
4631 last_update_at: Instant::now(),
4632 },
4633 cx,
4634 );
4635 })
4636 }
4637 proto::update_language_server::Variant::WorkProgress(payload) => {
4638 this.update(&mut cx, |this, cx| {
4639 this.on_lsp_work_progress(
4640 language_server_id,
4641 payload.token,
4642 LanguageServerProgress {
4643 message: payload.message,
4644 percentage: payload.percentage.map(|p| p as usize),
4645 last_update_at: Instant::now(),
4646 },
4647 cx,
4648 );
4649 })
4650 }
4651 proto::update_language_server::Variant::WorkEnd(payload) => {
4652 this.update(&mut cx, |this, cx| {
4653 this.on_lsp_work_end(language_server_id, payload.token, cx);
4654 })
4655 }
4656 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4657 this.update(&mut cx, |this, cx| {
4658 this.disk_based_diagnostics_started(language_server_id, cx);
4659 })
4660 }
4661 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4662 this.update(&mut cx, |this, cx| {
4663 this.disk_based_diagnostics_finished(language_server_id, cx)
4664 });
4665 }
4666 }
4667
4668 Ok(())
4669 }
4670
4671 async fn handle_update_buffer(
4672 this: ModelHandle<Self>,
4673 envelope: TypedEnvelope<proto::UpdateBuffer>,
4674 _: Arc<Client>,
4675 mut cx: AsyncAppContext,
4676 ) -> Result<()> {
4677 this.update(&mut cx, |this, cx| {
4678 let payload = envelope.payload.clone();
4679 let buffer_id = payload.buffer_id;
4680 let ops = payload
4681 .operations
4682 .into_iter()
4683 .map(|op| language::proto::deserialize_operation(op))
4684 .collect::<Result<Vec<_>, _>>()?;
4685 let is_remote = this.is_remote();
4686 match this.opened_buffers.entry(buffer_id) {
4687 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4688 OpenBuffer::Strong(buffer) => {
4689 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4690 }
4691 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4692 OpenBuffer::Weak(_) => {}
4693 },
4694 hash_map::Entry::Vacant(e) => {
4695 assert!(
4696 is_remote,
4697 "received buffer update from {:?}",
4698 envelope.original_sender_id
4699 );
4700 e.insert(OpenBuffer::Loading(ops));
4701 }
4702 }
4703 Ok(())
4704 })
4705 }
4706
4707 async fn handle_update_buffer_file(
4708 this: ModelHandle<Self>,
4709 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4710 _: Arc<Client>,
4711 mut cx: AsyncAppContext,
4712 ) -> Result<()> {
4713 this.update(&mut cx, |this, cx| {
4714 let payload = envelope.payload.clone();
4715 let buffer_id = payload.buffer_id;
4716 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4717 let worktree = this
4718 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4719 .ok_or_else(|| anyhow!("no such worktree"))?;
4720 let file = File::from_proto(file, worktree.clone(), cx)?;
4721 let buffer = this
4722 .opened_buffers
4723 .get_mut(&buffer_id)
4724 .and_then(|b| b.upgrade(cx))
4725 .ok_or_else(|| anyhow!("no such buffer"))?;
4726 buffer.update(cx, |buffer, cx| {
4727 buffer.file_updated(Arc::new(file), cx).detach();
4728 });
4729 Ok(())
4730 })
4731 }
4732
4733 async fn handle_save_buffer(
4734 this: ModelHandle<Self>,
4735 envelope: TypedEnvelope<proto::SaveBuffer>,
4736 _: Arc<Client>,
4737 mut cx: AsyncAppContext,
4738 ) -> Result<proto::BufferSaved> {
4739 let buffer_id = envelope.payload.buffer_id;
4740 let requested_version = deserialize_version(envelope.payload.version);
4741
4742 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4743 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4744 let buffer = this
4745 .opened_buffers
4746 .get(&buffer_id)
4747 .and_then(|buffer| buffer.upgrade(cx))
4748 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4749 Ok::<_, anyhow::Error>((project_id, buffer))
4750 })?;
4751 buffer
4752 .update(&mut cx, |buffer, _| {
4753 buffer.wait_for_version(requested_version)
4754 })
4755 .await;
4756
4757 let (saved_version, fingerprint, mtime) =
4758 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4759 Ok(proto::BufferSaved {
4760 project_id,
4761 buffer_id,
4762 version: serialize_version(&saved_version),
4763 mtime: Some(mtime.into()),
4764 fingerprint,
4765 })
4766 }
4767
4768 async fn handle_reload_buffers(
4769 this: ModelHandle<Self>,
4770 envelope: TypedEnvelope<proto::ReloadBuffers>,
4771 _: Arc<Client>,
4772 mut cx: AsyncAppContext,
4773 ) -> Result<proto::ReloadBuffersResponse> {
4774 let sender_id = envelope.original_sender_id()?;
4775 let reload = this.update(&mut cx, |this, cx| {
4776 let mut buffers = HashSet::default();
4777 for buffer_id in &envelope.payload.buffer_ids {
4778 buffers.insert(
4779 this.opened_buffers
4780 .get(buffer_id)
4781 .and_then(|buffer| buffer.upgrade(cx))
4782 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4783 );
4784 }
4785 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4786 })?;
4787
4788 let project_transaction = reload.await?;
4789 let project_transaction = this.update(&mut cx, |this, cx| {
4790 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4791 });
4792 Ok(proto::ReloadBuffersResponse {
4793 transaction: Some(project_transaction),
4794 })
4795 }
4796
4797 async fn handle_format_buffers(
4798 this: ModelHandle<Self>,
4799 envelope: TypedEnvelope<proto::FormatBuffers>,
4800 _: Arc<Client>,
4801 mut cx: AsyncAppContext,
4802 ) -> Result<proto::FormatBuffersResponse> {
4803 let sender_id = envelope.original_sender_id()?;
4804 let format = this.update(&mut cx, |this, cx| {
4805 let mut buffers = HashSet::default();
4806 for buffer_id in &envelope.payload.buffer_ids {
4807 buffers.insert(
4808 this.opened_buffers
4809 .get(buffer_id)
4810 .and_then(|buffer| buffer.upgrade(cx))
4811 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4812 );
4813 }
4814 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4815 })?;
4816
4817 let project_transaction = format.await?;
4818 let project_transaction = this.update(&mut cx, |this, cx| {
4819 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4820 });
4821 Ok(proto::FormatBuffersResponse {
4822 transaction: Some(project_transaction),
4823 })
4824 }
4825
4826 async fn handle_get_completions(
4827 this: ModelHandle<Self>,
4828 envelope: TypedEnvelope<proto::GetCompletions>,
4829 _: Arc<Client>,
4830 mut cx: AsyncAppContext,
4831 ) -> Result<proto::GetCompletionsResponse> {
4832 let position = envelope
4833 .payload
4834 .position
4835 .and_then(language::proto::deserialize_anchor)
4836 .ok_or_else(|| anyhow!("invalid position"))?;
4837 let version = deserialize_version(envelope.payload.version);
4838 let buffer = this.read_with(&cx, |this, cx| {
4839 this.opened_buffers
4840 .get(&envelope.payload.buffer_id)
4841 .and_then(|buffer| buffer.upgrade(cx))
4842 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4843 })?;
4844 buffer
4845 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4846 .await;
4847 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4848 let completions = this
4849 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4850 .await?;
4851
4852 Ok(proto::GetCompletionsResponse {
4853 completions: completions
4854 .iter()
4855 .map(language::proto::serialize_completion)
4856 .collect(),
4857 version: serialize_version(&version),
4858 })
4859 }
4860
4861 async fn handle_apply_additional_edits_for_completion(
4862 this: ModelHandle<Self>,
4863 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4864 _: Arc<Client>,
4865 mut cx: AsyncAppContext,
4866 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4867 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4868 let buffer = this
4869 .opened_buffers
4870 .get(&envelope.payload.buffer_id)
4871 .and_then(|buffer| buffer.upgrade(cx))
4872 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4873 let language = buffer.read(cx).language();
4874 let completion = language::proto::deserialize_completion(
4875 envelope
4876 .payload
4877 .completion
4878 .ok_or_else(|| anyhow!("invalid completion"))?,
4879 language,
4880 )?;
4881 Ok::<_, anyhow::Error>(
4882 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4883 )
4884 })?;
4885
4886 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4887 transaction: apply_additional_edits
4888 .await?
4889 .as_ref()
4890 .map(language::proto::serialize_transaction),
4891 })
4892 }
4893
4894 async fn handle_get_code_actions(
4895 this: ModelHandle<Self>,
4896 envelope: TypedEnvelope<proto::GetCodeActions>,
4897 _: Arc<Client>,
4898 mut cx: AsyncAppContext,
4899 ) -> Result<proto::GetCodeActionsResponse> {
4900 let start = envelope
4901 .payload
4902 .start
4903 .and_then(language::proto::deserialize_anchor)
4904 .ok_or_else(|| anyhow!("invalid start"))?;
4905 let end = envelope
4906 .payload
4907 .end
4908 .and_then(language::proto::deserialize_anchor)
4909 .ok_or_else(|| anyhow!("invalid end"))?;
4910 let buffer = this.update(&mut cx, |this, cx| {
4911 this.opened_buffers
4912 .get(&envelope.payload.buffer_id)
4913 .and_then(|buffer| buffer.upgrade(cx))
4914 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4915 })?;
4916 buffer
4917 .update(&mut cx, |buffer, _| {
4918 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4919 })
4920 .await;
4921
4922 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4923 let code_actions = this.update(&mut cx, |this, cx| {
4924 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4925 })?;
4926
4927 Ok(proto::GetCodeActionsResponse {
4928 actions: code_actions
4929 .await?
4930 .iter()
4931 .map(language::proto::serialize_code_action)
4932 .collect(),
4933 version: serialize_version(&version),
4934 })
4935 }
4936
4937 async fn handle_apply_code_action(
4938 this: ModelHandle<Self>,
4939 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4940 _: Arc<Client>,
4941 mut cx: AsyncAppContext,
4942 ) -> Result<proto::ApplyCodeActionResponse> {
4943 let sender_id = envelope.original_sender_id()?;
4944 let action = language::proto::deserialize_code_action(
4945 envelope
4946 .payload
4947 .action
4948 .ok_or_else(|| anyhow!("invalid action"))?,
4949 )?;
4950 let apply_code_action = this.update(&mut cx, |this, cx| {
4951 let buffer = this
4952 .opened_buffers
4953 .get(&envelope.payload.buffer_id)
4954 .and_then(|buffer| buffer.upgrade(cx))
4955 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4956 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4957 })?;
4958
4959 let project_transaction = apply_code_action.await?;
4960 let project_transaction = this.update(&mut cx, |this, cx| {
4961 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4962 });
4963 Ok(proto::ApplyCodeActionResponse {
4964 transaction: Some(project_transaction),
4965 })
4966 }
4967
4968 async fn handle_lsp_command<T: LspCommand>(
4969 this: ModelHandle<Self>,
4970 envelope: TypedEnvelope<T::ProtoRequest>,
4971 _: Arc<Client>,
4972 mut cx: AsyncAppContext,
4973 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4974 where
4975 <T::LspRequest as lsp::request::Request>::Result: Send,
4976 {
4977 let sender_id = envelope.original_sender_id()?;
4978 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4979 let buffer_handle = this.read_with(&cx, |this, _| {
4980 this.opened_buffers
4981 .get(&buffer_id)
4982 .and_then(|buffer| buffer.upgrade(&cx))
4983 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4984 })?;
4985 let request = T::from_proto(
4986 envelope.payload,
4987 this.clone(),
4988 buffer_handle.clone(),
4989 cx.clone(),
4990 )
4991 .await?;
4992 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4993 let response = this
4994 .update(&mut cx, |this, cx| {
4995 this.request_lsp(buffer_handle, request, cx)
4996 })
4997 .await?;
4998 this.update(&mut cx, |this, cx| {
4999 Ok(T::response_to_proto(
5000 response,
5001 this,
5002 sender_id,
5003 &buffer_version,
5004 cx,
5005 ))
5006 })
5007 }
5008
5009 async fn handle_get_project_symbols(
5010 this: ModelHandle<Self>,
5011 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5012 _: Arc<Client>,
5013 mut cx: AsyncAppContext,
5014 ) -> Result<proto::GetProjectSymbolsResponse> {
5015 let symbols = this
5016 .update(&mut cx, |this, cx| {
5017 this.symbols(&envelope.payload.query, cx)
5018 })
5019 .await?;
5020
5021 Ok(proto::GetProjectSymbolsResponse {
5022 symbols: symbols.iter().map(serialize_symbol).collect(),
5023 })
5024 }
5025
5026 async fn handle_search_project(
5027 this: ModelHandle<Self>,
5028 envelope: TypedEnvelope<proto::SearchProject>,
5029 _: Arc<Client>,
5030 mut cx: AsyncAppContext,
5031 ) -> Result<proto::SearchProjectResponse> {
5032 let peer_id = envelope.original_sender_id()?;
5033 let query = SearchQuery::from_proto(envelope.payload)?;
5034 let result = this
5035 .update(&mut cx, |this, cx| this.search(query, cx))
5036 .await?;
5037
5038 this.update(&mut cx, |this, cx| {
5039 let mut locations = Vec::new();
5040 for (buffer, ranges) in result {
5041 for range in ranges {
5042 let start = serialize_anchor(&range.start);
5043 let end = serialize_anchor(&range.end);
5044 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5045 locations.push(proto::Location {
5046 buffer: Some(buffer),
5047 start: Some(start),
5048 end: Some(end),
5049 });
5050 }
5051 }
5052 Ok(proto::SearchProjectResponse { locations })
5053 })
5054 }
5055
5056 async fn handle_open_buffer_for_symbol(
5057 this: ModelHandle<Self>,
5058 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5059 _: Arc<Client>,
5060 mut cx: AsyncAppContext,
5061 ) -> Result<proto::OpenBufferForSymbolResponse> {
5062 let peer_id = envelope.original_sender_id()?;
5063 let symbol = envelope
5064 .payload
5065 .symbol
5066 .ok_or_else(|| anyhow!("invalid symbol"))?;
5067 let symbol = this.read_with(&cx, |this, _| {
5068 let symbol = this.deserialize_symbol(symbol)?;
5069 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5070 if signature == symbol.signature {
5071 Ok(symbol)
5072 } else {
5073 Err(anyhow!("invalid symbol signature"))
5074 }
5075 })?;
5076 let buffer = this
5077 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5078 .await?;
5079
5080 Ok(proto::OpenBufferForSymbolResponse {
5081 buffer: Some(this.update(&mut cx, |this, cx| {
5082 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5083 })),
5084 })
5085 }
5086
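    /// Hash the worktree id and path together with this project's nonce, producing a
    /// signature that is used to validate symbols sent back by peers.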
5087 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5088 let mut hasher = Sha256::new();
5089 hasher.update(worktree_id.to_proto().to_be_bytes());
5090 hasher.update(path.to_string_lossy().as_bytes());
5091 hasher.update(self.nonce.to_be_bytes());
5092 hasher.finalize().as_slice().try_into().unwrap()
5093 }
5094
5095 async fn handle_open_buffer_by_id(
5096 this: ModelHandle<Self>,
5097 envelope: TypedEnvelope<proto::OpenBufferById>,
5098 _: Arc<Client>,
5099 mut cx: AsyncAppContext,
5100 ) -> Result<proto::OpenBufferResponse> {
5101 let peer_id = envelope.original_sender_id()?;
5102 let buffer = this
5103 .update(&mut cx, |this, cx| {
5104 this.open_buffer_by_id(envelope.payload.id, cx)
5105 })
5106 .await?;
5107 this.update(&mut cx, |this, cx| {
5108 Ok(proto::OpenBufferResponse {
5109 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5110 })
5111 })
5112 }
5113
5114 async fn handle_open_buffer_by_path(
5115 this: ModelHandle<Self>,
5116 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5117 _: Arc<Client>,
5118 mut cx: AsyncAppContext,
5119 ) -> Result<proto::OpenBufferResponse> {
5120 let peer_id = envelope.original_sender_id()?;
5121 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5122 let open_buffer = this.update(&mut cx, |this, cx| {
5123 this.open_buffer(
5124 ProjectPath {
5125 worktree_id,
5126 path: PathBuf::from(envelope.payload.path).into(),
5127 },
5128 cx,
5129 )
5130 });
5131
5132 let buffer = open_buffer.await?;
5133 this.update(&mut cx, |this, cx| {
5134 Ok(proto::OpenBufferResponse {
5135 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5136 })
5137 })
5138 }
5139
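    /// Convert a `ProjectTransaction` into its protobuf form, serializing each affected
    /// buffer for the given peer so it can apply the transactions to the same buffers.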
5140 fn serialize_project_transaction_for_peer(
5141 &mut self,
5142 project_transaction: ProjectTransaction,
5143 peer_id: PeerId,
5144 cx: &AppContext,
5145 ) -> proto::ProjectTransaction {
5146 let mut serialized_transaction = proto::ProjectTransaction {
5147 buffers: Default::default(),
5148 transactions: Default::default(),
5149 };
5150 for (buffer, transaction) in project_transaction.0 {
5151 serialized_transaction
5152 .buffers
5153 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5154 serialized_transaction
5155 .transactions
5156 .push(language::proto::serialize_transaction(&transaction));
5157 }
5158 serialized_transaction
5159 }
5160
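    /// Rebuild a `ProjectTransaction` from its protobuf form: open the referenced
    /// buffers, wait for the transactions' edits to arrive, and optionally push each
    /// transaction onto the corresponding buffer's history.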
5161 fn deserialize_project_transaction(
5162 &mut self,
5163 message: proto::ProjectTransaction,
5164 push_to_history: bool,
5165 cx: &mut ModelContext<Self>,
5166 ) -> Task<Result<ProjectTransaction>> {
5167 cx.spawn(|this, mut cx| async move {
5168 let mut project_transaction = ProjectTransaction::default();
5169 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5170 let buffer = this
5171 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5172 .await?;
5173 let transaction = language::proto::deserialize_transaction(transaction)?;
5174 project_transaction.0.insert(buffer, transaction);
5175 }
5176
5177 for (buffer, transaction) in &project_transaction.0 {
5178 buffer
5179 .update(&mut cx, |buffer, _| {
5180 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5181 })
5182 .await;
5183
5184 if push_to_history {
5185 buffer.update(&mut cx, |buffer, _| {
5186 buffer.push_transaction(transaction.clone(), Instant::now());
5187 });
5188 }
5189 }
5190
5191 Ok(project_transaction)
5192 })
5193 }
5194
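    /// Serialize a buffer for a peer. The full buffer state is sent only the first time
    /// a given peer sees the buffer; afterwards just the buffer id is sent.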
5195 fn serialize_buffer_for_peer(
5196 &mut self,
5197 buffer: &ModelHandle<Buffer>,
5198 peer_id: PeerId,
5199 cx: &AppContext,
5200 ) -> proto::Buffer {
5201 let buffer_id = buffer.read(cx).remote_id();
5202 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5203 if shared_buffers.insert(buffer_id) {
5204 proto::Buffer {
5205 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5206 }
5207 } else {
5208 proto::Buffer {
5209 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5210 }
5211 }
5212 }
5213
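    /// Resolve a protobuf buffer into a buffer handle, either waiting for a buffer with
    /// the given id to finish opening or constructing a new buffer from the serialized
    /// state and registering it with the project.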
5214 fn deserialize_buffer(
5215 &mut self,
5216 buffer: proto::Buffer,
5217 cx: &mut ModelContext<Self>,
5218 ) -> Task<Result<ModelHandle<Buffer>>> {
5219 let replica_id = self.replica_id();
5220
5221 let opened_buffer_tx = self.opened_buffer.0.clone();
5222 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5223 cx.spawn(|this, mut cx| async move {
5224 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5225 proto::buffer::Variant::Id(id) => {
5226 let buffer = loop {
5227 let buffer = this.read_with(&cx, |this, cx| {
5228 this.opened_buffers
5229 .get(&id)
5230 .and_then(|buffer| buffer.upgrade(cx))
5231 });
5232 if let Some(buffer) = buffer {
5233 break buffer;
5234 }
5235 opened_buffer_rx
5236 .next()
5237 .await
5238 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5239 };
5240 Ok(buffer)
5241 }
5242 proto::buffer::Variant::State(mut buffer) => {
5243 let mut buffer_worktree = None;
5244 let mut buffer_file = None;
5245 if let Some(file) = buffer.file.take() {
5246 this.read_with(&cx, |this, cx| {
5247 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5248 let worktree =
5249 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5250 anyhow!("no worktree found for id {}", file.worktree_id)
5251 })?;
5252 buffer_file =
5253 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5254 as Arc<dyn language::File>);
5255 buffer_worktree = Some(worktree);
5256 Ok::<_, anyhow::Error>(())
5257 })?;
5258 }
5259
5260 let buffer = cx.add_model(|cx| {
5261 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5262 });
5263
5264 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5265
5266 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5267 Ok(buffer)
5268 }
5269 }
5270 })
5271 }
5272
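    /// Reconstruct a `Symbol` from its protobuf form, resolving a language from its path
    /// so that a language-specific label can be computed.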
5273 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5274 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5275 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5276 let start = serialized_symbol
5277 .start
5278 .ok_or_else(|| anyhow!("invalid start"))?;
5279 let end = serialized_symbol
5280 .end
5281 .ok_or_else(|| anyhow!("invalid end"))?;
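        // The symbol kind arrives as a raw integer on the wire; this transmute assumes
        // the peer sent a value that is valid for the symbol kind enum.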
5282 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5283 let path = PathBuf::from(serialized_symbol.path);
5284 let language = self.languages.select_language(&path);
5285 Ok(Symbol {
5286 source_worktree_id,
5287 worktree_id,
5288 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5289 label: language
5290 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5291 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5292 name: serialized_symbol.name,
5293 path,
5294 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5295 kind,
5296 signature: serialized_symbol
5297 .signature
5298 .try_into()
5299 .map_err(|_| anyhow!("invalid signature"))?,
5300 })
5301 }
5302
5303 async fn handle_buffer_saved(
5304 this: ModelHandle<Self>,
5305 envelope: TypedEnvelope<proto::BufferSaved>,
5306 _: Arc<Client>,
5307 mut cx: AsyncAppContext,
5308 ) -> Result<()> {
5309 let version = deserialize_version(envelope.payload.version);
5310 let mtime = envelope
5311 .payload
5312 .mtime
5313 .ok_or_else(|| anyhow!("missing mtime"))?
5314 .into();
5315
5316 this.update(&mut cx, |this, cx| {
5317 let buffer = this
5318 .opened_buffers
5319 .get(&envelope.payload.buffer_id)
5320 .and_then(|buffer| buffer.upgrade(cx));
5321 if let Some(buffer) = buffer {
5322 buffer.update(cx, |buffer, cx| {
5323 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5324 });
5325 }
5326 Ok(())
5327 })
5328 }
5329
5330 async fn handle_buffer_reloaded(
5331 this: ModelHandle<Self>,
5332 envelope: TypedEnvelope<proto::BufferReloaded>,
5333 _: Arc<Client>,
5334 mut cx: AsyncAppContext,
5335 ) -> Result<()> {
5336 let payload = envelope.payload.clone();
5337 let version = deserialize_version(payload.version);
5338 let mtime = payload
5339 .mtime
5340 .ok_or_else(|| anyhow!("missing mtime"))?
5341 .into();
5342 this.update(&mut cx, |this, cx| {
5343 let buffer = this
5344 .opened_buffers
5345 .get(&payload.buffer_id)
5346 .and_then(|buffer| buffer.upgrade(cx));
5347 if let Some(buffer) = buffer {
5348 buffer.update(cx, |buffer, cx| {
5349 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5350 });
5351 }
5352 Ok(())
5353 })
5354 }
5355
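    /// Fuzzy-match `query` against the paths of all visible worktrees, prefixing
    /// candidates with their worktree's root name when more than one worktree is
    /// visible.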
5356 pub fn match_paths<'a>(
5357 &self,
5358 query: &'a str,
5359 include_ignored: bool,
5360 smart_case: bool,
5361 max_results: usize,
5362 cancel_flag: &'a AtomicBool,
5363 cx: &AppContext,
5364 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5365 let worktrees = self
5366 .worktrees(cx)
5367 .filter(|worktree| worktree.read(cx).is_visible())
5368 .collect::<Vec<_>>();
5369 let include_root_name = worktrees.len() > 1;
5370 let candidate_sets = worktrees
5371 .into_iter()
5372 .map(|worktree| CandidateSet {
5373 snapshot: worktree.read(cx).snapshot(),
5374 include_ignored,
5375 include_root_name,
5376 })
5377 .collect::<Vec<_>>();
5378
5379 let background = cx.background().clone();
5380 async move {
5381 fuzzy::match_paths(
5382 candidate_sets.as_slice(),
5383 query,
5384 smart_case,
5385 max_results,
5386 cancel_flag,
5387 background,
5388 )
5389 .await
5390 }
5391 }
5392
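    /// Convert LSP text edits into anchored buffer edits against the snapshot that
    /// corresponds to the given LSP document version, coalescing adjacent edits and
    /// diffing multi-line replacements so anchors in unchanged regions stay stable.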
5393 fn edits_from_lsp(
5394 &mut self,
5395 buffer: &ModelHandle<Buffer>,
5396 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5397 version: Option<i32>,
5398 cx: &mut ModelContext<Self>,
5399 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5400 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5401 cx.background().spawn(async move {
5402 let snapshot = snapshot?;
5403 let mut lsp_edits = lsp_edits
5404 .into_iter()
5405 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5406 .collect::<Vec<_>>();
5407 lsp_edits.sort_by_key(|(range, _)| range.start);
5408
5409 let mut lsp_edits = lsp_edits.into_iter().peekable();
5410 let mut edits = Vec::new();
5411 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5412 // Combine any LSP edits that are adjacent.
5413 //
5414 // Also, combine LSP edits that are separated from each other by only
5415 // a newline. This is important because for some code actions,
5416 // Rust-analyzer rewrites the entire buffer via a series of edits that
5417 // are separated by unchanged newline characters.
5418 //
5419 // In order for the diffing logic below to work properly, any edits that
5420 // cancel each other out must be combined into one.
5421 while let Some((next_range, next_text)) = lsp_edits.peek() {
5422 if next_range.start > range.end {
5423 if next_range.start.row > range.end.row + 1
5424 || next_range.start.column > 0
5425 || snapshot.clip_point_utf16(
5426 PointUtf16::new(range.end.row, u32::MAX),
5427 Bias::Left,
5428 ) > range.end
5429 {
5430 break;
5431 }
5432 new_text.push('\n');
5433 }
5434 range.end = next_range.end;
5435 new_text.push_str(&next_text);
5436 lsp_edits.next();
5437 }
5438
5439 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5440 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5441 {
5442 return Err(anyhow!("invalid edits received from language server"));
5443 }
5444
5445 // For multiline edits, perform a diff of the old and new text so that
5446 // we can identify the changes more precisely, preserving the locations
5447 // of any anchors positioned in the unchanged regions.
5448 if range.end.row > range.start.row {
5449 let mut offset = range.start.to_offset(&snapshot);
5450 let old_text = snapshot.text_for_range(range).collect::<String>();
5451
5452 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5453 let mut moved_since_edit = true;
5454 for change in diff.iter_all_changes() {
5455 let tag = change.tag();
5456 let value = change.value();
5457 match tag {
5458 ChangeTag::Equal => {
5459 offset += value.len();
5460 moved_since_edit = true;
5461 }
5462 ChangeTag::Delete => {
5463 let start = snapshot.anchor_after(offset);
5464 let end = snapshot.anchor_before(offset + value.len());
5465 if moved_since_edit {
5466 edits.push((start..end, String::new()));
5467 } else {
5468 edits.last_mut().unwrap().0.end = end;
5469 }
5470 offset += value.len();
5471 moved_since_edit = false;
5472 }
5473 ChangeTag::Insert => {
5474 if moved_since_edit {
5475 let anchor = snapshot.anchor_after(offset);
5476 edits.push((anchor.clone()..anchor, value.to_string()));
5477 } else {
5478 edits.last_mut().unwrap().1.push_str(value);
5479 }
5480 moved_since_edit = false;
5481 }
5482 }
5483 }
5484 } else if range.end == range.start {
5485 let anchor = snapshot.anchor_after(range.start);
5486 edits.push((anchor.clone()..anchor, new_text));
5487 } else {
5488 let edit_start = snapshot.anchor_after(range.start);
5489 let edit_end = snapshot.anchor_before(range.end);
5490 edits.push((edit_start..edit_end, new_text));
5491 }
5492 }
5493
5494 Ok(edits)
5495 })
5496 }
5497
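    /// Look up the buffer snapshot that corresponds to the given LSP document version,
    /// pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the one
    /// requested. Without a version, the buffer's current text snapshot is returned.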
5498 fn buffer_snapshot_for_lsp_version(
5499 &mut self,
5500 buffer: &ModelHandle<Buffer>,
5501 version: Option<i32>,
5502 cx: &AppContext,
5503 ) -> Result<TextBufferSnapshot> {
5504 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5505
5506 if let Some(version) = version {
5507 let buffer_id = buffer.read(cx).remote_id();
5508 let snapshots = self
5509 .buffer_snapshots
5510 .get_mut(&buffer_id)
5511 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5512 let mut found_snapshot = None;
5513 snapshots.retain(|(snapshot_version, snapshot)| {
5514 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5515 false
5516 } else {
5517 if *snapshot_version == version {
5518 found_snapshot = Some(snapshot.clone());
5519 }
5520 true
5521 }
5522 });
5523
5524 found_snapshot.ok_or_else(|| {
5525 anyhow!(
5526 "snapshot not found for buffer {} at version {}",
5527 buffer_id,
5528 version
5529 )
5530 })
5531 } else {
5532            Ok(buffer.read(cx).text_snapshot())
5533 }
5534 }
5535
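    /// Find the language server (and its adapter) that was started for this buffer's
    /// worktree and language, if any.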
5536 fn language_server_for_buffer(
5537 &self,
5538 buffer: &Buffer,
5539 cx: &AppContext,
5540 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5541 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5542 let worktree_id = file.worktree_id(cx);
5543 self.language_servers
5544 .get(&(worktree_id, language.lsp_adapter()?.name()))
5545 } else {
5546 None
5547 }
5548 }
5549}
5550
5551impl ProjectStore {
5552 pub fn new(db: Arc<Db>) -> Self {
5553 Self {
5554 db,
5555 projects: Default::default(),
5556 }
5557 }
5558
5559 pub fn projects<'a>(
5560 &'a self,
5561 cx: &'a AppContext,
5562 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5563 self.projects
5564 .iter()
5565 .filter_map(|project| project.upgrade(cx))
5566 }
5567
5568 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5569 if let Err(ix) = self
5570 .projects
5571 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5572 {
5573 self.projects.insert(ix, project);
5574 }
5575 cx.notify();
5576 }
5577
5578 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5579 let mut did_change = false;
5580 self.projects.retain(|project| {
5581 if project.is_upgradable(cx) {
5582 true
5583 } else {
5584 did_change = true;
5585 false
5586 }
5587 });
5588 if did_change {
5589 cx.notify();
5590 }
5591 }
5592}
5593
5594impl WorktreeHandle {
5595 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5596 match self {
5597 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5598 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5599 }
5600 }
5601}
5602
5603impl OpenBuffer {
5604 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5605 match self {
5606 OpenBuffer::Strong(handle) => Some(handle.clone()),
5607 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5608 OpenBuffer::Loading(_) => None,
5609 }
5610 }
5611}
5612
5613struct CandidateSet {
5614 snapshot: Snapshot,
5615 include_ignored: bool,
5616 include_root_name: bool,
5617}
5618
5619impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5620 type Candidates = CandidateSetIter<'a>;
5621
5622 fn id(&self) -> usize {
5623 self.snapshot.id().to_usize()
5624 }
5625
5626 fn len(&self) -> usize {
5627 if self.include_ignored {
5628 self.snapshot.file_count()
5629 } else {
5630 self.snapshot.visible_file_count()
5631 }
5632 }
5633
5634 fn prefix(&self) -> Arc<str> {
5635 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5636 self.snapshot.root_name().into()
5637 } else if self.include_root_name {
5638 format!("{}/", self.snapshot.root_name()).into()
5639 } else {
5640 "".into()
5641 }
5642 }
5643
5644 fn candidates(&'a self, start: usize) -> Self::Candidates {
5645 CandidateSetIter {
5646 traversal: self.snapshot.files(self.include_ignored, start),
5647 }
5648 }
5649}
5650
5651struct CandidateSetIter<'a> {
5652 traversal: Traversal<'a>,
5653}
5654
5655impl<'a> Iterator for CandidateSetIter<'a> {
5656 type Item = PathMatchCandidate<'a>;
5657
5658 fn next(&mut self) -> Option<Self::Item> {
5659 self.traversal.next().map(|entry| {
5660 if let EntryKind::File(char_bag) = entry.kind {
5661 PathMatchCandidate {
5662 path: &entry.path,
5663 char_bag,
5664 }
5665 } else {
5666 unreachable!()
5667 }
5668 })
5669 }
5670}
5671
5672impl Entity for ProjectStore {
5673 type Event = ();
5674}
5675
5676impl Entity for Project {
5677 type Event = Event;
5678
5679 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5680 self.project_store.update(cx, ProjectStore::prune_projects);
5681
5682 match &self.client_state {
5683 ProjectClientState::Local { remote_id_rx, .. } => {
5684 if let Some(project_id) = *remote_id_rx.borrow() {
5685 self.client
5686 .send(proto::UnregisterProject { project_id })
5687 .log_err();
5688 }
5689 }
5690 ProjectClientState::Remote { remote_id, .. } => {
5691 self.client
5692 .send(proto::LeaveProject {
5693 project_id: *remote_id,
5694 })
5695 .log_err();
5696 }
5697 }
5698 }
5699
5700 fn app_will_quit(
5701 &mut self,
5702 _: &mut MutableAppContext,
5703 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
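// Give every running language server a chance to shut down cleanly before the app exits.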
5704 let shutdown_futures = self
5705 .language_servers
5706 .drain()
5707 .filter_map(|(_, (_, server))| server.shutdown())
5708 .collect::<Vec<_>>();
5709 Some(
5710 async move {
5711 futures::future::join_all(shutdown_futures).await;
5712 }
5713 .boxed(),
5714 )
5715 }
5716}
5717
5718impl Collaborator {
5719 fn from_proto(
5720 message: proto::Collaborator,
5721 user_store: &ModelHandle<UserStore>,
5722 cx: &mut AsyncAppContext,
5723 ) -> impl Future<Output = Result<Self>> {
5724 let user = user_store.update(cx, |user_store, cx| {
5725 user_store.fetch_user(message.user_id, cx)
5726 });
5727
5728 async move {
5729 Ok(Self {
5730 peer_id: PeerId(message.peer_id),
5731 user: user.await?,
5732 replica_id: message.replica_id as ReplicaId,
5733 })
5734 }
5735 }
5736}
5737
5738impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5739 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5740 Self {
5741 worktree_id,
5742 path: path.as_ref().into(),
5743 }
5744 }
5745}
5746
5747impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5748 fn from(options: lsp::CreateFileOptions) -> Self {
5749 Self {
5750 overwrite: options.overwrite.unwrap_or(false),
5751 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5752 }
5753 }
5754}
5755
5756impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5757 fn from(options: lsp::RenameFileOptions) -> Self {
5758 Self {
5759 overwrite: options.overwrite.unwrap_or(false),
5760 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5761 }
5762 }
5763}
5764
5765impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5766 fn from(options: lsp::DeleteFileOptions) -> Self {
5767 Self {
5768 recursive: options.recursive.unwrap_or(false),
5769 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5770 }
5771 }
5772}
5773
5774fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5775 proto::Symbol {
5776 source_worktree_id: symbol.source_worktree_id.to_proto(),
5777 worktree_id: symbol.worktree_id.to_proto(),
5778 language_server_name: symbol.language_server_name.0.to_string(),
5779 name: symbol.name.clone(),
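// `proto::Symbol::kind` is assumed to share the same integer representation as
// `lsp::SymbolKind`, so the value is transmuted directly.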
5780 kind: unsafe { mem::transmute(symbol.kind) },
5781 path: symbol.path.to_string_lossy().to_string(),
5782 start: Some(proto::Point {
5783 row: symbol.range.start.row,
5784 column: symbol.range.start.column,
5785 }),
5786 end: Some(proto::Point {
5787 row: symbol.range.end.row,
5788 column: symbol.range.end.column,
5789 }),
5790 signature: symbol.signature.to_vec(),
5791 }
5792}
5793
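/// Returns the path to `path` expressed relative to `base`, inserting `..` components where needed.
/// For example, relativizing `/a/b/c` against `/a/d` yields `../b/c`.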
5794fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5795 let mut path_components = path.components();
5796 let mut base_components = base.components();
5797 let mut components: Vec<Component> = Vec::new();
5798 loop {
5799 match (path_components.next(), base_components.next()) {
5800 (None, None) => break,
5801 (Some(a), None) => {
5802 components.push(a);
5803 components.extend(path_components.by_ref());
5804 break;
5805 }
5806 (None, _) => components.push(Component::ParentDir),
5807 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5808 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5809 (Some(a), Some(_)) => {
5810 components.push(Component::ParentDir);
5811 for _ in base_components {
5812 components.push(Component::ParentDir);
5813 }
5814 components.push(a);
5815 components.extend(path_components.by_ref());
5816 break;
5817 }
5818 }
5819 }
5820 components.iter().map(|c| c.as_os_str()).collect()
5821}
5822
5823impl Item for Buffer {
5824 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5825 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5826 }
5827}
5828
5829#[cfg(test)]
5830mod tests {
5831 use crate::worktree::WorktreeHandle;
5832
5833 use super::{Event, *};
5834 use fs::RealFs;
5835 use futures::{future, StreamExt};
5836 use gpui::{executor::Deterministic, test::subscribe};
5837 use language::{
5838 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5839 OffsetRangeExt, Point, ToPoint,
5840 };
5841 use lsp::Url;
5842 use serde_json::json;
5843 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5844 use unindent::Unindent as _;
5845 use util::{assert_set_eq, test::temp_tree};
5846
5847 #[gpui::test]
5848 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5849 let dir = temp_tree(json!({
5850 "root": {
5851 "apple": "",
5852 "banana": {
5853 "carrot": {
5854 "date": "",
5855 "endive": "",
5856 }
5857 },
5858 "fennel": {
5859 "grape": "",
5860 }
5861 }
5862 }));
5863
5864 let root_link_path = dir.path().join("root_link");
5865 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5866 unix::fs::symlink(
5867 &dir.path().join("root/fennel"),
5868 &dir.path().join("root/finnochio"),
5869 )
5870 .unwrap();
5871
5872 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5873
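// The worktree is rooted at a symlink to `root`, and the `finnochio` symlink inside it
// is traversed as well, so the same file is reachable via two paths.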
5874 project.read_with(cx, |project, cx| {
5875 let tree = project.worktrees(cx).next().unwrap().read(cx);
5876 assert_eq!(tree.file_count(), 5);
5877 assert_eq!(
5878 tree.inode_for_path("fennel/grape"),
5879 tree.inode_for_path("finnochio/grape")
5880 );
5881 });
5882
5883 let cancel_flag = Default::default();
5884 let results = project
5885 .read_with(cx, |project, cx| {
5886 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5887 })
5888 .await;
5889 assert_eq!(
5890 results
5891 .into_iter()
5892 .map(|result| result.path)
5893 .collect::<Vec<Arc<Path>>>(),
5894 vec![
5895 PathBuf::from("banana/carrot/date").into(),
5896 PathBuf::from("banana/carrot/endive").into(),
5897 ]
5898 );
5899 }
5900
5901 #[gpui::test]
5902 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5903 cx.foreground().forbid_parking();
5904
5905 let mut rust_language = Language::new(
5906 LanguageConfig {
5907 name: "Rust".into(),
5908 path_suffixes: vec!["rs".to_string()],
5909 ..Default::default()
5910 },
5911 Some(tree_sitter_rust::language()),
5912 );
5913 let mut json_language = Language::new(
5914 LanguageConfig {
5915 name: "JSON".into(),
5916 path_suffixes: vec!["json".to_string()],
5917 ..Default::default()
5918 },
5919 None,
5920 );
5921 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5922 name: "the-rust-language-server",
5923 capabilities: lsp::ServerCapabilities {
5924 completion_provider: Some(lsp::CompletionOptions {
5925 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5926 ..Default::default()
5927 }),
5928 ..Default::default()
5929 },
5930 ..Default::default()
5931 });
5932 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5933 name: "the-json-language-server",
5934 capabilities: lsp::ServerCapabilities {
5935 completion_provider: Some(lsp::CompletionOptions {
5936 trigger_characters: Some(vec![":".to_string()]),
5937 ..Default::default()
5938 }),
5939 ..Default::default()
5940 },
5941 ..Default::default()
5942 });
5943
5944 let fs = FakeFs::new(cx.background());
5945 fs.insert_tree(
5946 "/the-root",
5947 json!({
5948 "test.rs": "const A: i32 = 1;",
5949 "test2.rs": "",
5950 "Cargo.toml": "a = 1",
5951 "package.json": "{\"a\": 1}",
5952 }),
5953 )
5954 .await;
5955
5956 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5957 project.update(cx, |project, _| {
5958 project.languages.add(Arc::new(rust_language));
5959 project.languages.add(Arc::new(json_language));
5960 });
5961
5962 // Open a buffer without an associated language server.
5963 let toml_buffer = project
5964 .update(cx, |project, cx| {
5965 project.open_local_buffer("/the-root/Cargo.toml", cx)
5966 })
5967 .await
5968 .unwrap();
5969
5970 // Open a buffer with an associated language server.
5971 let rust_buffer = project
5972 .update(cx, |project, cx| {
5973 project.open_local_buffer("/the-root/test.rs", cx)
5974 })
5975 .await
5976 .unwrap();
5977
5978 // A server is started up, and it is notified about Rust files.
5979 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5980 assert_eq!(
5981 fake_rust_server
5982 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5983 .await
5984 .text_document,
5985 lsp::TextDocumentItem {
5986 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5987 version: 0,
5988 text: "const A: i32 = 1;".to_string(),
5989 language_id: Default::default()
5990 }
5991 );
5992
5993 // The buffer is configured based on the language server's capabilities.
5994 rust_buffer.read_with(cx, |buffer, _| {
5995 assert_eq!(
5996 buffer.completion_triggers(),
5997 &[".".to_string(), "::".to_string()]
5998 );
5999 });
6000 toml_buffer.read_with(cx, |buffer, _| {
6001 assert!(buffer.completion_triggers().is_empty());
6002 });
6003
6004 // Edit a buffer. The changes are reported to the language server.
6005 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6006 assert_eq!(
6007 fake_rust_server
6008 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6009 .await
6010 .text_document,
6011 lsp::VersionedTextDocumentIdentifier::new(
6012 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6013 1
6014 )
6015 );
6016
6017 // Open a third buffer with a different associated language server.
6018 let json_buffer = project
6019 .update(cx, |project, cx| {
6020 project.open_local_buffer("/the-root/package.json", cx)
6021 })
6022 .await
6023 .unwrap();
6024
6025 // A JSON language server is started and is notified only about the JSON buffer.
6026 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6027 assert_eq!(
6028 fake_json_server
6029 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6030 .await
6031 .text_document,
6032 lsp::TextDocumentItem {
6033 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6034 version: 0,
6035 text: "{\"a\": 1}".to_string(),
6036 language_id: Default::default()
6037 }
6038 );
6039
6040 // This buffer is configured based on the second language server's
6041 // capabilities.
6042 json_buffer.read_with(cx, |buffer, _| {
6043 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6044 });
6045
6046 // When opening another buffer whose language server is already running,
6047 // it is also configured based on the existing language server's capabilities.
6048 let rust_buffer2 = project
6049 .update(cx, |project, cx| {
6050 project.open_local_buffer("/the-root/test2.rs", cx)
6051 })
6052 .await
6053 .unwrap();
6054 rust_buffer2.read_with(cx, |buffer, _| {
6055 assert_eq!(
6056 buffer.completion_triggers(),
6057 &[".".to_string(), "::".to_string()]
6058 );
6059 });
6060
6061 // Changes are reported only to servers matching the buffer's language.
6062 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6063 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6064 assert_eq!(
6065 fake_rust_server
6066 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6067 .await
6068 .text_document,
6069 lsp::VersionedTextDocumentIdentifier::new(
6070 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6071 1
6072 )
6073 );
6074
6075 // Save notifications are reported to all servers.
6076 toml_buffer
6077 .update(cx, |buffer, cx| buffer.save(cx))
6078 .await
6079 .unwrap();
6080 assert_eq!(
6081 fake_rust_server
6082 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6083 .await
6084 .text_document,
6085 lsp::TextDocumentIdentifier::new(
6086 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6087 )
6088 );
6089 assert_eq!(
6090 fake_json_server
6091 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6092 .await
6093 .text_document,
6094 lsp::TextDocumentIdentifier::new(
6095 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6096 )
6097 );
6098
6099 // Renames are reported only to servers matching the buffer's language.
6100 fs.rename(
6101 Path::new("/the-root/test2.rs"),
6102 Path::new("/the-root/test3.rs"),
6103 Default::default(),
6104 )
6105 .await
6106 .unwrap();
6107 assert_eq!(
6108 fake_rust_server
6109 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6110 .await
6111 .text_document,
6112 lsp::TextDocumentIdentifier::new(
6113 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6114 ),
6115 );
6116 assert_eq!(
6117 fake_rust_server
6118 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6119 .await
6120 .text_document,
6121 lsp::TextDocumentItem {
6122 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6123 version: 0,
6124 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6125 language_id: Default::default()
6126 },
6127 );
6128
6129 rust_buffer2.update(cx, |buffer, cx| {
6130 buffer.update_diagnostics(
6131 DiagnosticSet::from_sorted_entries(
6132 vec![DiagnosticEntry {
6133 diagnostic: Default::default(),
6134 range: Anchor::MIN..Anchor::MAX,
6135 }],
6136 &buffer.snapshot(),
6137 ),
6138 cx,
6139 );
6140 assert_eq!(
6141 buffer
6142 .snapshot()
6143 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6144 .count(),
6145 1
6146 );
6147 });
6148
6149 // When the rename changes the extension of the file, the buffer gets closed on the old
6150 // language server and gets opened on the new one.
6151 fs.rename(
6152 Path::new("/the-root/test3.rs"),
6153 Path::new("/the-root/test3.json"),
6154 Default::default(),
6155 )
6156 .await
6157 .unwrap();
6158 assert_eq!(
6159 fake_rust_server
6160 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6161 .await
6162 .text_document,
6163 lsp::TextDocumentIdentifier::new(
6164 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6165 ),
6166 );
6167 assert_eq!(
6168 fake_json_server
6169 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6170 .await
6171 .text_document,
6172 lsp::TextDocumentItem {
6173 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6174 version: 0,
6175 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6176 language_id: Default::default()
6177 },
6178 );
6179
6180 // We clear the diagnostics, since the language has changed.
6181 rust_buffer2.read_with(cx, |buffer, _| {
6182 assert_eq!(
6183 buffer
6184 .snapshot()
6185 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6186 .count(),
6187 0
6188 );
6189 });
6190
6191 // The renamed file's version resets after switching language servers.
6192 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6193 assert_eq!(
6194 fake_json_server
6195 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6196 .await
6197 .text_document,
6198 lsp::VersionedTextDocumentIdentifier::new(
6199 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6200 1
6201 )
6202 );
6203
6204 // Restart language servers
6205 project.update(cx, |project, cx| {
6206 project.restart_language_servers_for_buffers(
6207 vec![rust_buffer.clone(), json_buffer.clone()],
6208 cx,
6209 );
6210 });
6211
6212 let mut rust_shutdown_requests = fake_rust_server
6213 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6214 let mut json_shutdown_requests = fake_json_server
6215 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6216 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6217
6218 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6219 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6220
6221 // Ensure the Rust document is reopened in the new Rust language server.
6222 assert_eq!(
6223 fake_rust_server
6224 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6225 .await
6226 .text_document,
6227 lsp::TextDocumentItem {
6228 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6229 version: 1,
6230 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6231 language_id: Default::default()
6232 }
6233 );
6234
6235 // Ensure the JSON documents are reopened in the new JSON language server.
6236 assert_set_eq!(
6237 [
6238 fake_json_server
6239 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6240 .await
6241 .text_document,
6242 fake_json_server
6243 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6244 .await
6245 .text_document,
6246 ],
6247 [
6248 lsp::TextDocumentItem {
6249 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6250 version: 0,
6251 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6252 language_id: Default::default()
6253 },
6254 lsp::TextDocumentItem {
6255 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6256 version: 1,
6257 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6258 language_id: Default::default()
6259 }
6260 ]
6261 );
6262
6263 // Close notifications are reported only to servers matching the buffer's language.
6264 cx.update(|_| drop(json_buffer));
6265 let close_message = lsp::DidCloseTextDocumentParams {
6266 text_document: lsp::TextDocumentIdentifier::new(
6267 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6268 ),
6269 };
6270 assert_eq!(
6271 fake_json_server
6272 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6273 .await,
6274 close_message,
6275 );
6276 }
6277
6278 #[gpui::test]
6279 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6280 cx.foreground().forbid_parking();
6281
6282 let fs = FakeFs::new(cx.background());
6283 fs.insert_tree(
6284 "/dir",
6285 json!({
6286 "a.rs": "let a = 1;",
6287 "b.rs": "let b = 2;"
6288 }),
6289 )
6290 .await;
6291
6292 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6293
6294 let buffer_a = project
6295 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6296 .await
6297 .unwrap();
6298 let buffer_b = project
6299 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6300 .await
6301 .unwrap();
6302
6303 project.update(cx, |project, cx| {
6304 project
6305 .update_diagnostics(
6306 0,
6307 lsp::PublishDiagnosticsParams {
6308 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6309 version: None,
6310 diagnostics: vec![lsp::Diagnostic {
6311 range: lsp::Range::new(
6312 lsp::Position::new(0, 4),
6313 lsp::Position::new(0, 5),
6314 ),
6315 severity: Some(lsp::DiagnosticSeverity::ERROR),
6316 message: "error 1".to_string(),
6317 ..Default::default()
6318 }],
6319 },
6320 &[],
6321 cx,
6322 )
6323 .unwrap();
6324 project
6325 .update_diagnostics(
6326 0,
6327 lsp::PublishDiagnosticsParams {
6328 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6329 version: None,
6330 diagnostics: vec![lsp::Diagnostic {
6331 range: lsp::Range::new(
6332 lsp::Position::new(0, 4),
6333 lsp::Position::new(0, 5),
6334 ),
6335 severity: Some(lsp::DiagnosticSeverity::WARNING),
6336 message: "error 2".to_string(),
6337 ..Default::default()
6338 }],
6339 },
6340 &[],
6341 cx,
6342 )
6343 .unwrap();
6344 });
6345
6346 buffer_a.read_with(cx, |buffer, _| {
6347 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6348 assert_eq!(
6349 chunks
6350 .iter()
6351 .map(|(s, d)| (s.as_str(), *d))
6352 .collect::<Vec<_>>(),
6353 &[
6354 ("let ", None),
6355 ("a", Some(DiagnosticSeverity::ERROR)),
6356 (" = 1;", None),
6357 ]
6358 );
6359 });
6360 buffer_b.read_with(cx, |buffer, _| {
6361 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6362 assert_eq!(
6363 chunks
6364 .iter()
6365 .map(|(s, d)| (s.as_str(), *d))
6366 .collect::<Vec<_>>(),
6367 &[
6368 ("let ", None),
6369 ("b", Some(DiagnosticSeverity::WARNING)),
6370 (" = 2;", None),
6371 ]
6372 );
6373 });
6374 }
6375
6376 #[gpui::test]
6377 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6378 cx.foreground().forbid_parking();
6379
6380 let progress_token = "the-progress-token";
6381 let mut language = Language::new(
6382 LanguageConfig {
6383 name: "Rust".into(),
6384 path_suffixes: vec!["rs".to_string()],
6385 ..Default::default()
6386 },
6387 Some(tree_sitter_rust::language()),
6388 );
6389 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6390 disk_based_diagnostics_progress_token: Some(progress_token),
6391 disk_based_diagnostics_sources: &["disk"],
6392 ..Default::default()
6393 });
6394
6395 let fs = FakeFs::new(cx.background());
6396 fs.insert_tree(
6397 "/dir",
6398 json!({
6399 "a.rs": "fn a() { A }",
6400 "b.rs": "const y: i32 = 1",
6401 }),
6402 )
6403 .await;
6404
6405 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6406 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6407 let worktree_id =
6408 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6409
6410 // Cause the worktree to start the fake language server.
6411 let _buffer = project
6412 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6413 .await
6414 .unwrap();
6415
6416 let mut events = subscribe(&project, cx);
6417
6418 let mut fake_server = fake_servers.next().await.unwrap();
6419 fake_server.start_progress(progress_token).await;
6420 assert_eq!(
6421 events.next().await.unwrap(),
6422 Event::DiskBasedDiagnosticsStarted {
6423 language_server_id: 0,
6424 }
6425 );
6426
6427 fake_server.start_progress(progress_token).await;
6428 fake_server.end_progress(progress_token).await;
6429 fake_server.start_progress(progress_token).await;
6430
6431 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6432 lsp::PublishDiagnosticsParams {
6433 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6434 version: None,
6435 diagnostics: vec![lsp::Diagnostic {
6436 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6437 severity: Some(lsp::DiagnosticSeverity::ERROR),
6438 message: "undefined variable 'A'".to_string(),
6439 ..Default::default()
6440 }],
6441 },
6442 );
6443 assert_eq!(
6444 events.next().await.unwrap(),
6445 Event::DiagnosticsUpdated {
6446 language_server_id: 0,
6447 path: (worktree_id, Path::new("a.rs")).into()
6448 }
6449 );
6450
6451 fake_server.end_progress(progress_token).await;
6452 fake_server.end_progress(progress_token).await;
6453 assert_eq!(
6454 events.next().await.unwrap(),
6455 Event::DiskBasedDiagnosticsFinished {
6456 language_server_id: 0
6457 }
6458 );
6459
6460 let buffer = project
6461 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6462 .await
6463 .unwrap();
6464
6465 buffer.read_with(cx, |buffer, _| {
6466 let snapshot = buffer.snapshot();
6467 let diagnostics = snapshot
6468 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6469 .collect::<Vec<_>>();
6470 assert_eq!(
6471 diagnostics,
6472 &[DiagnosticEntry {
6473 range: Point::new(0, 9)..Point::new(0, 10),
6474 diagnostic: Diagnostic {
6475 severity: lsp::DiagnosticSeverity::ERROR,
6476 message: "undefined variable 'A'".to_string(),
6477 group_id: 0,
6478 is_primary: true,
6479 ..Default::default()
6480 }
6481 }]
6482 )
6483 });
6484
6485 // Ensure publishing empty diagnostics twice only results in one update event.
6486 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6487 lsp::PublishDiagnosticsParams {
6488 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6489 version: None,
6490 diagnostics: Default::default(),
6491 },
6492 );
6493 assert_eq!(
6494 events.next().await.unwrap(),
6495 Event::DiagnosticsUpdated {
6496 language_server_id: 0,
6497 path: (worktree_id, Path::new("a.rs")).into()
6498 }
6499 );
6500
6501 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6502 lsp::PublishDiagnosticsParams {
6503 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6504 version: None,
6505 diagnostics: Default::default(),
6506 },
6507 );
6508 cx.foreground().run_until_parked();
6509 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6510 }
6511
6512 #[gpui::test]
6513 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6514 cx.foreground().forbid_parking();
6515
6516 let progress_token = "the-progress-token";
6517 let mut language = Language::new(
6518 LanguageConfig {
6519 path_suffixes: vec!["rs".to_string()],
6520 ..Default::default()
6521 },
6522 None,
6523 );
6524 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6525 disk_based_diagnostics_sources: &["disk"],
6526 disk_based_diagnostics_progress_token: Some(progress_token),
6527 ..Default::default()
6528 });
6529
6530 let fs = FakeFs::new(cx.background());
6531 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6532
6533 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6534 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6535
6536 let buffer = project
6537 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6538 .await
6539 .unwrap();
6540
6541 // Simulate diagnostics starting to update.
6542 let mut fake_server = fake_servers.next().await.unwrap();
6543 fake_server.start_progress(progress_token).await;
6544
6545 // Restart the server before the diagnostics finish updating.
6546 project.update(cx, |project, cx| {
6547 project.restart_language_servers_for_buffers([buffer], cx);
6548 });
6549 let mut events = subscribe(&project, cx);
6550
6551 // Simulate the newly started server sending more diagnostics.
6552 let mut fake_server = fake_servers.next().await.unwrap();
6553 fake_server.start_progress(progress_token).await;
6554 assert_eq!(
6555 events.next().await.unwrap(),
6556 Event::DiskBasedDiagnosticsStarted {
6557 language_server_id: 1
6558 }
6559 );
6560 project.read_with(cx, |project, _| {
6561 assert_eq!(
6562 project
6563 .language_servers_running_disk_based_diagnostics()
6564 .collect::<Vec<_>>(),
6565 [1]
6566 );
6567 });
6568
6569 // All diagnostics are considered done, despite the old server's diagnostic
6570 // task never completing.
6571 fake_server.end_progress(progress_token).await;
6572 assert_eq!(
6573 events.next().await.unwrap(),
6574 Event::DiskBasedDiagnosticsFinished {
6575 language_server_id: 1
6576 }
6577 );
6578 project.read_with(cx, |project, _| {
6579 assert_eq!(
6580 project
6581 .language_servers_running_disk_based_diagnostics()
6582 .collect::<Vec<_>>(),
6583 [0; 0]
6584 );
6585 });
6586 }
6587
6588 #[gpui::test]
6589 async fn test_toggling_enable_language_server(
6590 deterministic: Arc<Deterministic>,
6591 cx: &mut gpui::TestAppContext,
6592 ) {
6593 deterministic.forbid_parking();
6594
6595 let mut rust = Language::new(
6596 LanguageConfig {
6597 name: Arc::from("Rust"),
6598 path_suffixes: vec!["rs".to_string()],
6599 ..Default::default()
6600 },
6601 None,
6602 );
6603 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6604 name: "rust-lsp",
6605 ..Default::default()
6606 });
6607 let mut js = Language::new(
6608 LanguageConfig {
6609 name: Arc::from("JavaScript"),
6610 path_suffixes: vec!["js".to_string()],
6611 ..Default::default()
6612 },
6613 None,
6614 );
6615 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6616 name: "js-lsp",
6617 ..Default::default()
6618 });
6619
6620 let fs = FakeFs::new(cx.background());
6621 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6622 .await;
6623
6624 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6625 project.update(cx, |project, _| {
6626 project.languages.add(Arc::new(rust));
6627 project.languages.add(Arc::new(js));
6628 });
6629
6630 let _rs_buffer = project
6631 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6632 .await
6633 .unwrap();
6634 let _js_buffer = project
6635 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6636 .await
6637 .unwrap();
6638
6639 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6640 assert_eq!(
6641 fake_rust_server_1
6642 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6643 .await
6644 .text_document
6645 .uri
6646 .as_str(),
6647 "file:///dir/a.rs"
6648 );
6649
6650 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6651 assert_eq!(
6652 fake_js_server
6653 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6654 .await
6655 .text_document
6656 .uri
6657 .as_str(),
6658 "file:///dir/b.js"
6659 );
6660
6661 // Disable the Rust language server, ensuring that only that server is stopped.
6662 cx.update(|cx| {
6663 cx.update_global(|settings: &mut Settings, _| {
6664 settings.language_overrides.insert(
6665 Arc::from("Rust"),
6666 settings::LanguageOverride {
6667 enable_language_server: Some(false),
6668 ..Default::default()
6669 },
6670 );
6671 })
6672 });
6673 fake_rust_server_1
6674 .receive_notification::<lsp::notification::Exit>()
6675 .await;
6676
6677 // Enable Rust and disable JavaScript language servers, ensuring that the
6678 // former gets started again and that the latter stops.
6679 cx.update(|cx| {
6680 cx.update_global(|settings: &mut Settings, _| {
6681 settings.language_overrides.insert(
6682 Arc::from("Rust"),
6683 settings::LanguageOverride {
6684 enable_language_server: Some(true),
6685 ..Default::default()
6686 },
6687 );
6688 settings.language_overrides.insert(
6689 Arc::from("JavaScript"),
6690 settings::LanguageOverride {
6691 enable_language_server: Some(false),
6692 ..Default::default()
6693 },
6694 );
6695 })
6696 });
6697 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6698 assert_eq!(
6699 fake_rust_server_2
6700 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6701 .await
6702 .text_document
6703 .uri
6704 .as_str(),
6705 "file:///dir/a.rs"
6706 );
6707 fake_js_server
6708 .receive_notification::<lsp::notification::Exit>()
6709 .await;
6710 }
6711
6712 #[gpui::test]
6713 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6714 cx.foreground().forbid_parking();
6715
6716 let mut language = Language::new(
6717 LanguageConfig {
6718 name: "Rust".into(),
6719 path_suffixes: vec!["rs".to_string()],
6720 ..Default::default()
6721 },
6722 Some(tree_sitter_rust::language()),
6723 );
6724 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6725 disk_based_diagnostics_sources: &["disk"],
6726 ..Default::default()
6727 });
6728
6729 let text = "
6730 fn a() { A }
6731 fn b() { BB }
6732 fn c() { CCC }
6733 "
6734 .unindent();
6735
6736 let fs = FakeFs::new(cx.background());
6737 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6738
6739 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6740 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6741
6742 let buffer = project
6743 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6744 .await
6745 .unwrap();
6746
6747 let mut fake_server = fake_servers.next().await.unwrap();
6748 let open_notification = fake_server
6749 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6750 .await;
6751
6752 // Edit the buffer, moving the content down
6753 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6754 let change_notification_1 = fake_server
6755 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6756 .await;
6757 assert!(
6758 change_notification_1.text_document.version > open_notification.text_document.version
6759 );
6760
6761 // Report some diagnostics for the initial version of the buffer
6762 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6763 lsp::PublishDiagnosticsParams {
6764 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6765 version: Some(open_notification.text_document.version),
6766 diagnostics: vec![
6767 lsp::Diagnostic {
6768 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6769 severity: Some(DiagnosticSeverity::ERROR),
6770 message: "undefined variable 'A'".to_string(),
6771 source: Some("disk".to_string()),
6772 ..Default::default()
6773 },
6774 lsp::Diagnostic {
6775 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6776 severity: Some(DiagnosticSeverity::ERROR),
6777 message: "undefined variable 'BB'".to_string(),
6778 source: Some("disk".to_string()),
6779 ..Default::default()
6780 },
6781 lsp::Diagnostic {
6782 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6783 severity: Some(DiagnosticSeverity::ERROR),
6784 source: Some("disk".to_string()),
6785 message: "undefined variable 'CCC'".to_string(),
6786 ..Default::default()
6787 },
6788 ],
6789 },
6790 );
6791
6792 // The diagnostics have moved down since they were created.
6793 buffer.next_notification(cx).await;
6794 buffer.read_with(cx, |buffer, _| {
6795 assert_eq!(
6796 buffer
6797 .snapshot()
6798 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6799 .collect::<Vec<_>>(),
6800 &[
6801 DiagnosticEntry {
6802 range: Point::new(3, 9)..Point::new(3, 11),
6803 diagnostic: Diagnostic {
6804 severity: DiagnosticSeverity::ERROR,
6805 message: "undefined variable 'BB'".to_string(),
6806 is_disk_based: true,
6807 group_id: 1,
6808 is_primary: true,
6809 ..Default::default()
6810 },
6811 },
6812 DiagnosticEntry {
6813 range: Point::new(4, 9)..Point::new(4, 12),
6814 diagnostic: Diagnostic {
6815 severity: DiagnosticSeverity::ERROR,
6816 message: "undefined variable 'CCC'".to_string(),
6817 is_disk_based: true,
6818 group_id: 2,
6819 is_primary: true,
6820 ..Default::default()
6821 }
6822 }
6823 ]
6824 );
6825 assert_eq!(
6826 chunks_with_diagnostics(buffer, 0..buffer.len()),
6827 [
6828 ("\n\nfn a() { ".to_string(), None),
6829 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6830 (" }\nfn b() { ".to_string(), None),
6831 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6832 (" }\nfn c() { ".to_string(), None),
6833 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6834 (" }\n".to_string(), None),
6835 ]
6836 );
6837 assert_eq!(
6838 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6839 [
6840 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6841 (" }\nfn c() { ".to_string(), None),
6842 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6843 ]
6844 );
6845 });
6846
6847 // Ensure overlapping diagnostics are highlighted correctly.
6848 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6849 lsp::PublishDiagnosticsParams {
6850 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6851 version: Some(open_notification.text_document.version),
6852 diagnostics: vec![
6853 lsp::Diagnostic {
6854 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6855 severity: Some(DiagnosticSeverity::ERROR),
6856 message: "undefined variable 'A'".to_string(),
6857 source: Some("disk".to_string()),
6858 ..Default::default()
6859 },
6860 lsp::Diagnostic {
6861 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6862 severity: Some(DiagnosticSeverity::WARNING),
6863 message: "unreachable statement".to_string(),
6864 source: Some("disk".to_string()),
6865 ..Default::default()
6866 },
6867 ],
6868 },
6869 );
6870
6871 buffer.next_notification(cx).await;
6872 buffer.read_with(cx, |buffer, _| {
6873 assert_eq!(
6874 buffer
6875 .snapshot()
6876 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6877 .collect::<Vec<_>>(),
6878 &[
6879 DiagnosticEntry {
6880 range: Point::new(2, 9)..Point::new(2, 12),
6881 diagnostic: Diagnostic {
6882 severity: DiagnosticSeverity::WARNING,
6883 message: "unreachable statement".to_string(),
6884 is_disk_based: true,
6885 group_id: 4,
6886 is_primary: true,
6887 ..Default::default()
6888 }
6889 },
6890 DiagnosticEntry {
6891 range: Point::new(2, 9)..Point::new(2, 10),
6892 diagnostic: Diagnostic {
6893 severity: DiagnosticSeverity::ERROR,
6894 message: "undefined variable 'A'".to_string(),
6895 is_disk_based: true,
6896 group_id: 3,
6897 is_primary: true,
6898 ..Default::default()
6899 },
6900 }
6901 ]
6902 );
6903 assert_eq!(
6904 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6905 [
6906 ("fn a() { ".to_string(), None),
6907 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6908 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6909 ("\n".to_string(), None),
6910 ]
6911 );
6912 assert_eq!(
6913 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6914 [
6915 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6916 ("\n".to_string(), None),
6917 ]
6918 );
6919 });
6920
6921 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6922 // changes since the last save.
6923 buffer.update(cx, |buffer, cx| {
6924 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6925 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6926 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6927 });
6928 let change_notification_2 = fake_server
6929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6930 .await;
6931 assert!(
6932 change_notification_2.text_document.version
6933 > change_notification_1.text_document.version
6934 );
6935
6936 // Handle out-of-order diagnostics
6937 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6938 lsp::PublishDiagnosticsParams {
6939 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6940 version: Some(change_notification_2.text_document.version),
6941 diagnostics: vec![
6942 lsp::Diagnostic {
6943 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6944 severity: Some(DiagnosticSeverity::ERROR),
6945 message: "undefined variable 'BB'".to_string(),
6946 source: Some("disk".to_string()),
6947 ..Default::default()
6948 },
6949 lsp::Diagnostic {
6950 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6951 severity: Some(DiagnosticSeverity::WARNING),
6952 message: "undefined variable 'A'".to_string(),
6953 source: Some("disk".to_string()),
6954 ..Default::default()
6955 },
6956 ],
6957 },
6958 );
6959
6960 buffer.next_notification(cx).await;
6961 buffer.read_with(cx, |buffer, _| {
6962 assert_eq!(
6963 buffer
6964 .snapshot()
6965 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6966 .collect::<Vec<_>>(),
6967 &[
6968 DiagnosticEntry {
6969 range: Point::new(2, 21)..Point::new(2, 22),
6970 diagnostic: Diagnostic {
6971 severity: DiagnosticSeverity::WARNING,
6972 message: "undefined variable 'A'".to_string(),
6973 is_disk_based: true,
6974 group_id: 6,
6975 is_primary: true,
6976 ..Default::default()
6977 }
6978 },
6979 DiagnosticEntry {
6980 range: Point::new(3, 9)..Point::new(3, 14),
6981 diagnostic: Diagnostic {
6982 severity: DiagnosticSeverity::ERROR,
6983 message: "undefined variable 'BB'".to_string(),
6984 is_disk_based: true,
6985 group_id: 5,
6986 is_primary: true,
6987 ..Default::default()
6988 },
6989 }
6990 ]
6991 );
6992 });
6993 }
6994
6995 #[gpui::test]
6996 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6997 cx.foreground().forbid_parking();
6998
6999 let text = concat!(
7000 "let one = ;\n", //
7001 "let two = \n",
7002 "let three = 3;\n",
7003 );
7004
7005 let fs = FakeFs::new(cx.background());
7006 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7007
7008 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7009 let buffer = project
7010 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7011 .await
7012 .unwrap();
7013
7014 project.update(cx, |project, cx| {
7015 project
7016 .update_buffer_diagnostics(
7017 &buffer,
7018 vec![
7019 DiagnosticEntry {
7020 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7021 diagnostic: Diagnostic {
7022 severity: DiagnosticSeverity::ERROR,
7023 message: "syntax error 1".to_string(),
7024 ..Default::default()
7025 },
7026 },
7027 DiagnosticEntry {
7028 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7029 diagnostic: Diagnostic {
7030 severity: DiagnosticSeverity::ERROR,
7031 message: "syntax error 2".to_string(),
7032 ..Default::default()
7033 },
7034 },
7035 ],
7036 None,
7037 cx,
7038 )
7039 .unwrap();
7040 });
7041
7042 // An empty range is extended forward to include the following character.
7043 // At the end of a line, an empty range is extended backward to include
7044 // the preceding character.
7045 buffer.read_with(cx, |buffer, _| {
7046 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7047 assert_eq!(
7048 chunks
7049 .iter()
7050 .map(|(s, d)| (s.as_str(), *d))
7051 .collect::<Vec<_>>(),
7052 &[
7053 ("let one = ", None),
7054 (";", Some(DiagnosticSeverity::ERROR)),
7055 ("\nlet two =", None),
7056 (" ", Some(DiagnosticSeverity::ERROR)),
7057 ("\nlet three = 3;\n", None)
7058 ]
7059 );
7060 });
7061 }
7062
7063 #[gpui::test]
7064 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7065 cx.foreground().forbid_parking();
7066
7067 let mut language = Language::new(
7068 LanguageConfig {
7069 name: "Rust".into(),
7070 path_suffixes: vec!["rs".to_string()],
7071 ..Default::default()
7072 },
7073 Some(tree_sitter_rust::language()),
7074 );
7075 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7076
7077 let text = "
7078 fn a() {
7079 f1();
7080 }
7081 fn b() {
7082 f2();
7083 }
7084 fn c() {
7085 f3();
7086 }
7087 "
7088 .unindent();
7089
7090 let fs = FakeFs::new(cx.background());
7091 fs.insert_tree(
7092 "/dir",
7093 json!({
7094 "a.rs": text.clone(),
7095 }),
7096 )
7097 .await;
7098
7099 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7100 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7101 let buffer = project
7102 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7103 .await
7104 .unwrap();
7105
7106 let mut fake_server = fake_servers.next().await.unwrap();
7107 let lsp_document_version = fake_server
7108 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7109 .await
7110 .text_document
7111 .version;
7112
7113 // Simulate editing the buffer after the language server computes some edits.
7114 buffer.update(cx, |buffer, cx| {
7115 buffer.edit(
7116 [(
7117 Point::new(0, 0)..Point::new(0, 0),
7118 "// above first function\n",
7119 )],
7120 cx,
7121 );
7122 buffer.edit(
7123 [(
7124 Point::new(2, 0)..Point::new(2, 0),
7125 " // inside first function\n",
7126 )],
7127 cx,
7128 );
7129 buffer.edit(
7130 [(
7131 Point::new(6, 4)..Point::new(6, 4),
7132 "// inside second function ",
7133 )],
7134 cx,
7135 );
7136
7137 assert_eq!(
7138 buffer.text(),
7139 "
7140 // above first function
7141 fn a() {
7142 // inside first function
7143 f1();
7144 }
7145 fn b() {
7146 // inside second function f2();
7147 }
7148 fn c() {
7149 f3();
7150 }
7151 "
7152 .unindent()
7153 );
7154 });
7155
7156 let edits = project
7157 .update(cx, |project, cx| {
7158 project.edits_from_lsp(
7159 &buffer,
7160 vec![
7161 // replace body of first function
7162 lsp::TextEdit {
7163 range: lsp::Range::new(
7164 lsp::Position::new(0, 0),
7165 lsp::Position::new(3, 0),
7166 ),
7167 new_text: "
7168 fn a() {
7169 f10();
7170 }
7171 "
7172 .unindent(),
7173 },
7174 // edit inside second function
7175 lsp::TextEdit {
7176 range: lsp::Range::new(
7177 lsp::Position::new(4, 6),
7178 lsp::Position::new(4, 6),
7179 ),
7180 new_text: "00".into(),
7181 },
7182 // edit inside third function via two distinct edits
7183 lsp::TextEdit {
7184 range: lsp::Range::new(
7185 lsp::Position::new(7, 5),
7186 lsp::Position::new(7, 5),
7187 ),
7188 new_text: "4000".into(),
7189 },
7190 lsp::TextEdit {
7191 range: lsp::Range::new(
7192 lsp::Position::new(7, 5),
7193 lsp::Position::new(7, 6),
7194 ),
7195 new_text: "".into(),
7196 },
7197 ],
7198 Some(lsp_document_version),
7199 cx,
7200 )
7201 })
7202 .await
7203 .unwrap();
7204
7205 buffer.update(cx, |buffer, cx| {
7206 for (range, new_text) in edits {
7207 buffer.edit([(range, new_text)], cx);
7208 }
7209 assert_eq!(
7210 buffer.text(),
7211 "
7212 // above first function
7213 fn a() {
7214 // inside first function
7215 f10();
7216 }
7217 fn b() {
7218 // inside second function f200();
7219 }
7220 fn c() {
7221 f4000();
7222 }
7223 "
7224 .unindent()
7225 );
7226 });
7227 }
7228
7229 #[gpui::test]
7230 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7231 cx.foreground().forbid_parking();
7232
7233 let text = "
7234 use a::b;
7235 use a::c;
7236
7237 fn f() {
7238 b();
7239 c();
7240 }
7241 "
7242 .unindent();
7243
7244 let fs = FakeFs::new(cx.background());
7245 fs.insert_tree(
7246 "/dir",
7247 json!({
7248 "a.rs": text.clone(),
7249 }),
7250 )
7251 .await;
7252
7253 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7254 let buffer = project
7255 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7256 .await
7257 .unwrap();
7258
7259 // Simulate the language server sending us a small edit in the form of a very large diff.
7260 // Rust-analyzer does this when performing a merge-imports code action.
7261 let edits = project
7262 .update(cx, |project, cx| {
7263 project.edits_from_lsp(
7264 &buffer,
7265 [
7266 // Replace the first use statement without editing the semicolon.
7267 lsp::TextEdit {
7268 range: lsp::Range::new(
7269 lsp::Position::new(0, 4),
7270 lsp::Position::new(0, 8),
7271 ),
7272 new_text: "a::{b, c}".into(),
7273 },
7274 // Reinsert the remainder of the file between the semicolon and the final
7275 // newline of the file.
7276 lsp::TextEdit {
7277 range: lsp::Range::new(
7278 lsp::Position::new(0, 9),
7279 lsp::Position::new(0, 9),
7280 ),
7281 new_text: "\n\n".into(),
7282 },
7283 lsp::TextEdit {
7284 range: lsp::Range::new(
7285 lsp::Position::new(0, 9),
7286 lsp::Position::new(0, 9),
7287 ),
7288 new_text: "
7289 fn f() {
7290 b();
7291 c();
7292 }"
7293 .unindent(),
7294 },
7295 // Delete everything after the first newline of the file.
7296 lsp::TextEdit {
7297 range: lsp::Range::new(
7298 lsp::Position::new(1, 0),
7299 lsp::Position::new(7, 0),
7300 ),
7301 new_text: "".into(),
7302 },
7303 ],
7304 None,
7305 cx,
7306 )
7307 })
7308 .await
7309 .unwrap();
7310
7311 buffer.update(cx, |buffer, cx| {
7312 let edits = edits
7313 .into_iter()
7314 .map(|(range, text)| {
7315 (
7316 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7317 text,
7318 )
7319 })
7320 .collect::<Vec<_>>();
7321
7322 assert_eq!(
7323 edits,
7324 [
7325 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7326 (Point::new(1, 0)..Point::new(2, 0), "".into())
7327 ]
7328 );
7329
7330 for (range, new_text) in edits {
7331 buffer.edit([(range, new_text)], cx);
7332 }
7333 assert_eq!(
7334 buffer.text(),
7335 "
7336 use a::{b, c};
7337
7338 fn f() {
7339 b();
7340 c();
7341 }
7342 "
7343 .unindent()
7344 );
7345 });
7346 }
7347
7348 #[gpui::test]
7349 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7350 cx.foreground().forbid_parking();
7351
7352 let text = "
7353 use a::b;
7354 use a::c;
7355
7356 fn f() {
7357 b();
7358 c();
7359 }
7360 "
7361 .unindent();
7362
7363 let fs = FakeFs::new(cx.background());
7364 fs.insert_tree(
7365 "/dir",
7366 json!({
7367 "a.rs": text.clone(),
7368 }),
7369 )
7370 .await;
7371
7372 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7373 let buffer = project
7374 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7375 .await
7376 .unwrap();
7377
7378 // Simulate the language server sending us edits in a non-ordered fashion,
7379 // with ranges sometimes being inverted.
7380 let edits = project
7381 .update(cx, |project, cx| {
7382 project.edits_from_lsp(
7383 &buffer,
7384 [
7385 lsp::TextEdit {
7386 range: lsp::Range::new(
7387 lsp::Position::new(0, 9),
7388 lsp::Position::new(0, 9),
7389 ),
7390 new_text: "\n\n".into(),
7391 },
7392 lsp::TextEdit {
7393 range: lsp::Range::new(
7394 lsp::Position::new(0, 8),
7395 lsp::Position::new(0, 4),
7396 ),
7397 new_text: "a::{b, c}".into(),
7398 },
7399 lsp::TextEdit {
7400 range: lsp::Range::new(
7401 lsp::Position::new(1, 0),
7402 lsp::Position::new(7, 0),
7403 ),
7404 new_text: "".into(),
7405 },
7406 lsp::TextEdit {
7407 range: lsp::Range::new(
7408 lsp::Position::new(0, 9),
7409 lsp::Position::new(0, 9),
7410 ),
7411 new_text: "
7412 fn f() {
7413 b();
7414 c();
7415 }"
7416 .unindent(),
7417 },
7418 ],
7419 None,
7420 cx,
7421 )
7422 })
7423 .await
7424 .unwrap();
7425
7426 buffer.update(cx, |buffer, cx| {
7427 let edits = edits
7428 .into_iter()
7429 .map(|(range, text)| {
7430 (
7431 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7432 text,
7433 )
7434 })
7435 .collect::<Vec<_>>();
7436
7437 assert_eq!(
7438 edits,
7439 [
7440 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7441 (Point::new(1, 0)..Point::new(2, 0), "".into())
7442 ]
7443 );
7444
7445 for (range, new_text) in edits {
7446 buffer.edit([(range, new_text)], cx);
7447 }
7448 assert_eq!(
7449 buffer.text(),
7450 "
7451 use a::{b, c};
7452
7453 fn f() {
7454 b();
7455 c();
7456 }
7457 "
7458 .unindent()
7459 );
7460 });
7461 }
7462
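/// Collects the buffer's chunks over `range` into (text, severity) runs,
/// merging adjacent chunks that share the same diagnostic severity.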
7463 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7464 buffer: &Buffer,
7465 range: Range<T>,
7466 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7467 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7468 for chunk in buffer.snapshot().chunks(range, true) {
7469 if chunks.last().map_or(false, |prev_chunk| {
7470 prev_chunk.1 == chunk.diagnostic_severity
7471 }) {
7472 chunks.last_mut().unwrap().0.push_str(chunk.text);
7473 } else {
7474 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7475 }
7476 }
7477 chunks
7478 }
7479
7480 #[gpui::test]
7481 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7482 let dir = temp_tree(json!({
7483 "root": {
7484 "dir1": {},
7485 "dir2": {
7486 "dir3": {}
7487 }
7488 }
7489 }));
7490
7491 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7492 let cancel_flag = Default::default();
7493 let results = project
7494 .read_with(cx, |project, cx| {
7495 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7496 })
7497 .await;
7498
7499 assert!(results.is_empty());
7500 }
7501
7502 #[gpui::test(iterations = 10)]
7503 async fn test_definition(cx: &mut gpui::TestAppContext) {
7504 let mut language = Language::new(
7505 LanguageConfig {
7506 name: "Rust".into(),
7507 path_suffixes: vec!["rs".to_string()],
7508 ..Default::default()
7509 },
7510 Some(tree_sitter_rust::language()),
7511 );
7512 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7513
7514 let fs = FakeFs::new(cx.background());
7515 fs.insert_tree(
7516 "/dir",
7517 json!({
7518 "a.rs": "const fn a() { A }",
7519 "b.rs": "const y: i32 = crate::a()",
7520 }),
7521 )
7522 .await;
7523
7524 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7525 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7526
7527 let buffer = project
7528 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7529 .await
7530 .unwrap();
7531
7532 let fake_server = fake_servers.next().await.unwrap();
7533 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7534 let params = params.text_document_position_params;
7535 assert_eq!(
7536 params.text_document.uri.to_file_path().unwrap(),
7537 Path::new("/dir/b.rs"),
7538 );
7539 assert_eq!(params.position, lsp::Position::new(0, 22));
7540
7541 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7542 lsp::Location::new(
7543 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7544 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7545 ),
7546 )))
7547 });
7548
7549 let mut definitions = project
7550 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7551 .await
7552 .unwrap();
7553
7554 assert_eq!(definitions.len(), 1);
7555 let definition = definitions.pop().unwrap();
7556 cx.update(|cx| {
7557 let target_buffer = definition.buffer.read(cx);
7558 assert_eq!(
7559 target_buffer
7560 .file()
7561 .unwrap()
7562 .as_local()
7563 .unwrap()
7564 .abs_path(cx),
7565 Path::new("/dir/a.rs"),
7566 );
7567 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7568 assert_eq!(
7569 list_worktrees(&project, cx),
7570 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7571 );
7572
7573 drop(definition);
7574 });
7575 cx.read(|cx| {
7576 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7577 });
7578
7579 fn list_worktrees<'a>(
7580 project: &'a ModelHandle<Project>,
7581 cx: &'a AppContext,
7582 ) -> Vec<(&'a Path, bool)> {
7583 project
7584 .read(cx)
7585 .worktrees(cx)
7586 .map(|worktree| {
7587 let worktree = worktree.read(cx);
7588 (
7589 worktree.as_local().unwrap().abs_path().as_ref(),
7590 worktree.is_visible(),
7591 )
7592 })
7593 .collect::<Vec<_>>()
7594 }
7595 }
7596
7597 #[gpui::test]
7598 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7599 let mut language = Language::new(
7600 LanguageConfig {
7601 name: "TypeScript".into(),
7602 path_suffixes: vec!["ts".to_string()],
7603 ..Default::default()
7604 },
7605 Some(tree_sitter_typescript::language_typescript()),
7606 );
7607 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7608
7609 let fs = FakeFs::new(cx.background());
7610 fs.insert_tree(
7611 "/dir",
7612 json!({
7613 "a.ts": "",
7614 }),
7615 )
7616 .await;
7617
7618 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7619 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7620 let buffer = project
7621 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7622 .await
7623 .unwrap();
7624
7625 let fake_server = fake_language_servers.next().await.unwrap();
7626
7627 let text = "let a = b.fqn";
7628 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7629 let completions = project.update(cx, |project, cx| {
7630 project.completions(&buffer, text.len(), cx)
7631 });
7632
7633 fake_server
7634 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7635 Ok(Some(lsp::CompletionResponse::Array(vec![
7636 lsp::CompletionItem {
7637 label: "fullyQualifiedName?".into(),
7638 insert_text: Some("fullyQualifiedName".into()),
7639 ..Default::default()
7640 },
7641 ])))
7642 })
7643 .next()
7644 .await;
7645 let completions = completions.await.unwrap();
7646 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7647 assert_eq!(completions.len(), 1);
7648 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7649 assert_eq!(
7650 completions[0].old_range.to_offset(&snapshot),
7651 text.len() - 3..text.len()
7652 );
7653
7654 let text = "let a = \"atoms/cmp\"";
7655 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7656 let completions = project.update(cx, |project, cx| {
7657 project.completions(&buffer, text.len() - 1, cx)
7658 });
7659
7660 fake_server
7661 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7662 Ok(Some(lsp::CompletionResponse::Array(vec![
7663 lsp::CompletionItem {
7664 label: "component".into(),
7665 ..Default::default()
7666 },
7667 ])))
7668 })
7669 .next()
7670 .await;
7671 let completions = completions.await.unwrap();
7672 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7673 assert_eq!(completions.len(), 1);
7674 assert_eq!(completions[0].new_text, "component");
7675 assert_eq!(
7676 completions[0].old_range.to_offset(&snapshot),
7677 text.len() - 4..text.len() - 1
7678 );
7679 }
7680
7681 #[gpui::test(iterations = 10)]
7682 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7683 let mut language = Language::new(
7684 LanguageConfig {
7685 name: "TypeScript".into(),
7686 path_suffixes: vec!["ts".to_string()],
7687 ..Default::default()
7688 },
7689 None,
7690 );
7691 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7692
7693 let fs = FakeFs::new(cx.background());
7694 fs.insert_tree(
7695 "/dir",
7696 json!({
7697 "a.ts": "a",
7698 }),
7699 )
7700 .await;
7701
7702 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7703 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7704 let buffer = project
7705 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7706 .await
7707 .unwrap();
7708
7709 let fake_server = fake_language_servers.next().await.unwrap();
7710
7711 // The language server returns code actions that contain commands rather than edits.
7712 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7713 fake_server
7714 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7715 Ok(Some(vec![
7716 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7717 title: "The code action".into(),
7718 command: Some(lsp::Command {
7719 title: "The command".into(),
7720 command: "_the/command".into(),
7721 arguments: Some(vec![json!("the-argument")]),
7722 }),
7723 ..Default::default()
7724 }),
7725 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7726 title: "two".into(),
7727 ..Default::default()
7728 }),
7729 ]))
7730 })
7731 .next()
7732 .await;
7733
7734 let action = actions.await.unwrap()[0].clone();
7735 let apply = project.update(cx, |project, cx| {
7736 project.apply_code_action(buffer.clone(), action, true, cx)
7737 });
7738
7739        // Resolving the code action does not populate its edits. In the absence of
7740        // edits, we must execute the action's command instead.
7741 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7742 |action, _| async move { Ok(action) },
7743 );
7744
7745        // While executing the command, the language server sends the editor
7746        // a `workspace/applyEdit` request.
7747 fake_server
7748 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7749 let fake = fake_server.clone();
7750 move |params, _| {
7751 assert_eq!(params.command, "_the/command");
7752 let fake = fake.clone();
7753 async move {
7754 fake.server
7755 .request::<lsp::request::ApplyWorkspaceEdit>(
7756 lsp::ApplyWorkspaceEditParams {
7757 label: None,
7758 edit: lsp::WorkspaceEdit {
7759 changes: Some(
7760 [(
7761 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7762 vec![lsp::TextEdit {
7763 range: lsp::Range::new(
7764 lsp::Position::new(0, 0),
7765 lsp::Position::new(0, 0),
7766 ),
7767 new_text: "X".into(),
7768 }],
7769 )]
7770 .into_iter()
7771 .collect(),
7772 ),
7773 ..Default::default()
7774 },
7775 },
7776 )
7777 .await
7778 .unwrap();
7779 Ok(Some(json!(null)))
7780 }
7781 }
7782 })
7783 .next()
7784 .await;
7785
7786        // Applying the code action returns a project transaction containing the edits
7787        // sent by the language server in its `workspace/applyEdit` request.
7788 let transaction = apply.await.unwrap();
7789 assert!(transaction.0.contains_key(&buffer));
7790 buffer.update(cx, |buffer, cx| {
7791 assert_eq!(buffer.text(), "Xa");
7792 buffer.undo(cx);
7793 assert_eq!(buffer.text(), "a");
7794 });
7795 }
7796
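    // Saving a buffer should write its full contents, including a large edit, back to
    // the file it was opened from.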
7797 #[gpui::test]
7798 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7799 let fs = FakeFs::new(cx.background());
7800 fs.insert_tree(
7801 "/dir",
7802 json!({
7803 "file1": "the old contents",
7804 }),
7805 )
7806 .await;
7807
7808 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7809 let buffer = project
7810 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7811 .await
7812 .unwrap();
7813 buffer
7814 .update(cx, |buffer, cx| {
7815 assert_eq!(buffer.text(), "the old contents");
7816 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7817 buffer.save(cx)
7818 })
7819 .await
7820 .unwrap();
7821
7822 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7823 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7824 }
7825
7826 #[gpui::test]
7827 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7828 let fs = FakeFs::new(cx.background());
7829 fs.insert_tree(
7830 "/dir",
7831 json!({
7832 "file1": "the old contents",
7833 }),
7834 )
7835 .await;
7836
7837 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7838 let buffer = project
7839 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7840 .await
7841 .unwrap();
7842 buffer
7843 .update(cx, |buffer, cx| {
7844 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7845 buffer.save(cx)
7846 })
7847 .await
7848 .unwrap();
7849
7850 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7851 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7852 }
7853
7854 #[gpui::test]
7855 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7856 let fs = FakeFs::new(cx.background());
7857 fs.insert_tree("/dir", json!({})).await;
7858
7859 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7860 let buffer = project.update(cx, |project, cx| {
7861 project.create_buffer("", None, cx).unwrap()
7862 });
7863 buffer.update(cx, |buffer, cx| {
7864 buffer.edit([(0..0, "abc")], cx);
7865 assert!(buffer.is_dirty());
7866 assert!(!buffer.has_conflict());
7867 });
7868 project
7869 .update(cx, |project, cx| {
7870 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7871 })
7872 .await
7873 .unwrap();
7874 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7875 buffer.read_with(cx, |buffer, cx| {
7876 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7877 assert!(!buffer.is_dirty());
7878 assert!(!buffer.has_conflict());
7879 });
7880
7881 let opened_buffer = project
7882 .update(cx, |project, cx| {
7883 project.open_local_buffer("/dir/file1", cx)
7884 })
7885 .await
7886 .unwrap();
7887 assert_eq!(opened_buffer, buffer);
7888 }
7889
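    // Renames and deletions on disk should preserve worktree entry ids and update the
    // paths of open buffers, and replaying the resulting update on a remote worktree
    // snapshot should make it consistent with the local worktree.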
7890 #[gpui::test(retries = 5)]
7891 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7892 let dir = temp_tree(json!({
7893 "a": {
7894 "file1": "",
7895 "file2": "",
7896 "file3": "",
7897 },
7898 "b": {
7899 "c": {
7900 "file4": "",
7901 "file5": "",
7902 }
7903 }
7904 }));
7905
7906 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7907 let rpc = project.read_with(cx, |p, _| p.client.clone());
7908
7909 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7910 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7911 async move { buffer.await.unwrap() }
7912 };
7913 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7914 project.read_with(cx, |project, cx| {
7915 let tree = project.worktrees(cx).next().unwrap();
7916 tree.read(cx)
7917 .entry_for_path(path)
7918                    .unwrap_or_else(|| panic!("no entry for path {}", path))
7919 .id
7920 })
7921 };
7922
7923 let buffer2 = buffer_for_path("a/file2", cx).await;
7924 let buffer3 = buffer_for_path("a/file3", cx).await;
7925 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7926 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7927
7928 let file2_id = id_for_path("a/file2", &cx);
7929 let file3_id = id_for_path("a/file3", &cx);
7930 let file4_id = id_for_path("b/c/file4", &cx);
7931
7932 // Create a remote copy of this worktree.
7933 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7934 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7935 let (remote, load_task) = cx.update(|cx| {
7936 Worktree::remote(
7937 1,
7938 1,
7939 initial_snapshot.to_proto(&Default::default(), true),
7940 rpc.clone(),
7941 cx,
7942 )
7943 });
7944        // Wait for the remote worktree to finish loading.
7945        load_task.await;
7946
7947 cx.read(|cx| {
7948 assert!(!buffer2.read(cx).is_dirty());
7949 assert!(!buffer3.read(cx).is_dirty());
7950 assert!(!buffer4.read(cx).is_dirty());
7951 assert!(!buffer5.read(cx).is_dirty());
7952 });
7953
7954 // Rename and delete files and directories.
7955 tree.flush_fs_events(&cx).await;
7956 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7957 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7958 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7959 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7960 tree.flush_fs_events(&cx).await;
7961
7962 let expected_paths = vec![
7963 "a",
7964 "a/file1",
7965 "a/file2.new",
7966 "b",
7967 "d",
7968 "d/file3",
7969 "d/file4",
7970 ];
7971
7972 cx.read(|app| {
7973 assert_eq!(
7974 tree.read(app)
7975 .paths()
7976 .map(|p| p.to_str().unwrap())
7977 .collect::<Vec<_>>(),
7978 expected_paths
7979 );
7980
7981 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7982 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7983 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7984
7985 assert_eq!(
7986 buffer2.read(app).file().unwrap().path().as_ref(),
7987 Path::new("a/file2.new")
7988 );
7989 assert_eq!(
7990 buffer3.read(app).file().unwrap().path().as_ref(),
7991 Path::new("d/file3")
7992 );
7993 assert_eq!(
7994 buffer4.read(app).file().unwrap().path().as_ref(),
7995 Path::new("d/file4")
7996 );
7997 assert_eq!(
7998 buffer5.read(app).file().unwrap().path().as_ref(),
7999 Path::new("b/c/file5")
8000 );
8001
8002 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8003 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8004 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8005 assert!(buffer5.read(app).file().unwrap().is_deleted());
8006 });
8007
8008 // Update the remote worktree. Check that it becomes consistent with the
8009 // local worktree.
8010 remote.update(cx, |remote, cx| {
8011 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8012 &initial_snapshot,
8013 1,
8014 1,
8015 true,
8016 );
8017 remote
8018 .as_remote_mut()
8019 .unwrap()
8020 .snapshot
8021 .apply_remote_update(update_message)
8022 .unwrap();
8023
8024 assert_eq!(
8025 remote
8026 .paths()
8027 .map(|p| p.to_str().unwrap())
8028 .collect::<Vec<_>>(),
8029 expected_paths
8030 );
8031 });
8032 }
8033
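    // Opening the same path multiple times, even concurrently, should yield a single
    // shared buffer.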
8034 #[gpui::test]
8035 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8036 let fs = FakeFs::new(cx.background());
8037 fs.insert_tree(
8038 "/dir",
8039 json!({
8040 "a.txt": "a-contents",
8041 "b.txt": "b-contents",
8042 }),
8043 )
8044 .await;
8045
8046 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8047
8048 // Spawn multiple tasks to open paths, repeating some paths.
8049 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8050 (
8051 p.open_local_buffer("/dir/a.txt", cx),
8052 p.open_local_buffer("/dir/b.txt", cx),
8053 p.open_local_buffer("/dir/a.txt", cx),
8054 )
8055 });
8056
8057 let buffer_a_1 = buffer_a_1.await.unwrap();
8058 let buffer_a_2 = buffer_a_2.await.unwrap();
8059 let buffer_b = buffer_b.await.unwrap();
8060 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8061 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8062
8063 // There is only one buffer per path.
8064 let buffer_a_id = buffer_a_1.id();
8065 assert_eq!(buffer_a_2.id(), buffer_a_id);
8066
8067 // Open the same path again while it is still open.
8068 drop(buffer_a_1);
8069 let buffer_a_3 = project
8070 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8071 .await
8072 .unwrap();
8073
8074 // There's still only one buffer per path.
8075 assert_eq!(buffer_a_3.id(), buffer_a_id);
8076 }
8077
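    // Tracks the buffer's dirty state through edits, saves, reverts, and file deletions,
    // checking the events emitted at each transition.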
8078 #[gpui::test]
8079 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8080 let fs = FakeFs::new(cx.background());
8081 fs.insert_tree(
8082 "/dir",
8083 json!({
8084 "file1": "abc",
8085 "file2": "def",
8086 "file3": "ghi",
8087 }),
8088 )
8089 .await;
8090
8091 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8092
8093 let buffer1 = project
8094 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8095 .await
8096 .unwrap();
8097 let events = Rc::new(RefCell::new(Vec::new()));
8098
8099        // Initially, the buffer isn't dirty.
8100 buffer1.update(cx, |buffer, cx| {
8101 cx.subscribe(&buffer1, {
8102 let events = events.clone();
8103 move |_, _, event, _| match event {
8104 BufferEvent::Operation(_) => {}
8105 _ => events.borrow_mut().push(event.clone()),
8106 }
8107 })
8108 .detach();
8109
8110 assert!(!buffer.is_dirty());
8111 assert!(events.borrow().is_empty());
8112
8113 buffer.edit([(1..2, "")], cx);
8114 });
8115
8116        // After the first edit, the buffer is dirty and emits a DirtyChanged event.
8117 buffer1.update(cx, |buffer, cx| {
8118            assert_eq!(buffer.text(), "ac");
8119 assert!(buffer.is_dirty());
8120 assert_eq!(
8121 *events.borrow(),
8122 &[language::Event::Edited, language::Event::DirtyChanged]
8123 );
8124 events.borrow_mut().clear();
8125 buffer.did_save(
8126 buffer.version(),
8127 buffer.as_rope().fingerprint(),
8128 buffer.file().unwrap().mtime(),
8129 None,
8130 cx,
8131 );
8132 });
8133
8134        // After saving, the buffer is no longer dirty and emits a Saved event.
8135 buffer1.update(cx, |buffer, cx| {
8136 assert!(!buffer.is_dirty());
8137 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8138 events.borrow_mut().clear();
8139
8140 buffer.edit([(1..1, "B")], cx);
8141 buffer.edit([(2..2, "D")], cx);
8142 });
8143
8144        // After editing again, the buffer is dirty and emits another DirtyChanged event.
8145 buffer1.update(cx, |buffer, cx| {
8146            assert_eq!(buffer.text(), "aBDc");
8147 assert!(buffer.is_dirty());
8148 assert_eq!(
8149 *events.borrow(),
8150 &[
8151 language::Event::Edited,
8152 language::Event::DirtyChanged,
8153 language::Event::Edited,
8154 ],
8155 );
8156 events.borrow_mut().clear();
8157
8158 // After restoring the buffer to its previously-saved state,
8159 // the buffer is not considered dirty anymore.
8160 buffer.edit([(1..3, "")], cx);
8161            assert_eq!(buffer.text(), "ac");
8162 assert!(!buffer.is_dirty());
8163 });
8164
8165 assert_eq!(
8166 *events.borrow(),
8167 &[language::Event::Edited, language::Event::DirtyChanged]
8168 );
8169
8170 // When a file is deleted, the buffer is considered dirty.
8171 let events = Rc::new(RefCell::new(Vec::new()));
8172 let buffer2 = project
8173 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8174 .await
8175 .unwrap();
8176 buffer2.update(cx, |_, cx| {
8177 cx.subscribe(&buffer2, {
8178 let events = events.clone();
8179 move |_, _, event, _| events.borrow_mut().push(event.clone())
8180 })
8181 .detach();
8182 });
8183
8184 fs.remove_file("/dir/file2".as_ref(), Default::default())
8185 .await
8186 .unwrap();
8187 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8188 assert_eq!(
8189 *events.borrow(),
8190 &[
8191 language::Event::DirtyChanged,
8192 language::Event::FileHandleChanged
8193 ]
8194 );
8195
8196        // When a file is deleted while its buffer is already dirty, no DirtyChanged event is emitted.
8197 let events = Rc::new(RefCell::new(Vec::new()));
8198 let buffer3 = project
8199 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8200 .await
8201 .unwrap();
8202 buffer3.update(cx, |_, cx| {
8203 cx.subscribe(&buffer3, {
8204 let events = events.clone();
8205 move |_, _, event, _| events.borrow_mut().push(event.clone())
8206 })
8207 .detach();
8208 });
8209
8210 buffer3.update(cx, |buffer, cx| {
8211 buffer.edit([(0..0, "x")], cx);
8212 });
8213 events.borrow_mut().clear();
8214 fs.remove_file("/dir/file3".as_ref(), Default::default())
8215 .await
8216 .unwrap();
8217 buffer3
8218 .condition(&cx, |_, _| !events.borrow().is_empty())
8219 .await;
8220 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8221 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8222 }
8223
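    // When the file changes on disk, an unmodified buffer reloads its contents (diffing
    // against the old text so anchors are preserved), while a modified buffer keeps its
    // contents and is marked as having a conflict.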
8224 #[gpui::test]
8225 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8226 let initial_contents = "aaa\nbbbbb\nc\n";
8227 let fs = FakeFs::new(cx.background());
8228 fs.insert_tree(
8229 "/dir",
8230 json!({
8231 "the-file": initial_contents,
8232 }),
8233 )
8234 .await;
8235 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8236 let buffer = project
8237 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8238 .await
8239 .unwrap();
8240
8241 let anchors = (0..3)
8242 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8243 .collect::<Vec<_>>();
8244
8245 // Change the file on disk, adding two new lines of text, and removing
8246 // one line.
8247 buffer.read_with(cx, |buffer, _| {
8248 assert!(!buffer.is_dirty());
8249 assert!(!buffer.has_conflict());
8250 });
8251 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8252 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8253 .await
8254 .unwrap();
8255
8256 // Because the buffer was not modified, it is reloaded from disk. Its
8257 // contents are edited according to the diff between the old and new
8258 // file contents.
8259 buffer
8260 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8261 .await;
8262
8263 buffer.update(cx, |buffer, _| {
8264 assert_eq!(buffer.text(), new_contents);
8265 assert!(!buffer.is_dirty());
8266 assert!(!buffer.has_conflict());
8267
8268 let anchor_positions = anchors
8269 .iter()
8270 .map(|anchor| anchor.to_point(&*buffer))
8271 .collect::<Vec<_>>();
8272 assert_eq!(
8273 anchor_positions,
8274 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8275 );
8276 });
8277
8278 // Modify the buffer
8279 buffer.update(cx, |buffer, cx| {
8280 buffer.edit([(0..0, " ")], cx);
8281 assert!(buffer.is_dirty());
8282 assert!(!buffer.has_conflict());
8283 });
8284
8285 // Change the file on disk again, adding blank lines to the beginning.
8286 fs.save(
8287 "/dir/the-file".as_ref(),
8288 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8289 )
8290 .await
8291 .unwrap();
8292
8293 // Because the buffer is modified, it doesn't reload from disk, but is
8294 // marked as having a conflict.
8295 buffer
8296 .condition(&cx, |buffer, _| buffer.has_conflict())
8297 .await;
8298 }
8299
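    // Diagnostics that appear in another diagnostic's relatedInformation are grouped
    // with that primary diagnostic under a shared group id.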
8300 #[gpui::test]
8301 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8302 cx.foreground().forbid_parking();
8303
8304 let fs = FakeFs::new(cx.background());
8305 fs.insert_tree(
8306 "/the-dir",
8307 json!({
8308 "a.rs": "
8309 fn foo(mut v: Vec<usize>) {
8310 for x in &v {
8311 v.push(1);
8312 }
8313 }
8314 "
8315 .unindent(),
8316 }),
8317 )
8318 .await;
8319
8320 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8321 let buffer = project
8322 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8323 .await
8324 .unwrap();
8325
8326 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8327 let message = lsp::PublishDiagnosticsParams {
8328 uri: buffer_uri.clone(),
8329 diagnostics: vec![
8330 lsp::Diagnostic {
8331 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8332 severity: Some(DiagnosticSeverity::WARNING),
8333 message: "error 1".to_string(),
8334 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8335 location: lsp::Location {
8336 uri: buffer_uri.clone(),
8337 range: lsp::Range::new(
8338 lsp::Position::new(1, 8),
8339 lsp::Position::new(1, 9),
8340 ),
8341 },
8342 message: "error 1 hint 1".to_string(),
8343 }]),
8344 ..Default::default()
8345 },
8346 lsp::Diagnostic {
8347 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8348 severity: Some(DiagnosticSeverity::HINT),
8349 message: "error 1 hint 1".to_string(),
8350 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8351 location: lsp::Location {
8352 uri: buffer_uri.clone(),
8353 range: lsp::Range::new(
8354 lsp::Position::new(1, 8),
8355 lsp::Position::new(1, 9),
8356 ),
8357 },
8358 message: "original diagnostic".to_string(),
8359 }]),
8360 ..Default::default()
8361 },
8362 lsp::Diagnostic {
8363 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8364 severity: Some(DiagnosticSeverity::ERROR),
8365 message: "error 2".to_string(),
8366 related_information: Some(vec![
8367 lsp::DiagnosticRelatedInformation {
8368 location: lsp::Location {
8369 uri: buffer_uri.clone(),
8370 range: lsp::Range::new(
8371 lsp::Position::new(1, 13),
8372 lsp::Position::new(1, 15),
8373 ),
8374 },
8375 message: "error 2 hint 1".to_string(),
8376 },
8377 lsp::DiagnosticRelatedInformation {
8378 location: lsp::Location {
8379 uri: buffer_uri.clone(),
8380 range: lsp::Range::new(
8381 lsp::Position::new(1, 13),
8382 lsp::Position::new(1, 15),
8383 ),
8384 },
8385 message: "error 2 hint 2".to_string(),
8386 },
8387 ]),
8388 ..Default::default()
8389 },
8390 lsp::Diagnostic {
8391 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8392 severity: Some(DiagnosticSeverity::HINT),
8393 message: "error 2 hint 1".to_string(),
8394 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8395 location: lsp::Location {
8396 uri: buffer_uri.clone(),
8397 range: lsp::Range::new(
8398 lsp::Position::new(2, 8),
8399 lsp::Position::new(2, 17),
8400 ),
8401 },
8402 message: "original diagnostic".to_string(),
8403 }]),
8404 ..Default::default()
8405 },
8406 lsp::Diagnostic {
8407 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8408 severity: Some(DiagnosticSeverity::HINT),
8409 message: "error 2 hint 2".to_string(),
8410 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8411 location: lsp::Location {
8412 uri: buffer_uri.clone(),
8413 range: lsp::Range::new(
8414 lsp::Position::new(2, 8),
8415 lsp::Position::new(2, 17),
8416 ),
8417 },
8418 message: "original diagnostic".to_string(),
8419 }]),
8420 ..Default::default()
8421 },
8422 ],
8423 version: None,
8424 };
8425
8426 project
8427 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8428 .unwrap();
8429 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8430
8431 assert_eq!(
8432 buffer
8433 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8434 .collect::<Vec<_>>(),
8435 &[
8436 DiagnosticEntry {
8437 range: Point::new(1, 8)..Point::new(1, 9),
8438 diagnostic: Diagnostic {
8439 severity: DiagnosticSeverity::WARNING,
8440 message: "error 1".to_string(),
8441 group_id: 0,
8442 is_primary: true,
8443 ..Default::default()
8444 }
8445 },
8446 DiagnosticEntry {
8447 range: Point::new(1, 8)..Point::new(1, 9),
8448 diagnostic: Diagnostic {
8449 severity: DiagnosticSeverity::HINT,
8450 message: "error 1 hint 1".to_string(),
8451 group_id: 0,
8452 is_primary: false,
8453 ..Default::default()
8454 }
8455 },
8456 DiagnosticEntry {
8457 range: Point::new(1, 13)..Point::new(1, 15),
8458 diagnostic: Diagnostic {
8459 severity: DiagnosticSeverity::HINT,
8460 message: "error 2 hint 1".to_string(),
8461 group_id: 1,
8462 is_primary: false,
8463 ..Default::default()
8464 }
8465 },
8466 DiagnosticEntry {
8467 range: Point::new(1, 13)..Point::new(1, 15),
8468 diagnostic: Diagnostic {
8469 severity: DiagnosticSeverity::HINT,
8470 message: "error 2 hint 2".to_string(),
8471 group_id: 1,
8472 is_primary: false,
8473 ..Default::default()
8474 }
8475 },
8476 DiagnosticEntry {
8477 range: Point::new(2, 8)..Point::new(2, 17),
8478 diagnostic: Diagnostic {
8479 severity: DiagnosticSeverity::ERROR,
8480 message: "error 2".to_string(),
8481 group_id: 1,
8482 is_primary: true,
8483 ..Default::default()
8484 }
8485 }
8486 ]
8487 );
8488
8489 assert_eq!(
8490 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8491 &[
8492 DiagnosticEntry {
8493 range: Point::new(1, 8)..Point::new(1, 9),
8494 diagnostic: Diagnostic {
8495 severity: DiagnosticSeverity::WARNING,
8496 message: "error 1".to_string(),
8497 group_id: 0,
8498 is_primary: true,
8499 ..Default::default()
8500 }
8501 },
8502 DiagnosticEntry {
8503 range: Point::new(1, 8)..Point::new(1, 9),
8504 diagnostic: Diagnostic {
8505 severity: DiagnosticSeverity::HINT,
8506 message: "error 1 hint 1".to_string(),
8507 group_id: 0,
8508 is_primary: false,
8509 ..Default::default()
8510 }
8511 },
8512 ]
8513 );
8514 assert_eq!(
8515 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8516 &[
8517 DiagnosticEntry {
8518 range: Point::new(1, 13)..Point::new(1, 15),
8519 diagnostic: Diagnostic {
8520 severity: DiagnosticSeverity::HINT,
8521 message: "error 2 hint 1".to_string(),
8522 group_id: 1,
8523 is_primary: false,
8524 ..Default::default()
8525 }
8526 },
8527 DiagnosticEntry {
8528 range: Point::new(1, 13)..Point::new(1, 15),
8529 diagnostic: Diagnostic {
8530 severity: DiagnosticSeverity::HINT,
8531 message: "error 2 hint 2".to_string(),
8532 group_id: 1,
8533 is_primary: false,
8534 ..Default::default()
8535 }
8536 },
8537 DiagnosticEntry {
8538 range: Point::new(2, 8)..Point::new(2, 17),
8539 diagnostic: Diagnostic {
8540 severity: DiagnosticSeverity::ERROR,
8541 message: "error 2".to_string(),
8542 group_id: 1,
8543 is_primary: true,
8544 ..Default::default()
8545 }
8546 }
8547 ]
8548 );
8549 }
8550
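    // Exercises the two-step LSP rename flow: prepare_rename returns the range of the
    // symbol under the cursor, and perform_rename applies the server's workspace edit
    // across multiple buffers.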
8551 #[gpui::test]
8552 async fn test_rename(cx: &mut gpui::TestAppContext) {
8553 cx.foreground().forbid_parking();
8554
8555 let mut language = Language::new(
8556 LanguageConfig {
8557 name: "Rust".into(),
8558 path_suffixes: vec!["rs".to_string()],
8559 ..Default::default()
8560 },
8561 Some(tree_sitter_rust::language()),
8562 );
8563 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8564 capabilities: lsp::ServerCapabilities {
8565 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8566 prepare_provider: Some(true),
8567 work_done_progress_options: Default::default(),
8568 })),
8569 ..Default::default()
8570 },
8571 ..Default::default()
8572 });
8573
8574 let fs = FakeFs::new(cx.background());
8575 fs.insert_tree(
8576 "/dir",
8577 json!({
8578 "one.rs": "const ONE: usize = 1;",
8579 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8580 }),
8581 )
8582 .await;
8583
8584 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8585 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8586 let buffer = project
8587 .update(cx, |project, cx| {
8588 project.open_local_buffer("/dir/one.rs", cx)
8589 })
8590 .await
8591 .unwrap();
8592
8593 let fake_server = fake_servers.next().await.unwrap();
8594
8595 let response = project.update(cx, |project, cx| {
8596 project.prepare_rename(buffer.clone(), 7, cx)
8597 });
8598 fake_server
8599 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8600 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8601 assert_eq!(params.position, lsp::Position::new(0, 7));
8602 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8603 lsp::Position::new(0, 6),
8604 lsp::Position::new(0, 9),
8605 ))))
8606 })
8607 .next()
8608 .await
8609 .unwrap();
8610 let range = response.await.unwrap().unwrap();
8611 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8612 assert_eq!(range, 6..9);
8613
8614 let response = project.update(cx, |project, cx| {
8615 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8616 });
8617 fake_server
8618 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8619 assert_eq!(
8620 params.text_document_position.text_document.uri.as_str(),
8621 "file:///dir/one.rs"
8622 );
8623 assert_eq!(
8624 params.text_document_position.position,
8625 lsp::Position::new(0, 7)
8626 );
8627 assert_eq!(params.new_name, "THREE");
8628 Ok(Some(lsp::WorkspaceEdit {
8629 changes: Some(
8630 [
8631 (
8632 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8633 vec![lsp::TextEdit::new(
8634 lsp::Range::new(
8635 lsp::Position::new(0, 6),
8636 lsp::Position::new(0, 9),
8637 ),
8638 "THREE".to_string(),
8639 )],
8640 ),
8641 (
8642 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8643 vec![
8644 lsp::TextEdit::new(
8645 lsp::Range::new(
8646 lsp::Position::new(0, 24),
8647 lsp::Position::new(0, 27),
8648 ),
8649 "THREE".to_string(),
8650 ),
8651 lsp::TextEdit::new(
8652 lsp::Range::new(
8653 lsp::Position::new(0, 35),
8654 lsp::Position::new(0, 38),
8655 ),
8656 "THREE".to_string(),
8657 ),
8658 ],
8659 ),
8660 ]
8661 .into_iter()
8662 .collect(),
8663 ),
8664 ..Default::default()
8665 }))
8666 })
8667 .next()
8668 .await
8669 .unwrap();
8670 let mut transaction = response.await.unwrap().0;
8671 assert_eq!(transaction.len(), 2);
8672 assert_eq!(
8673 transaction
8674 .remove_entry(&buffer)
8675 .unwrap()
8676 .0
8677 .read_with(cx, |buffer, _| buffer.text()),
8678 "const THREE: usize = 1;"
8679 );
8680 assert_eq!(
8681 transaction
8682 .into_keys()
8683 .next()
8684 .unwrap()
8685 .read_with(cx, |buffer, _| buffer.text()),
8686 "const TWO: usize = one::THREE + one::THREE;"
8687 );
8688 }
8689
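    // Project-wide search should report matches from files on disk as well as from
    // unsaved edits in open buffers.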
8690 #[gpui::test]
8691 async fn test_search(cx: &mut gpui::TestAppContext) {
8692 let fs = FakeFs::new(cx.background());
8693 fs.insert_tree(
8694 "/dir",
8695 json!({
8696 "one.rs": "const ONE: usize = 1;",
8697 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8698 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8699 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8700 }),
8701 )
8702 .await;
8703 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8704 assert_eq!(
8705 search(&project, SearchQuery::text("TWO", false, true), cx)
8706 .await
8707 .unwrap(),
8708 HashMap::from_iter([
8709 ("two.rs".to_string(), vec![6..9]),
8710 ("three.rs".to_string(), vec![37..40])
8711 ])
8712 );
8713
8714 let buffer_4 = project
8715 .update(cx, |project, cx| {
8716 project.open_local_buffer("/dir/four.rs", cx)
8717 })
8718 .await
8719 .unwrap();
8720 buffer_4.update(cx, |buffer, cx| {
8721 let text = "two::TWO";
8722 buffer.edit([(20..28, text), (31..43, text)], cx);
8723 });
8724
8725 assert_eq!(
8726 search(&project, SearchQuery::text("TWO", false, true), cx)
8727 .await
8728 .unwrap(),
8729 HashMap::from_iter([
8730 ("two.rs".to_string(), vec![6..9]),
8731 ("three.rs".to_string(), vec![37..40]),
8732 ("four.rs".to_string(), vec![25..28, 36..39])
8733 ])
8734 );
8735
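        // Collects search results into a map from file path to match offset ranges so
        // they can be compared with plain assertions.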
8736 async fn search(
8737 project: &ModelHandle<Project>,
8738 query: SearchQuery,
8739 cx: &mut gpui::TestAppContext,
8740 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8741 let results = project
8742 .update(cx, |project, cx| project.search(query, cx))
8743 .await?;
8744
8745 Ok(results
8746 .into_iter()
8747 .map(|(buffer, ranges)| {
8748 buffer.read_with(cx, |buffer, _| {
8749 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8750 let ranges = ranges
8751 .into_iter()
8752 .map(|range| range.to_offset(buffer))
8753 .collect::<Vec<_>>();
8754 (path, ranges)
8755 })
8756 })
8757 .collect())
8758 }
8759 }
8760}