1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{
27 DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
28 MarkedString,
29};
30use lsp_command::*;
31use parking_lot::Mutex;
32use postage::stream::Stream;
33use postage::watch;
34use rand::prelude::*;
35use search::SearchQuery;
36use serde::Serialize;
37use settings::Settings;
38use sha2::{Digest, Sha256};
39use similar::{ChangeTag, TextDiff};
40use std::{
41 cell::RefCell,
42 cmp::{self, Ordering},
43 convert::TryInto,
44 ffi::OsString,
45 hash::Hash,
46 mem,
47 ops::Range,
48 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
49 path::{Component, Path, PathBuf},
50 rc::Rc,
51 sync::{
52 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
53 Arc,
54 },
55 time::Instant,
56};
57use thiserror::Error;
58use util::{post_inc, ResultExt, TryFutureExt as _};
59
60pub use db::Db;
61pub use fs::*;
62pub use worktree::*;
63
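/// An entity that can report the [`ProjectEntryId`] it corresponds to, if any.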
64pub trait Item: Entity {
65 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
66}
67
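/// Tracks all open projects in the application and owns the database used to persist their state.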
68pub struct ProjectStore {
69 db: Arc<Db>,
70 projects: Vec<WeakModelHandle<Project>>,
71}
72
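/// A collection of worktrees and the buffers, language servers, diagnostics, and collaborators
/// associated with them. A project is either local (hosted from this machine's filesystem) or
/// remote (joined from a collaborator over the network).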
73pub struct Project {
74 worktrees: Vec<WorktreeHandle>,
75 active_entry: Option<ProjectEntryId>,
76 languages: Arc<LanguageRegistry>,
77 language_servers:
78 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
79 started_language_servers:
80 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
81 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
82 language_server_settings: Arc<Mutex<serde_json::Value>>,
83 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
84 next_language_server_id: usize,
85 client: Arc<client::Client>,
86 next_entry_id: Arc<AtomicUsize>,
87 next_diagnostic_group_id: usize,
88 user_store: ModelHandle<UserStore>,
89 project_store: ModelHandle<ProjectStore>,
90 fs: Arc<dyn Fs>,
91 client_state: ProjectClientState,
92 collaborators: HashMap<PeerId, Collaborator>,
93 client_subscriptions: Vec<client::Subscription>,
94 _subscriptions: Vec<gpui::Subscription>,
95 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
96 shared_buffers: HashMap<PeerId, HashSet<u64>>,
97 loading_buffers: HashMap<
98 ProjectPath,
99 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
100 >,
101 loading_local_worktrees:
102 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
103 opened_buffers: HashMap<u64, OpenBuffer>,
104 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
105 nonce: u128,
106 initialized_persistent_state: bool,
107}
108
109#[derive(Error, Debug)]
110pub enum JoinProjectError {
111 #[error("host declined join request")]
112 HostDeclined,
113 #[error("host closed the project")]
114 HostClosedProject,
115 #[error("host went offline")]
116 HostWentOffline,
117 #[error("{0}")]
118 Other(#[from] anyhow::Error),
119}
120
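/// A buffer held by the project. Buffers are held strongly while the project is shared or remote
/// and weakly otherwise; buffers that are still loading accumulate the operations that arrive for
/// them until they finish loading.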
121enum OpenBuffer {
122 Strong(ModelHandle<Buffer>),
123 Weak(WeakModelHandle<Buffer>),
124 Loading(Vec<Operation>),
125}
126
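/// A handle to one of the project's worktrees, held strongly or weakly depending on whether the
/// project is shared and the worktree is visible.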
127enum WorktreeHandle {
128 Strong(ModelHandle<Worktree>),
129 Weak(WeakModelHandle<Worktree>),
130}
131
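/// The project's relationship to the collaboration server: either the local, host side (which
/// owns the project's remote id and online state) or the remote, guest side (which tracks the
/// host's project id and this peer's replica id).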
132enum ProjectClientState {
133 Local {
134 is_shared: bool,
135 remote_id_tx: watch::Sender<Option<u64>>,
136 remote_id_rx: watch::Receiver<Option<u64>>,
137 online_tx: watch::Sender<bool>,
138 online_rx: watch::Receiver<bool>,
139 _maintain_remote_id_task: Task<Option<()>>,
140 },
141 Remote {
142 sharing_has_stopped: bool,
143 remote_id: u64,
144 replica_id: ReplicaId,
145 _detect_unshare_task: Task<Option<()>>,
146 },
147}
148
149#[derive(Clone, Debug)]
150pub struct Collaborator {
151 pub user: Arc<User>,
152 pub peer_id: PeerId,
153 pub replica_id: ReplicaId,
154}
155
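/// Events emitted by a [`Project`] for its observers.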
156#[derive(Clone, Debug, PartialEq, Eq)]
157pub enum Event {
158 ActiveEntryChanged(Option<ProjectEntryId>),
159 WorktreeAdded,
160 WorktreeRemoved(WorktreeId),
161 DiskBasedDiagnosticsStarted {
162 language_server_id: usize,
163 },
164 DiskBasedDiagnosticsFinished {
165 language_server_id: usize,
166 },
167 DiagnosticsUpdated {
168 path: ProjectPath,
169 language_server_id: usize,
170 },
171 RemoteIdChanged(Option<u64>),
172 CollaboratorLeft(PeerId),
173 ContactRequestedJoin(Arc<User>),
174 ContactCancelledJoinRequest(Arc<User>),
175}
176
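/// The reported status of a running language server: its name, its in-progress work, and the
/// number of diagnostic updates still pending.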
177#[derive(Serialize)]
178pub struct LanguageServerStatus {
179 pub name: String,
180 pub pending_work: BTreeMap<String, LanguageServerProgress>,
181 pub pending_diagnostic_updates: isize,
182}
183
184#[derive(Clone, Debug, Serialize)]
185pub struct LanguageServerProgress {
186 pub message: Option<String>,
187 pub percentage: Option<usize>,
188 #[serde(skip_serializing)]
189 pub last_update_at: Instant,
190}
191
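/// A path to a file or directory within a project, expressed as a worktree id plus a
/// worktree-relative path.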
192#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
193pub struct ProjectPath {
194 pub worktree_id: WorktreeId,
195 pub path: Arc<Path>,
196}
197
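/// A count of the primary error and warning diagnostics reported for a path by a single
/// language server.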
198#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
199pub struct DiagnosticSummary {
200 pub language_server_id: usize,
201 pub error_count: usize,
202 pub warning_count: usize,
203}
204
205#[derive(Debug)]
206pub struct Location {
207 pub buffer: ModelHandle<Buffer>,
208 pub range: Range<language::Anchor>,
209}
210
211#[derive(Debug)]
212pub struct DocumentHighlight {
213 pub range: Range<language::Anchor>,
214 pub kind: DocumentHighlightKind,
215}
216
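/// A symbol provided by a language server, identifying the worktree and path in which it was
/// found within the project.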
217#[derive(Clone, Debug)]
218pub struct Symbol {
219 pub source_worktree_id: WorktreeId,
220 pub worktree_id: WorktreeId,
221 pub language_server_name: LanguageServerName,
222 pub path: PathBuf,
223 pub label: CodeLabel,
224 pub name: String,
225 pub kind: lsp::SymbolKind,
226 pub range: Range<PointUtf16>,
227 pub signature: [u8; 32],
228}
229
230#[derive(Clone, Debug, PartialEq)]
231pub struct HoverBlock {
232 pub text: String,
233 pub language: Option<String>,
234}
235
236impl HoverBlock {
237 fn try_new(marked_string: MarkedString) -> Option<Self> {
238 let result = match marked_string {
239 MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
240 text: value,
241 language: Some(language),
242 },
243 MarkedString::String(text) => HoverBlock {
244 text,
245 language: None,
246 },
247 };
248 if result.text.is_empty() {
249 None
250 } else {
251 Some(result)
252 }
253 }
254}
255
256#[derive(Debug)]
257pub struct Hover {
258 pub contents: Vec<HoverBlock>,
259 pub range: Option<Range<language::Anchor>>,
260}
261
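/// The set of buffer transactions produced by a single project-wide operation, such as applying
/// a workspace edit, keyed by the buffer they apply to.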
262#[derive(Default)]
263pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
264
265impl DiagnosticSummary {
266 fn new<'a, T: 'a>(
267 language_server_id: usize,
268 diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
269 ) -> Self {
270 let mut this = Self {
271 language_server_id,
272 error_count: 0,
273 warning_count: 0,
274 };
275
276 for entry in diagnostics {
277 if entry.diagnostic.is_primary {
278 match entry.diagnostic.severity {
279 DiagnosticSeverity::ERROR => this.error_count += 1,
280 DiagnosticSeverity::WARNING => this.warning_count += 1,
281 _ => {}
282 }
283 }
284 }
285
286 this
287 }
288
289 pub fn is_empty(&self) -> bool {
290 self.error_count == 0 && self.warning_count == 0
291 }
292
293 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
294 proto::DiagnosticSummary {
295 path: path.to_string_lossy().to_string(),
296 language_server_id: self.language_server_id as u64,
297 error_count: self.error_count as u32,
298 warning_count: self.warning_count as u32,
299 }
300 }
301}
302
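/// A project-wide unique identifier for a file or directory entry in one of the project's
/// worktrees.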
303#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
304pub struct ProjectEntryId(usize);
305
306impl ProjectEntryId {
307 pub const MAX: Self = Self(usize::MAX);
308
309 pub fn new(counter: &AtomicUsize) -> Self {
310 Self(counter.fetch_add(1, SeqCst))
311 }
312
313 pub fn from_proto(id: u64) -> Self {
314 Self(id as usize)
315 }
316
317 pub fn to_proto(&self) -> u64 {
318 self.0 as u64
319 }
320
321 pub fn to_usize(&self) -> usize {
322 self.0
323 }
324}
325
326impl Project {
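    /// Registers this type's RPC message and request handlers with the client.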
327 pub fn init(client: &Arc<Client>) {
328 client.add_model_message_handler(Self::handle_request_join_project);
329 client.add_model_message_handler(Self::handle_add_collaborator);
330 client.add_model_message_handler(Self::handle_buffer_reloaded);
331 client.add_model_message_handler(Self::handle_buffer_saved);
332 client.add_model_message_handler(Self::handle_start_language_server);
333 client.add_model_message_handler(Self::handle_update_language_server);
334 client.add_model_message_handler(Self::handle_remove_collaborator);
335 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
336 client.add_model_message_handler(Self::handle_update_project);
337 client.add_model_message_handler(Self::handle_unregister_project);
338 client.add_model_message_handler(Self::handle_project_unshared);
339 client.add_model_message_handler(Self::handle_update_buffer_file);
340 client.add_model_message_handler(Self::handle_update_buffer);
341 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
342 client.add_model_message_handler(Self::handle_update_worktree);
343 client.add_model_request_handler(Self::handle_create_project_entry);
344 client.add_model_request_handler(Self::handle_rename_project_entry);
345 client.add_model_request_handler(Self::handle_copy_project_entry);
346 client.add_model_request_handler(Self::handle_delete_project_entry);
347 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
348 client.add_model_request_handler(Self::handle_apply_code_action);
349 client.add_model_request_handler(Self::handle_reload_buffers);
350 client.add_model_request_handler(Self::handle_format_buffers);
351 client.add_model_request_handler(Self::handle_get_code_actions);
352 client.add_model_request_handler(Self::handle_get_completions);
353 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
354 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
355 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
356 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
357 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
358 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
359 client.add_model_request_handler(Self::handle_search_project);
360 client.add_model_request_handler(Self::handle_get_project_symbols);
361 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
362 client.add_model_request_handler(Self::handle_open_buffer_by_id);
363 client.add_model_request_handler(Self::handle_open_buffer_by_path);
364 client.add_model_request_handler(Self::handle_save_buffer);
365 }
366
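    /// Creates a new, empty local project backed by the given filesystem and registers it with
    /// the project store. A background task keeps the project registered with the server while
    /// it is online and connected.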
367 pub fn local(
368 online: bool,
369 client: Arc<Client>,
370 user_store: ModelHandle<UserStore>,
371 project_store: ModelHandle<ProjectStore>,
372 languages: Arc<LanguageRegistry>,
373 fs: Arc<dyn Fs>,
374 cx: &mut MutableAppContext,
375 ) -> ModelHandle<Self> {
376 cx.add_model(|cx: &mut ModelContext<Self>| {
377 let (online_tx, online_rx) = watch::channel_with(online);
378 let (remote_id_tx, remote_id_rx) = watch::channel();
379 let _maintain_remote_id_task = cx.spawn_weak({
380 let status_rx = client.clone().status();
381 let online_rx = online_rx.clone();
382 move |this, mut cx| async move {
383 let mut stream = Stream::map(status_rx.clone(), drop)
384 .merge(Stream::map(online_rx.clone(), drop));
385 while stream.recv().await.is_some() {
386 let this = this.upgrade(&cx)?;
387 if status_rx.borrow().is_connected() && *online_rx.borrow() {
388 this.update(&mut cx, |this, cx| this.register(cx))
389 .await
390 .log_err()?;
391 } else {
392 this.update(&mut cx, |this, cx| this.unregister(cx))
393 .await
394 .log_err();
395 }
396 }
397 None
398 }
399 });
400
401 let handle = cx.weak_handle();
402 project_store.update(cx, |store, cx| store.add_project(handle, cx));
403
404 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
405 Self {
406 worktrees: Default::default(),
407 collaborators: Default::default(),
408 opened_buffers: Default::default(),
409 shared_buffers: Default::default(),
410 loading_buffers: Default::default(),
411 loading_local_worktrees: Default::default(),
412 buffer_snapshots: Default::default(),
413 client_state: ProjectClientState::Local {
414 is_shared: false,
415 remote_id_tx,
416 remote_id_rx,
417 online_tx,
418 online_rx,
419 _maintain_remote_id_task,
420 },
421 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
422 client_subscriptions: Vec::new(),
423 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
424 active_entry: None,
425 languages,
426 client,
427 user_store,
428 project_store,
429 fs,
430 next_entry_id: Default::default(),
431 next_diagnostic_group_id: Default::default(),
432 language_servers: Default::default(),
433 started_language_servers: Default::default(),
434 language_server_statuses: Default::default(),
435 last_workspace_edits_by_language_server: Default::default(),
436 language_server_settings: Default::default(),
437 next_language_server_id: 0,
438 nonce: StdRng::from_entropy().gen(),
439 initialized_persistent_state: false,
440 }
441 })
442 }
443
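    /// Joins an existing project hosted by another user, loading the host's worktrees, language
    /// server statuses, and collaborators.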
444 pub async fn remote(
445 remote_id: u64,
446 client: Arc<Client>,
447 user_store: ModelHandle<UserStore>,
448 project_store: ModelHandle<ProjectStore>,
449 languages: Arc<LanguageRegistry>,
450 fs: Arc<dyn Fs>,
451 mut cx: AsyncAppContext,
452 ) -> Result<ModelHandle<Self>, JoinProjectError> {
453 client.authenticate_and_connect(true, &cx).await?;
454
455 let response = client
456 .request(proto::JoinProject {
457 project_id: remote_id,
458 })
459 .await?;
460
461 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
462 proto::join_project_response::Variant::Accept(response) => response,
463 proto::join_project_response::Variant::Decline(decline) => {
464 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
465 Some(proto::join_project_response::decline::Reason::Declined) => {
466 Err(JoinProjectError::HostDeclined)?
467 }
468 Some(proto::join_project_response::decline::Reason::Closed) => {
469 Err(JoinProjectError::HostClosedProject)?
470 }
471 Some(proto::join_project_response::decline::Reason::WentOffline) => {
472 Err(JoinProjectError::HostWentOffline)?
473 }
474 None => Err(anyhow!("missing decline reason"))?,
475 }
476 }
477 };
478
479 let replica_id = response.replica_id as ReplicaId;
480
481 let mut worktrees = Vec::new();
482 for worktree in response.worktrees {
483 let (worktree, load_task) = cx
484 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
485 worktrees.push(worktree);
486 load_task.detach();
487 }
488
489 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
490 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
491 let handle = cx.weak_handle();
492 project_store.update(cx, |store, cx| store.add_project(handle, cx));
493
494 let mut this = Self {
495 worktrees: Vec::new(),
496 loading_buffers: Default::default(),
497 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
498 shared_buffers: Default::default(),
499 loading_local_worktrees: Default::default(),
500 active_entry: None,
501 collaborators: Default::default(),
502 languages,
503 user_store: user_store.clone(),
504 project_store,
505 fs,
506 next_entry_id: Default::default(),
507 next_diagnostic_group_id: Default::default(),
508 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
509 _subscriptions: Default::default(),
510 client: client.clone(),
511 client_state: ProjectClientState::Remote {
512 sharing_has_stopped: false,
513 remote_id,
514 replica_id,
515 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
516 async move {
517 let mut status = client.status();
518 let is_connected =
519 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we were momentarily disconnected.
521 if !is_connected || status.next().await.is_some() {
522 if let Some(this) = this.upgrade(&cx) {
523 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
524 }
525 }
526 Ok(())
527 }
528 .log_err()
529 }),
530 },
531 language_servers: Default::default(),
532 started_language_servers: Default::default(),
533 language_server_settings: Default::default(),
534 language_server_statuses: response
535 .language_servers
536 .into_iter()
537 .map(|server| {
538 (
539 server.id as usize,
540 LanguageServerStatus {
541 name: server.name,
542 pending_work: Default::default(),
543 pending_diagnostic_updates: 0,
544 },
545 )
546 })
547 .collect(),
548 last_workspace_edits_by_language_server: Default::default(),
549 next_language_server_id: 0,
550 opened_buffers: Default::default(),
551 buffer_snapshots: Default::default(),
552 nonce: StdRng::from_entropy().gen(),
553 initialized_persistent_state: false,
554 };
555 for worktree in worktrees {
556 this.add_worktree(&worktree, cx);
557 }
558 this
559 });
560
561 let user_ids = response
562 .collaborators
563 .iter()
564 .map(|peer| peer.user_id)
565 .collect();
566 user_store
567 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
568 .await?;
569 let mut collaborators = HashMap::default();
570 for message in response.collaborators {
571 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
572 collaborators.insert(collaborator.peer_id, collaborator);
573 }
574
575 this.update(&mut cx, |this, _| {
576 this.collaborators = collaborators;
577 });
578
579 Ok(this)
580 }
581
582 #[cfg(any(test, feature = "test-support"))]
583 pub async fn test(
584 fs: Arc<dyn Fs>,
585 root_paths: impl IntoIterator<Item = &Path>,
586 cx: &mut gpui::TestAppContext,
587 ) -> ModelHandle<Project> {
588 if !cx.read(|cx| cx.has_global::<Settings>()) {
589 cx.update(|cx| cx.set_global(Settings::test(cx)));
590 }
591
592 let languages = Arc::new(LanguageRegistry::test());
593 let http_client = client::test::FakeHttpClient::with_404_response();
594 let client = client::Client::new(http_client.clone());
595 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
596 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
597 let project = cx.update(|cx| {
598 Project::local(true, client, user_store, project_store, languages, fs, cx)
599 });
600 for path in root_paths {
601 let (tree, _) = project
602 .update(cx, |project, cx| {
603 project.find_or_create_local_worktree(path, true, cx)
604 })
605 .await
606 .unwrap();
607 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
608 .await;
609 }
610 project
611 }
612
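    /// Restores this project's persisted online state from the database. This is a no-op for
    /// remote projects.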
613 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
614 if self.is_remote() {
615 return Task::ready(Ok(()));
616 }
617
618 let db = self.project_store.read(cx).db.clone();
619 let keys = self.db_keys_for_online_state(cx);
620 let online_by_default = cx.global::<Settings>().projects_online_by_default;
621 let read_online = cx.background().spawn(async move {
622 let values = db.read(keys)?;
623 anyhow::Ok(
624 values
625 .into_iter()
626 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
627 )
628 });
629 cx.spawn(|this, mut cx| async move {
630 let online = read_online.await.log_err().unwrap_or(false);
631 this.update(&mut cx, |this, cx| {
632 this.initialized_persistent_state = true;
633 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
634 let mut online_tx = online_tx.borrow_mut();
635 if *online_tx != online {
636 *online_tx = online;
637 drop(online_tx);
638 this.metadata_changed(false, cx);
639 }
640 }
641 });
642 Ok(())
643 })
644 }
645
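    /// Persists this project's online state to the database, keyed by the absolute path of each
    /// visible worktree.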
646 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
647 if self.is_remote() || !self.initialized_persistent_state {
648 return Task::ready(Ok(()));
649 }
650
651 let db = self.project_store.read(cx).db.clone();
652 let keys = self.db_keys_for_online_state(cx);
653 let is_online = self.is_online();
654 cx.background().spawn(async move {
655 let value = &[is_online as u8];
656 db.write(keys.into_iter().map(|key| (key, value)))
657 })
658 }
659
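    /// Starts or stops language servers for open buffers when the `enable_language_server`
    /// setting changes.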
660 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
661 let settings = cx.global::<Settings>();
662
663 let mut language_servers_to_start = Vec::new();
664 for buffer in self.opened_buffers.values() {
665 if let Some(buffer) = buffer.upgrade(cx) {
666 let buffer = buffer.read(cx);
667 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
668 {
669 if settings.enable_language_server(Some(&language.name())) {
670 let worktree = file.worktree.read(cx);
671 language_servers_to_start.push((
672 worktree.id(),
673 worktree.as_local().unwrap().abs_path().clone(),
674 language.clone(),
675 ));
676 }
677 }
678 }
679 }
680
681 let mut language_servers_to_stop = Vec::new();
682 for language in self.languages.to_vec() {
683 if let Some(lsp_adapter) = language.lsp_adapter() {
684 if !settings.enable_language_server(Some(&language.name())) {
685 let lsp_name = lsp_adapter.name();
686 for (worktree_id, started_lsp_name) in self.started_language_servers.keys() {
687 if lsp_name == *started_lsp_name {
688 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
689 }
690 }
691 }
692 }
693 }
694
695 // Stop all newly-disabled language servers.
696 for (worktree_id, adapter_name) in language_servers_to_stop {
697 self.stop_language_server(worktree_id, adapter_name, cx)
698 .detach();
699 }
700
701 // Start all the newly-enabled language servers.
702 for (worktree_id, worktree_path, language) in language_servers_to_start {
703 self.start_language_server(worktree_id, worktree_path, language, cx);
704 }
705
706 cx.notify();
707 }
708
709 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
710 self.opened_buffers
711 .get(&remote_id)
712 .and_then(|buffer| buffer.upgrade(cx))
713 }
714
715 pub fn languages(&self) -> &Arc<LanguageRegistry> {
716 &self.languages
717 }
718
719 pub fn client(&self) -> Arc<Client> {
720 self.client.clone()
721 }
722
723 pub fn user_store(&self) -> ModelHandle<UserStore> {
724 self.user_store.clone()
725 }
726
727 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
728 self.project_store.clone()
729 }
730
731 #[cfg(any(test, feature = "test-support"))]
732 pub fn check_invariants(&self, cx: &AppContext) {
733 if self.is_local() {
734 let mut worktree_root_paths = HashMap::default();
735 for worktree in self.worktrees(cx) {
736 let worktree = worktree.read(cx);
737 let abs_path = worktree.as_local().unwrap().abs_path().clone();
738 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
739 assert_eq!(
740 prev_worktree_id,
741 None,
742 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
743 abs_path,
744 worktree.id(),
745 prev_worktree_id
746 )
747 }
748 } else {
749 let replica_id = self.replica_id();
750 for buffer in self.opened_buffers.values() {
751 if let Some(buffer) = buffer.upgrade(cx) {
752 let buffer = buffer.read(cx);
753 assert_eq!(
754 buffer.deferred_ops_len(),
755 0,
756 "replica {}, buffer {} has deferred operations",
757 replica_id,
758 buffer.remote_id()
759 );
760 }
761 }
762 }
763 }
764
765 #[cfg(any(test, feature = "test-support"))]
766 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
767 let path = path.into();
768 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
769 self.opened_buffers.iter().any(|(_, buffer)| {
770 if let Some(buffer) = buffer.upgrade(cx) {
771 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
772 if file.worktree == worktree && file.path() == &path.path {
773 return true;
774 }
775 }
776 }
777 false
778 })
779 } else {
780 false
781 }
782 }
783
784 pub fn fs(&self) -> &Arc<dyn Fs> {
785 &self.fs
786 }
787
788 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
789 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
790 let mut online_tx = online_tx.borrow_mut();
791 if *online_tx != online {
792 *online_tx = online;
793 drop(online_tx);
794 self.metadata_changed(true, cx);
795 }
796 }
797 }
798
799 pub fn is_online(&self) -> bool {
800 match &self.client_state {
801 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
802 ProjectClientState::Remote { .. } => true,
803 }
804 }
805
806 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
807 self.unshared(cx);
808 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
809 if let Some(remote_id) = *remote_id_rx.borrow() {
810 let request = self.client.request(proto::UnregisterProject {
811 project_id: remote_id,
812 });
813 return cx.spawn(|this, mut cx| async move {
814 let response = request.await;
815
816 // Unregistering the project causes the server to send out a
817 // contact update removing this project from the host's list
818 // of online projects. Wait until this contact update has been
819 // processed before clearing out this project's remote id, so
820 // that there is no moment where this project appears in the
821 // contact metadata and *also* has no remote id.
822 this.update(&mut cx, |this, cx| {
823 this.user_store()
824 .update(cx, |store, _| store.contact_updates_done())
825 })
826 .await;
827
828 this.update(&mut cx, |this, cx| {
829 if let ProjectClientState::Local { remote_id_tx, .. } =
830 &mut this.client_state
831 {
832 *remote_id_tx.borrow_mut() = None;
833 }
834 this.client_subscriptions.clear();
835 this.metadata_changed(false, cx);
836 });
837 response.map(drop)
838 });
839 }
840 }
841 Task::ready(Ok(()))
842 }
843
844 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
845 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
846 if remote_id_rx.borrow().is_some() {
847 return Task::ready(Ok(()));
848 }
849 }
850
851 let response = self.client.request(proto::RegisterProject {});
852 cx.spawn(|this, mut cx| async move {
853 let remote_id = response.await?.project_id;
854 this.update(&mut cx, |this, cx| {
855 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
856 *remote_id_tx.borrow_mut() = Some(remote_id);
857 }
858
859 this.metadata_changed(false, cx);
860 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
861 this.client_subscriptions
862 .push(this.client.add_model_for_remote_entity(remote_id, cx));
863 Ok(())
864 })
865 })
866 }
867
868 pub fn remote_id(&self) -> Option<u64> {
869 match &self.client_state {
870 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
871 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
872 }
873 }
874
875 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
876 let mut id = None;
877 let mut watch = None;
878 match &self.client_state {
879 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
880 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
881 }
882
883 async move {
884 if let Some(id) = id {
885 return id;
886 }
887 let mut watch = watch.unwrap();
888 loop {
889 let id = *watch.borrow();
890 if let Some(id) = id {
891 return id;
892 }
893 watch.next().await;
894 }
895 }
896 }
897
898 pub fn shared_remote_id(&self) -> Option<u64> {
899 match &self.client_state {
900 ProjectClientState::Local {
901 remote_id_rx,
902 is_shared,
903 ..
904 } => {
905 if *is_shared {
906 *remote_id_rx.borrow()
907 } else {
908 None
909 }
910 }
911 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
912 }
913 }
914
915 pub fn replica_id(&self) -> ReplicaId {
916 match &self.client_state {
917 ProjectClientState::Local { .. } => 0,
918 ProjectClientState::Remote { replica_id, .. } => *replica_id,
919 }
920 }
921
922 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
923 if let ProjectClientState::Local {
924 remote_id_rx,
925 online_rx,
926 ..
927 } = &self.client_state
928 {
929 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
930 self.client
931 .send(proto::UpdateProject {
932 project_id,
933 worktrees: self
934 .worktrees
935 .iter()
936 .filter_map(|worktree| {
937 worktree.upgrade(&cx).map(|worktree| {
938 worktree.read(cx).as_local().unwrap().metadata_proto()
939 })
940 })
941 .collect(),
942 })
943 .log_err();
944 }
945
946 self.project_store.update(cx, |_, cx| cx.notify());
947 if persist {
948 self.persist_state(cx).detach_and_log_err(cx);
949 }
950 cx.notify();
951 }
952 }
953
954 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
955 &self.collaborators
956 }
957
958 pub fn worktrees<'a>(
959 &'a self,
960 cx: &'a AppContext,
961 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
962 self.worktrees
963 .iter()
964 .filter_map(move |worktree| worktree.upgrade(cx))
965 }
966
967 pub fn visible_worktrees<'a>(
968 &'a self,
969 cx: &'a AppContext,
970 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
971 self.worktrees.iter().filter_map(|worktree| {
972 worktree.upgrade(cx).and_then(|worktree| {
973 if worktree.read(cx).is_visible() {
974 Some(worktree)
975 } else {
976 None
977 }
978 })
979 })
980 }
981
982 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
983 self.visible_worktrees(cx)
984 .map(|tree| tree.read(cx).root_name())
985 }
986
987 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
988 self.worktrees
989 .iter()
990 .filter_map(|worktree| {
991 let worktree = worktree.upgrade(&cx)?.read(cx);
992 if worktree.is_visible() {
993 Some(format!(
994 "project-path-online:{}",
995 worktree.as_local().unwrap().abs_path().to_string_lossy()
996 ))
997 } else {
998 None
999 }
1000 })
1001 .collect::<Vec<_>>()
1002 }
1003
1004 pub fn worktree_for_id(
1005 &self,
1006 id: WorktreeId,
1007 cx: &AppContext,
1008 ) -> Option<ModelHandle<Worktree>> {
1009 self.worktrees(cx)
1010 .find(|worktree| worktree.read(cx).id() == id)
1011 }
1012
1013 pub fn worktree_for_entry(
1014 &self,
1015 entry_id: ProjectEntryId,
1016 cx: &AppContext,
1017 ) -> Option<ModelHandle<Worktree>> {
1018 self.worktrees(cx)
1019 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1020 }
1021
1022 pub fn worktree_id_for_entry(
1023 &self,
1024 entry_id: ProjectEntryId,
1025 cx: &AppContext,
1026 ) -> Option<WorktreeId> {
1027 self.worktree_for_entry(entry_id, cx)
1028 .map(|worktree| worktree.read(cx).id())
1029 }
1030
1031 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1032 paths.iter().all(|path| self.contains_path(&path, cx))
1033 }
1034
1035 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1036 for worktree in self.worktrees(cx) {
1037 let worktree = worktree.read(cx).as_local();
1038 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1039 return true;
1040 }
1041 }
1042 false
1043 }
1044
1045 pub fn create_entry(
1046 &mut self,
1047 project_path: impl Into<ProjectPath>,
1048 is_directory: bool,
1049 cx: &mut ModelContext<Self>,
1050 ) -> Option<Task<Result<Entry>>> {
1051 let project_path = project_path.into();
1052 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1053 if self.is_local() {
1054 Some(worktree.update(cx, |worktree, cx| {
1055 worktree
1056 .as_local_mut()
1057 .unwrap()
1058 .create_entry(project_path.path, is_directory, cx)
1059 }))
1060 } else {
1061 let client = self.client.clone();
1062 let project_id = self.remote_id().unwrap();
1063 Some(cx.spawn_weak(|_, mut cx| async move {
1064 let response = client
1065 .request(proto::CreateProjectEntry {
1066 worktree_id: project_path.worktree_id.to_proto(),
1067 project_id,
1068 path: project_path.path.as_os_str().as_bytes().to_vec(),
1069 is_directory,
1070 })
1071 .await?;
1072 let entry = response
1073 .entry
1074 .ok_or_else(|| anyhow!("missing entry in response"))?;
1075 worktree
1076 .update(&mut cx, |worktree, cx| {
1077 worktree.as_remote().unwrap().insert_entry(
1078 entry,
1079 response.worktree_scan_id as usize,
1080 cx,
1081 )
1082 })
1083 .await
1084 }))
1085 }
1086 }
1087
1088 pub fn copy_entry(
1089 &mut self,
1090 entry_id: ProjectEntryId,
1091 new_path: impl Into<Arc<Path>>,
1092 cx: &mut ModelContext<Self>,
1093 ) -> Option<Task<Result<Entry>>> {
1094 let worktree = self.worktree_for_entry(entry_id, cx)?;
1095 let new_path = new_path.into();
1096 if self.is_local() {
1097 worktree.update(cx, |worktree, cx| {
1098 worktree
1099 .as_local_mut()
1100 .unwrap()
1101 .copy_entry(entry_id, new_path, cx)
1102 })
1103 } else {
1104 let client = self.client.clone();
1105 let project_id = self.remote_id().unwrap();
1106
1107 Some(cx.spawn_weak(|_, mut cx| async move {
1108 let response = client
1109 .request(proto::CopyProjectEntry {
1110 project_id,
1111 entry_id: entry_id.to_proto(),
1112 new_path: new_path.as_os_str().as_bytes().to_vec(),
1113 })
1114 .await?;
1115 let entry = response
1116 .entry
1117 .ok_or_else(|| anyhow!("missing entry in response"))?;
1118 worktree
1119 .update(&mut cx, |worktree, cx| {
1120 worktree.as_remote().unwrap().insert_entry(
1121 entry,
1122 response.worktree_scan_id as usize,
1123 cx,
1124 )
1125 })
1126 .await
1127 }))
1128 }
1129 }
1130
1131 pub fn rename_entry(
1132 &mut self,
1133 entry_id: ProjectEntryId,
1134 new_path: impl Into<Arc<Path>>,
1135 cx: &mut ModelContext<Self>,
1136 ) -> Option<Task<Result<Entry>>> {
1137 let worktree = self.worktree_for_entry(entry_id, cx)?;
1138 let new_path = new_path.into();
1139 if self.is_local() {
1140 worktree.update(cx, |worktree, cx| {
1141 worktree
1142 .as_local_mut()
1143 .unwrap()
1144 .rename_entry(entry_id, new_path, cx)
1145 })
1146 } else {
1147 let client = self.client.clone();
1148 let project_id = self.remote_id().unwrap();
1149
1150 Some(cx.spawn_weak(|_, mut cx| async move {
1151 let response = client
1152 .request(proto::RenameProjectEntry {
1153 project_id,
1154 entry_id: entry_id.to_proto(),
1155 new_path: new_path.as_os_str().as_bytes().to_vec(),
1156 })
1157 .await?;
1158 let entry = response
1159 .entry
1160 .ok_or_else(|| anyhow!("missing entry in response"))?;
1161 worktree
1162 .update(&mut cx, |worktree, cx| {
1163 worktree.as_remote().unwrap().insert_entry(
1164 entry,
1165 response.worktree_scan_id as usize,
1166 cx,
1167 )
1168 })
1169 .await
1170 }))
1171 }
1172 }
1173
1174 pub fn delete_entry(
1175 &mut self,
1176 entry_id: ProjectEntryId,
1177 cx: &mut ModelContext<Self>,
1178 ) -> Option<Task<Result<()>>> {
1179 let worktree = self.worktree_for_entry(entry_id, cx)?;
1180 if self.is_local() {
1181 worktree.update(cx, |worktree, cx| {
1182 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1183 })
1184 } else {
1185 let client = self.client.clone();
1186 let project_id = self.remote_id().unwrap();
1187 Some(cx.spawn_weak(|_, mut cx| async move {
1188 let response = client
1189 .request(proto::DeleteProjectEntry {
1190 project_id,
1191 entry_id: entry_id.to_proto(),
1192 })
1193 .await?;
1194 worktree
1195 .update(&mut cx, move |worktree, cx| {
1196 worktree.as_remote().unwrap().delete_entry(
1197 entry_id,
1198 response.worktree_scan_id as usize,
1199 cx,
1200 )
1201 })
1202 .await
1203 }))
1204 }
1205 }
1206
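    /// Shares this local project with collaborators: upgrades buffer and worktree handles to
    /// strong references and shares each worktree with the server.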
1207 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1208 let project_id;
1209 if let ProjectClientState::Local {
1210 remote_id_rx,
1211 is_shared,
1212 ..
1213 } = &mut self.client_state
1214 {
1215 if *is_shared {
1216 return Task::ready(Ok(()));
1217 }
1218 *is_shared = true;
1219 if let Some(id) = *remote_id_rx.borrow() {
1220 project_id = id;
1221 } else {
1222 return Task::ready(Err(anyhow!("project hasn't been registered")));
1223 }
1224 } else {
1225 return Task::ready(Err(anyhow!("can't share a remote project")));
1226 };
1227
1228 for open_buffer in self.opened_buffers.values_mut() {
1229 match open_buffer {
1230 OpenBuffer::Strong(_) => {}
1231 OpenBuffer::Weak(buffer) => {
1232 if let Some(buffer) = buffer.upgrade(cx) {
1233 *open_buffer = OpenBuffer::Strong(buffer);
1234 }
1235 }
1236 OpenBuffer::Loading(_) => unreachable!(),
1237 }
1238 }
1239
1240 for worktree_handle in self.worktrees.iter_mut() {
1241 match worktree_handle {
1242 WorktreeHandle::Strong(_) => {}
1243 WorktreeHandle::Weak(worktree) => {
1244 if let Some(worktree) = worktree.upgrade(cx) {
1245 *worktree_handle = WorktreeHandle::Strong(worktree);
1246 }
1247 }
1248 }
1249 }
1250
1251 let mut tasks = Vec::new();
1252 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1253 worktree.update(cx, |worktree, cx| {
1254 let worktree = worktree.as_local_mut().unwrap();
1255 tasks.push(worktree.share(project_id, cx));
1256 });
1257 }
1258
1259 cx.spawn(|this, mut cx| async move {
1260 for task in tasks {
1261 task.await?;
1262 }
1263 this.update(&mut cx, |_, cx| cx.notify());
1264 Ok(())
1265 })
1266 }
1267
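    /// Stops sharing this local project: clears collaborators and shared buffers, and downgrades
    /// handles so that unreferenced buffers and invisible worktrees can be released.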
1268 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1269 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1270 if !*is_shared {
1271 return;
1272 }
1273
1274 *is_shared = false;
1275 self.collaborators.clear();
1276 self.shared_buffers.clear();
1277 for worktree_handle in self.worktrees.iter_mut() {
1278 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1279 let is_visible = worktree.update(cx, |worktree, _| {
1280 worktree.as_local_mut().unwrap().unshare();
1281 worktree.is_visible()
1282 });
1283 if !is_visible {
1284 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1285 }
1286 }
1287 }
1288
1289 for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
1296 }
1297
1298 cx.notify();
1299 } else {
1300 log::error!("attempted to unshare a remote project");
1301 }
1302 }
1303
1304 pub fn respond_to_join_request(
1305 &mut self,
1306 requester_id: u64,
1307 allow: bool,
1308 cx: &mut ModelContext<Self>,
1309 ) {
1310 if let Some(project_id) = self.remote_id() {
1311 let share = self.share(cx);
1312 let client = self.client.clone();
1313 cx.foreground()
1314 .spawn(async move {
1315 share.await?;
1316 client.send(proto::RespondToJoinProjectRequest {
1317 requester_id,
1318 project_id,
1319 allow,
1320 })
1321 })
1322 .detach_and_log_err(cx);
1323 }
1324 }
1325
1326 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1327 if let ProjectClientState::Remote {
1328 sharing_has_stopped,
1329 ..
1330 } = &mut self.client_state
1331 {
1332 *sharing_has_stopped = true;
1333 self.collaborators.clear();
1334 cx.notify();
1335 }
1336 }
1337
1338 pub fn is_read_only(&self) -> bool {
1339 match &self.client_state {
1340 ProjectClientState::Local { .. } => false,
1341 ProjectClientState::Remote {
1342 sharing_has_stopped,
1343 ..
1344 } => *sharing_has_stopped,
1345 }
1346 }
1347
1348 pub fn is_local(&self) -> bool {
1349 match &self.client_state {
1350 ProjectClientState::Local { .. } => true,
1351 ProjectClientState::Remote { .. } => false,
1352 }
1353 }
1354
1355 pub fn is_remote(&self) -> bool {
1356 !self.is_local()
1357 }
1358
1359 pub fn create_buffer(
1360 &mut self,
1361 text: &str,
1362 language: Option<Arc<Language>>,
1363 cx: &mut ModelContext<Self>,
1364 ) -> Result<ModelHandle<Buffer>> {
1365 if self.is_remote() {
1366 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1367 }
1368
1369 let buffer = cx.add_model(|cx| {
1370 Buffer::new(self.replica_id(), text, cx)
1371 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1372 });
1373 self.register_buffer(&buffer, cx)?;
1374 Ok(buffer)
1375 }
1376
1377 pub fn open_path(
1378 &mut self,
1379 path: impl Into<ProjectPath>,
1380 cx: &mut ModelContext<Self>,
1381 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1382 let task = self.open_buffer(path, cx);
1383 cx.spawn_weak(|_, cx| async move {
1384 let buffer = task.await?;
1385 let project_entry_id = buffer
1386 .read_with(&cx, |buffer, cx| {
1387 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1388 })
1389 .ok_or_else(|| anyhow!("no project entry"))?;
1390 Ok((project_entry_id, buffer.into()))
1391 })
1392 }
1393
1394 pub fn open_local_buffer(
1395 &mut self,
1396 abs_path: impl AsRef<Path>,
1397 cx: &mut ModelContext<Self>,
1398 ) -> Task<Result<ModelHandle<Buffer>>> {
1399 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1400 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1401 } else {
1402 Task::ready(Err(anyhow!("no such path")))
1403 }
1404 }
1405
1406 pub fn open_buffer(
1407 &mut self,
1408 path: impl Into<ProjectPath>,
1409 cx: &mut ModelContext<Self>,
1410 ) -> Task<Result<ModelHandle<Buffer>>> {
1411 let project_path = path.into();
1412 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1413 worktree
1414 } else {
1415 return Task::ready(Err(anyhow!("no such worktree")));
1416 };
1417
1418 // If there is already a buffer for the given path, then return it.
1419 let existing_buffer = self.get_open_buffer(&project_path, cx);
1420 if let Some(existing_buffer) = existing_buffer {
1421 return Task::ready(Ok(existing_buffer));
1422 }
1423
1424 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1425 // If the given path is already being loaded, then wait for that existing
1426 // task to complete and return the same buffer.
1427 hash_map::Entry::Occupied(e) => e.get().clone(),
1428
1429 // Otherwise, record the fact that this path is now being loaded.
1430 hash_map::Entry::Vacant(entry) => {
1431 let (mut tx, rx) = postage::watch::channel();
1432 entry.insert(rx.clone());
1433
1434 let load_buffer = if worktree.read(cx).is_local() {
1435 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1436 } else {
1437 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1438 };
1439
1440 cx.spawn(move |this, mut cx| async move {
1441 let load_result = load_buffer.await;
1442 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1443 // Record the fact that the buffer is no longer loading.
1444 this.loading_buffers.remove(&project_path);
1445 let buffer = load_result.map_err(Arc::new)?;
1446 Ok(buffer)
1447 }));
1448 })
1449 .detach();
1450 rx
1451 }
1452 };
1453
1454 cx.foreground().spawn(async move {
1455 loop {
1456 if let Some(result) = loading_watch.borrow().as_ref() {
1457 match result {
1458 Ok(buffer) => return Ok(buffer.clone()),
1459 Err(error) => return Err(anyhow!("{}", error)),
1460 }
1461 }
1462 loading_watch.next().await;
1463 }
1464 })
1465 }
1466
1467 fn open_local_buffer_internal(
1468 &mut self,
1469 path: &Arc<Path>,
1470 worktree: &ModelHandle<Worktree>,
1471 cx: &mut ModelContext<Self>,
1472 ) -> Task<Result<ModelHandle<Buffer>>> {
1473 let load_buffer = worktree.update(cx, |worktree, cx| {
1474 let worktree = worktree.as_local_mut().unwrap();
1475 worktree.load_buffer(path, cx)
1476 });
1477 cx.spawn(|this, mut cx| async move {
1478 let buffer = load_buffer.await?;
1479 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1480 Ok(buffer)
1481 })
1482 }
1483
1484 fn open_remote_buffer_internal(
1485 &mut self,
1486 path: &Arc<Path>,
1487 worktree: &ModelHandle<Worktree>,
1488 cx: &mut ModelContext<Self>,
1489 ) -> Task<Result<ModelHandle<Buffer>>> {
1490 let rpc = self.client.clone();
1491 let project_id = self.remote_id().unwrap();
1492 let remote_worktree_id = worktree.read(cx).id();
1493 let path = path.clone();
1494 let path_string = path.to_string_lossy().to_string();
1495 cx.spawn(|this, mut cx| async move {
1496 let response = rpc
1497 .request(proto::OpenBufferByPath {
1498 project_id,
1499 worktree_id: remote_worktree_id.to_proto(),
1500 path: path_string,
1501 })
1502 .await?;
1503 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1504 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1505 .await
1506 })
1507 }
1508
1509 fn open_local_buffer_via_lsp(
1510 &mut self,
1511 abs_path: lsp::Url,
1512 lsp_adapter: Arc<dyn LspAdapter>,
1513 lsp_server: Arc<LanguageServer>,
1514 cx: &mut ModelContext<Self>,
1515 ) -> Task<Result<ModelHandle<Buffer>>> {
1516 cx.spawn(|this, mut cx| async move {
1517 let abs_path = abs_path
1518 .to_file_path()
1519 .map_err(|_| anyhow!("can't convert URI to path"))?;
1520 let (worktree, relative_path) = if let Some(result) =
1521 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1522 {
1523 result
1524 } else {
1525 let worktree = this
1526 .update(&mut cx, |this, cx| {
1527 this.create_local_worktree(&abs_path, false, cx)
1528 })
1529 .await?;
1530 this.update(&mut cx, |this, cx| {
1531 this.language_servers.insert(
1532 (worktree.read(cx).id(), lsp_adapter.name()),
1533 (lsp_adapter, lsp_server),
1534 );
1535 });
1536 (worktree, PathBuf::new())
1537 };
1538
1539 let project_path = ProjectPath {
1540 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1541 path: relative_path.into(),
1542 };
1543 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1544 .await
1545 })
1546 }
1547
1548 pub fn open_buffer_by_id(
1549 &mut self,
1550 id: u64,
1551 cx: &mut ModelContext<Self>,
1552 ) -> Task<Result<ModelHandle<Buffer>>> {
1553 if let Some(buffer) = self.buffer_for_id(id, cx) {
1554 Task::ready(Ok(buffer))
1555 } else if self.is_local() {
1556 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1557 } else if let Some(project_id) = self.remote_id() {
1558 let request = self
1559 .client
1560 .request(proto::OpenBufferById { project_id, id });
1561 cx.spawn(|this, mut cx| async move {
1562 let buffer = request
1563 .await?
1564 .buffer
1565 .ok_or_else(|| anyhow!("invalid buffer"))?;
1566 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1567 .await
1568 })
1569 } else {
1570 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1571 }
1572 }
1573
1574 pub fn save_buffer_as(
1575 &mut self,
1576 buffer: ModelHandle<Buffer>,
1577 abs_path: PathBuf,
1578 cx: &mut ModelContext<Project>,
1579 ) -> Task<Result<()>> {
1580 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path = File::from_dyn(buffer.read(cx).file())
            .and_then(|f| f.as_local())
            .map(|f| f.abs_path(cx));
1583 cx.spawn(|this, mut cx| async move {
1584 if let Some(old_path) = old_path {
1585 this.update(&mut cx, |this, cx| {
1586 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1587 });
1588 }
1589 let (worktree, path) = worktree_task.await?;
1590 worktree
1591 .update(&mut cx, |worktree, cx| {
1592 worktree
1593 .as_local_mut()
1594 .unwrap()
1595 .save_buffer_as(buffer.clone(), path, cx)
1596 })
1597 .await?;
1598 this.update(&mut cx, |this, cx| {
1599 this.assign_language_to_buffer(&buffer, cx);
1600 this.register_buffer_with_language_server(&buffer, cx);
1601 });
1602 Ok(())
1603 })
1604 }
1605
1606 pub fn get_open_buffer(
1607 &mut self,
1608 path: &ProjectPath,
1609 cx: &mut ModelContext<Self>,
1610 ) -> Option<ModelHandle<Buffer>> {
1611 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1612 self.opened_buffers.values().find_map(|buffer| {
1613 let buffer = buffer.upgrade(cx)?;
1614 let file = File::from_dyn(buffer.read(cx).file())?;
1615 if file.worktree == worktree && file.path() == &path.path {
1616 Some(buffer)
1617 } else {
1618 None
1619 }
1620 })
1621 }
1622
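    /// Registers a newly-opened buffer with the project, applying any operations that arrived
    /// while it was loading, assigning its language, and connecting it to a language server.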
1623 fn register_buffer(
1624 &mut self,
1625 buffer: &ModelHandle<Buffer>,
1626 cx: &mut ModelContext<Self>,
1627 ) -> Result<()> {
1628 let remote_id = buffer.read(cx).remote_id();
1629 let open_buffer = if self.is_remote() || self.is_shared() {
1630 OpenBuffer::Strong(buffer.clone())
1631 } else {
1632 OpenBuffer::Weak(buffer.downgrade())
1633 };
1634
1635 match self.opened_buffers.insert(remote_id, open_buffer) {
1636 None => {}
1637 Some(OpenBuffer::Loading(operations)) => {
1638 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1639 }
1640 Some(OpenBuffer::Weak(existing_handle)) => {
1641 if existing_handle.upgrade(cx).is_some() {
1642 Err(anyhow!(
1643 "already registered buffer with remote id {}",
1644 remote_id
1645 ))?
1646 }
1647 }
1648 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1649 "already registered buffer with remote id {}",
1650 remote_id
1651 ))?,
1652 }
1653 cx.subscribe(buffer, |this, buffer, event, cx| {
1654 this.on_buffer_event(buffer, event, cx);
1655 })
1656 .detach();
1657
1658 self.assign_language_to_buffer(buffer, cx);
1659 self.register_buffer_with_language_server(buffer, cx);
1660 cx.observe_release(buffer, |this, buffer, cx| {
1661 if let Some(file) = File::from_dyn(buffer.file()) {
1662 if file.is_local() {
1663 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1664 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1665 server
1666 .notify::<lsp::notification::DidCloseTextDocument>(
1667 lsp::DidCloseTextDocumentParams {
1668 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1669 },
1670 )
1671 .log_err();
1672 }
1673 }
1674 }
1675 })
1676 .detach();
1677
1678 Ok(())
1679 }
1680
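    /// Informs the buffer's language server that the buffer was opened (via
    /// `textDocument/didOpen`) and records the snapshot that serves as document version 0.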
1681 fn register_buffer_with_language_server(
1682 &mut self,
1683 buffer_handle: &ModelHandle<Buffer>,
1684 cx: &mut ModelContext<Self>,
1685 ) {
1686 let buffer = buffer_handle.read(cx);
1687 let buffer_id = buffer.remote_id();
1688 if let Some(file) = File::from_dyn(buffer.file()) {
1689 if file.is_local() {
1690 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1691 let initial_snapshot = buffer.text_snapshot();
1692
1693 let mut language_server = None;
1694 let mut language_id = None;
1695 if let Some(language) = buffer.language() {
1696 let worktree_id = file.worktree_id(cx);
1697 if let Some(adapter) = language.lsp_adapter() {
1698 language_id = adapter.id_for_language(language.name().as_ref());
1699 language_server = self
1700 .language_servers
1701 .get(&(worktree_id, adapter.name()))
1702 .cloned();
1703 }
1704 }
1705
1706 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1707 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1708 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1709 .log_err();
1710 }
1711 }
1712
1713 if let Some((_, server)) = language_server {
1714 server
1715 .notify::<lsp::notification::DidOpenTextDocument>(
1716 lsp::DidOpenTextDocumentParams {
1717 text_document: lsp::TextDocumentItem::new(
1718 uri,
1719 language_id.unwrap_or_default(),
1720 0,
1721 initial_snapshot.text(),
1722 ),
                            },
1725 )
1726 .log_err();
1727 buffer_handle.update(cx, |buffer, cx| {
1728 buffer.set_completion_triggers(
1729 server
1730 .capabilities()
1731 .completion_provider
1732 .as_ref()
1733 .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or_default(),
1735 cx,
1736 )
1737 });
1738 self.buffer_snapshots
1739 .insert(buffer_id, vec![(0, initial_snapshot)]);
1740 }
1741 }
1742 }
1743 }
1744
1745 fn unregister_buffer_from_language_server(
1746 &mut self,
1747 buffer: &ModelHandle<Buffer>,
1748 old_path: PathBuf,
1749 cx: &mut ModelContext<Self>,
1750 ) {
1751 buffer.update(cx, |buffer, cx| {
1752 buffer.update_diagnostics(Default::default(), cx);
1753 self.buffer_snapshots.remove(&buffer.remote_id());
1754 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1755 language_server
1756 .notify::<lsp::notification::DidCloseTextDocument>(
1757 lsp::DidCloseTextDocumentParams {
1758 text_document: lsp::TextDocumentIdentifier::new(
1759 lsp::Url::from_file_path(old_path).unwrap(),
1760 ),
1761 },
1762 )
1763 .log_err();
1764 }
1765 });
1766 }
1767
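    /// Handles buffer events: forwards edit operations to collaborators, sends incremental
    /// `textDocument/didChange` notifications, and notifies language servers when a buffer is
    /// saved.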
1768 fn on_buffer_event(
1769 &mut self,
1770 buffer: ModelHandle<Buffer>,
1771 event: &BufferEvent,
1772 cx: &mut ModelContext<Self>,
1773 ) -> Option<()> {
1774 match event {
1775 BufferEvent::Operation(operation) => {
1776 if let Some(project_id) = self.shared_remote_id() {
1777 let request = self.client.request(proto::UpdateBuffer {
1778 project_id,
1779 buffer_id: buffer.read(cx).remote_id(),
1780 operations: vec![language::proto::serialize_operation(&operation)],
1781 });
1782 cx.background().spawn(request).detach_and_log_err(cx);
1783 }
1784 }
1785 BufferEvent::Edited { .. } => {
1786 let (_, language_server) = self
1787 .language_server_for_buffer(buffer.read(cx), cx)?
1788 .clone();
1789 let buffer = buffer.read(cx);
1790 let file = File::from_dyn(buffer.file())?;
1791 let abs_path = file.as_local()?.abs_path(cx);
1792 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1793 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1794 let (version, prev_snapshot) = buffer_snapshots.last()?;
1795 let next_snapshot = buffer.text_snapshot();
1796 let next_version = version + 1;
1797
1798 let content_changes = buffer
1799 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1800 .map(|edit| {
1801 let edit_start = edit.new.start.0;
1802 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1803 let new_text = next_snapshot
1804 .text_for_range(edit.new.start.1..edit.new.end.1)
1805 .collect();
1806 lsp::TextDocumentContentChangeEvent {
1807 range: Some(lsp::Range::new(
1808 point_to_lsp(edit_start),
1809 point_to_lsp(edit_end),
1810 )),
1811 range_length: None,
1812 text: new_text,
1813 }
1814 })
1815 .collect();
1816
1817 buffer_snapshots.push((next_version, next_snapshot));
1818
1819 language_server
1820 .notify::<lsp::notification::DidChangeTextDocument>(
1821 lsp::DidChangeTextDocumentParams {
1822 text_document: lsp::VersionedTextDocumentIdentifier::new(
1823 uri,
1824 next_version,
1825 ),
1826 content_changes,
1827 },
1828 )
1829 .log_err();
1830 }
1831 BufferEvent::Saved => {
1832 let file = File::from_dyn(buffer.read(cx).file())?;
1833 let worktree_id = file.worktree_id(cx);
1834 let abs_path = file.as_local()?.abs_path(cx);
1835 let text_document = lsp::TextDocumentIdentifier {
1836 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1837 };
1838
1839 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1840 server
1841 .notify::<lsp::notification::DidSaveTextDocument>(
1842 lsp::DidSaveTextDocumentParams {
1843 text_document: text_document.clone(),
1844 text: None,
1845 },
1846 )
1847 .log_err();
1848 }
1849
1850 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1851 // that don't support a disk-based progress token.
1852 let (lsp_adapter, language_server) =
1853 self.language_server_for_buffer(buffer.read(cx), cx)?;
1854 if lsp_adapter
1855 .disk_based_diagnostics_progress_token()
1856 .is_none()
1857 {
1858 let server_id = language_server.server_id();
1859 self.disk_based_diagnostics_finished(server_id, cx);
1860 self.broadcast_language_server_update(
1861 server_id,
1862 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1863 proto::LspDiskBasedDiagnosticsUpdated {},
1864 ),
1865 );
1866 }
1867 }
1868 _ => {}
1869 }
1870
1871 None
1872 }
1873
1874 fn language_servers_for_worktree(
1875 &self,
1876 worktree_id: WorktreeId,
1877 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1878 self.language_servers.iter().filter_map(
1879 move |((language_server_worktree_id, _), server)| {
1880 if *language_server_worktree_id == worktree_id {
1881 Some(server)
1882 } else {
1883 None
1884 }
1885 },
1886 )
1887 }
1888
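    /// Detects the buffer's language from its path and, for buffers in local worktrees, starts
    /// the corresponding language server if needed.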
1889 fn assign_language_to_buffer(
1890 &mut self,
1891 buffer: &ModelHandle<Buffer>,
1892 cx: &mut ModelContext<Self>,
1893 ) -> Option<()> {
        // Look up the buffer's language from its path; if one is found, assign it and ensure its language server is running.
1895 let full_path = buffer.read(cx).file()?.full_path(cx);
1896 let language = self.languages.select_language(&full_path)?;
1897 buffer.update(cx, |buffer, cx| {
1898 buffer.set_language(Some(language.clone()), cx);
1899 });
1900
1901 let file = File::from_dyn(buffer.read(cx).file())?;
1902 let worktree = file.worktree.read(cx).as_local()?;
1903 let worktree_id = worktree.id();
1904 let worktree_abs_path = worktree.abs_path().clone();
1905 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1906
1907 None
1908 }
1909
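    // Starts a language server for the given worktree and language, if settings allow it and
    // one hasn't been started already. After initialization, registers handlers for published
    // diagnostics, workspace configuration, workspace edits, and progress notifications, then
    // reports every matching open buffer to the new server.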
1910 fn start_language_server(
1911 &mut self,
1912 worktree_id: WorktreeId,
1913 worktree_path: Arc<Path>,
1914 language: Arc<Language>,
1915 cx: &mut ModelContext<Self>,
1916 ) {
1917 if !cx
1918 .global::<Settings>()
1919 .enable_language_server(Some(&language.name()))
1920 {
1921 return;
1922 }
1923
1924 let adapter = if let Some(adapter) = language.lsp_adapter() {
1925 adapter
1926 } else {
1927 return;
1928 };
1929 let key = (worktree_id, adapter.name());
1930 self.started_language_servers
1931 .entry(key.clone())
1932 .or_insert_with(|| {
1933 let server_id = post_inc(&mut self.next_language_server_id);
1934 let language_server = self.languages.start_language_server(
1935 server_id,
1936 language.clone(),
1937 worktree_path,
1938 self.client.http_client(),
1939 cx,
1940 );
1941 cx.spawn_weak(|this, mut cx| async move {
1942 let language_server = language_server?.await.log_err()?;
1943 let language_server = language_server
1944 .initialize(adapter.initialization_options())
1945 .await
1946 .log_err()?;
1947 let this = this.upgrade(&cx)?;
1948 let disk_based_diagnostics_progress_token =
1949 adapter.disk_based_diagnostics_progress_token();
1950
1951 language_server
1952 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1953 let this = this.downgrade();
1954 let adapter = adapter.clone();
1955 move |params, mut cx| {
1956 if let Some(this) = this.upgrade(&cx) {
1957 this.update(&mut cx, |this, cx| {
1958 this.on_lsp_diagnostics_published(
1959 server_id, params, &adapter, cx,
1960 );
1961 });
1962 }
1963 }
1964 })
1965 .detach();
1966
1967 language_server
1968 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1969 let settings = this
1970 .read_with(&cx, |this, _| this.language_server_settings.clone());
1971 move |params, _| {
1972 let settings = settings.lock().clone();
1973 async move {
1974 Ok(params
1975 .items
1976 .into_iter()
1977 .map(|item| {
1978 if let Some(section) = &item.section {
1979 settings
1980 .get(section)
1981 .cloned()
1982 .unwrap_or(serde_json::Value::Null)
1983 } else {
1984 settings.clone()
1985 }
1986 })
1987 .collect())
1988 }
1989 }
1990 })
1991 .detach();
1992
1993 language_server
1994 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1995 let this = this.downgrade();
1996 let adapter = adapter.clone();
1997 let language_server = language_server.clone();
1998 move |params, cx| {
1999 Self::on_lsp_workspace_edit(
2000 this,
2001 params,
2002 server_id,
2003 adapter.clone(),
2004 language_server.clone(),
2005 cx,
2006 )
2007 }
2008 })
2009 .detach();
2010
2011 language_server
2012 .on_notification::<lsp::notification::Progress, _>({
2013 let this = this.downgrade();
2014 move |params, mut cx| {
2015 if let Some(this) = this.upgrade(&cx) {
2016 this.update(&mut cx, |this, cx| {
2017 this.on_lsp_progress(
2018 params,
2019 server_id,
2020 disk_based_diagnostics_progress_token,
2021 cx,
2022 );
2023 });
2024 }
2025 }
2026 })
2027 .detach();
2028
2029 this.update(&mut cx, |this, cx| {
2030 this.language_servers
2031 .insert(key.clone(), (adapter.clone(), language_server.clone()));
2032 this.language_server_statuses.insert(
2033 server_id,
2034 LanguageServerStatus {
2035 name: language_server.name().to_string(),
2036 pending_work: Default::default(),
2037 pending_diagnostic_updates: 0,
2038 },
2039 );
2040 language_server
2041 .notify::<lsp::notification::DidChangeConfiguration>(
2042 lsp::DidChangeConfigurationParams {
2043 settings: this.language_server_settings.lock().clone(),
2044 },
2045 )
2046 .ok();
2047
2048 if let Some(project_id) = this.shared_remote_id() {
2049 this.client
2050 .send(proto::StartLanguageServer {
2051 project_id,
2052 server: Some(proto::LanguageServer {
2053 id: server_id as u64,
2054 name: language_server.name().to_string(),
2055 }),
2056 })
2057 .log_err();
2058 }
2059
2060 // Tell the language server about every open buffer in the worktree that matches the language.
2061 for buffer in this.opened_buffers.values() {
2062 if let Some(buffer_handle) = buffer.upgrade(cx) {
2063 let buffer = buffer_handle.read(cx);
2064 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2065 file
2066 } else {
2067 continue;
2068 };
2069 let language = if let Some(language) = buffer.language() {
2070 language
2071 } else {
2072 continue;
2073 };
2074 if file.worktree.read(cx).id() != key.0
2075 || language.lsp_adapter().map(|a| a.name())
2076 != Some(key.1.clone())
2077 {
2078 continue;
2079 }
2080
2081 let file = file.as_local()?;
2082 let versions = this
2083 .buffer_snapshots
2084 .entry(buffer.remote_id())
2085 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2086 let (version, initial_snapshot) = versions.last().unwrap();
2087 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2088 let language_id = adapter.id_for_language(language.name().as_ref());
2089 language_server
2090 .notify::<lsp::notification::DidOpenTextDocument>(
2091 lsp::DidOpenTextDocumentParams {
2092 text_document: lsp::TextDocumentItem::new(
2093 uri,
2094 language_id.unwrap_or_default(),
2095 *version,
2096 initial_snapshot.text(),
2097 ),
2098 },
2099 )
2100 .log_err()?;
2101 buffer_handle.update(cx, |buffer, cx| {
2102 buffer.set_completion_triggers(
2103 language_server
2104 .capabilities()
2105 .completion_provider
2106 .as_ref()
2107 .and_then(|provider| {
2108 provider.trigger_characters.clone()
2109 })
2110 .unwrap_or(Vec::new()),
2111 cx,
2112 )
2113 });
2114 }
2115 }
2116
2117 cx.notify();
2118 Some(())
2119 });
2120
2121 Some(language_server)
2122 })
2123 });
2124 }
2125
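    // Removes the language server registered for this worktree and adapter, asks it to shut
    // down, and clears its status entry once the shutdown completes.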
2126 fn stop_language_server(
2127 &mut self,
2128 worktree_id: WorktreeId,
2129 adapter_name: LanguageServerName,
2130 cx: &mut ModelContext<Self>,
2131 ) -> Task<()> {
2132 let key = (worktree_id, adapter_name);
2133 self.language_servers.remove(&key);
2134 if let Some(language_server) = self.started_language_servers.remove(&key) {
2135 cx.spawn_weak(|this, mut cx| async move {
2136 if let Some(language_server) = language_server.await {
2137 if let Some(shutdown) = language_server.shutdown() {
2138 shutdown.await;
2139 }
2140
2141 if let Some(this) = this.upgrade(&cx) {
2142 this.update(&mut cx, |this, cx| {
2143 this.language_server_statuses
2144 .remove(&language_server.server_id());
2145 cx.notify();
2146 });
2147 }
2148 }
2149 })
2150 } else {
2151 Task::ready(())
2152 }
2153 }
2154
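    // Restarts the language servers backing the given buffers by stopping and re-starting the
    // server for each affected (worktree, language) pair.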
2155 pub fn restart_language_servers_for_buffers(
2156 &mut self,
2157 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2158 cx: &mut ModelContext<Self>,
2159 ) -> Option<()> {
2160 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2161 .into_iter()
2162 .filter_map(|buffer| {
2163 let file = File::from_dyn(buffer.read(cx).file())?;
2164 let worktree = file.worktree.read(cx).as_local()?;
2165 let worktree_id = worktree.id();
2166 let worktree_abs_path = worktree.abs_path().clone();
2167 let full_path = file.full_path(cx);
2168 Some((worktree_id, worktree_abs_path, full_path))
2169 })
2170 .collect();
2171 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2172 let language = self.languages.select_language(&full_path)?;
2173 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2174 }
2175
2176 None
2177 }
2178
2179 fn restart_language_server(
2180 &mut self,
2181 worktree_id: WorktreeId,
2182 worktree_path: Arc<Path>,
2183 language: Arc<Language>,
2184 cx: &mut ModelContext<Self>,
2185 ) {
2186 let adapter = if let Some(adapter) = language.lsp_adapter() {
2187 adapter
2188 } else {
2189 return;
2190 };
2191
2192 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2193 cx.spawn_weak(|this, mut cx| async move {
2194 stop.await;
2195 if let Some(this) = this.upgrade(&cx) {
2196 this.update(&mut cx, |this, cx| {
2197 this.start_language_server(worktree_id, worktree_path, language, cx);
2198 });
2199 }
2200 })
2201 .detach();
2202 }
2203
2204 fn on_lsp_diagnostics_published(
2205 &mut self,
2206 server_id: usize,
2207 mut params: lsp::PublishDiagnosticsParams,
2208 adapter: &Arc<dyn LspAdapter>,
2209 cx: &mut ModelContext<Self>,
2210 ) {
2211 adapter.process_diagnostics(&mut params);
2212 self.update_diagnostics(
2213 server_id,
2214 params,
2215 adapter.disk_based_diagnostic_sources(),
2216 cx,
2217 )
2218 .log_err();
2219 }
2220
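    // Handles `$/progress` notifications. String tokens matching the adapter's disk-based
    // diagnostics token are counted to detect when disk-based diagnostics start and finish;
    // all other work-done progress is recorded in the server's status and broadcast to
    // collaborators. Numeric tokens are ignored.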
2221 fn on_lsp_progress(
2222 &mut self,
2223 progress: lsp::ProgressParams,
2224 server_id: usize,
2225 disk_based_diagnostics_progress_token: Option<&str>,
2226 cx: &mut ModelContext<Self>,
2227 ) {
2228 let token = match progress.token {
2229 lsp::NumberOrString::String(token) => token,
2230 lsp::NumberOrString::Number(token) => {
2231 log::info!("skipping numeric progress token {}", token);
2232 return;
2233 }
2234 };
2235 let progress = match progress.value {
2236 lsp::ProgressParamsValue::WorkDone(value) => value,
2237 };
2238 let language_server_status =
2239 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2240 status
2241 } else {
2242 return;
2243 };
2244 match progress {
2245 lsp::WorkDoneProgress::Begin(_) => {
2246 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2247 language_server_status.pending_diagnostic_updates += 1;
2248 if language_server_status.pending_diagnostic_updates == 1 {
2249 self.disk_based_diagnostics_started(server_id, cx);
2250 self.broadcast_language_server_update(
2251 server_id,
2252 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2253 proto::LspDiskBasedDiagnosticsUpdating {},
2254 ),
2255 );
2256 }
2257 } else {
2258 self.on_lsp_work_start(server_id, token.clone(), cx);
2259 self.broadcast_language_server_update(
2260 server_id,
2261 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2262 token,
2263 }),
2264 );
2265 }
2266 }
2267 lsp::WorkDoneProgress::Report(report) => {
2268 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2269 self.on_lsp_work_progress(
2270 server_id,
2271 token.clone(),
2272 LanguageServerProgress {
2273 message: report.message.clone(),
2274 percentage: report.percentage.map(|p| p as usize),
2275 last_update_at: Instant::now(),
2276 },
2277 cx,
2278 );
2279 self.broadcast_language_server_update(
2280 server_id,
2281 proto::update_language_server::Variant::WorkProgress(
2282 proto::LspWorkProgress {
2283 token,
2284 message: report.message,
2285 percentage: report.percentage.map(|p| p as u32),
2286 },
2287 ),
2288 );
2289 }
2290 }
2291 lsp::WorkDoneProgress::End(_) => {
2292 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2293 language_server_status.pending_diagnostic_updates -= 1;
2294 if language_server_status.pending_diagnostic_updates == 0 {
2295 self.disk_based_diagnostics_finished(server_id, cx);
2296 self.broadcast_language_server_update(
2297 server_id,
2298 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2299 proto::LspDiskBasedDiagnosticsUpdated {},
2300 ),
2301 );
2302 }
2303 } else {
2304 self.on_lsp_work_end(server_id, token.clone(), cx);
2305 self.broadcast_language_server_update(
2306 server_id,
2307 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2308 token,
2309 }),
2310 );
2311 }
2312 }
2313 }
2314 }
2315
2316 fn on_lsp_work_start(
2317 &mut self,
2318 language_server_id: usize,
2319 token: String,
2320 cx: &mut ModelContext<Self>,
2321 ) {
2322 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2323 status.pending_work.insert(
2324 token,
2325 LanguageServerProgress {
2326 message: None,
2327 percentage: None,
2328 last_update_at: Instant::now(),
2329 },
2330 );
2331 cx.notify();
2332 }
2333 }
2334
2335 fn on_lsp_work_progress(
2336 &mut self,
2337 language_server_id: usize,
2338 token: String,
2339 progress: LanguageServerProgress,
2340 cx: &mut ModelContext<Self>,
2341 ) {
2342 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2343 status.pending_work.insert(token, progress);
2344 cx.notify();
2345 }
2346 }
2347
2348 fn on_lsp_work_end(
2349 &mut self,
2350 language_server_id: usize,
2351 token: String,
2352 cx: &mut ModelContext<Self>,
2353 ) {
2354 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2355 status.pending_work.remove(&token);
2356 cx.notify();
2357 }
2358 }
2359
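    // Handles a `workspace/applyEdit` request from a language server by applying the edit
    // locally and remembering the resulting transaction for that server.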
2360 async fn on_lsp_workspace_edit(
2361 this: WeakModelHandle<Self>,
2362 params: lsp::ApplyWorkspaceEditParams,
2363 server_id: usize,
2364 adapter: Arc<dyn LspAdapter>,
2365 language_server: Arc<LanguageServer>,
2366 mut cx: AsyncAppContext,
2367 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2368 let this = this
2369 .upgrade(&cx)
2370             .ok_or_else(|| anyhow!("project closed"))?;
2371 let transaction = Self::deserialize_workspace_edit(
2372 this.clone(),
2373 params.edit,
2374 true,
2375 adapter.clone(),
2376 language_server.clone(),
2377 &mut cx,
2378 )
2379 .await
2380 .log_err();
2381 this.update(&mut cx, |this, _| {
2382 if let Some(transaction) = transaction {
2383 this.last_workspace_edits_by_language_server
2384 .insert(server_id, transaction);
2385 }
2386 });
2387 Ok(lsp::ApplyWorkspaceEditResponse {
2388 applied: true,
2389 failed_change: None,
2390 failure_reason: None,
2391 })
2392 }
2393
2394 fn broadcast_language_server_update(
2395 &self,
2396 language_server_id: usize,
2397 event: proto::update_language_server::Variant,
2398 ) {
2399 if let Some(project_id) = self.shared_remote_id() {
2400 self.client
2401 .send(proto::UpdateLanguageServer {
2402 project_id,
2403 language_server_id: language_server_id as u64,
2404 variant: Some(event),
2405 })
2406 .log_err();
2407 }
2408 }
2409
2410 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2411 for (_, server) in self.language_servers.values() {
2412 server
2413 .notify::<lsp::notification::DidChangeConfiguration>(
2414 lsp::DidChangeConfigurationParams {
2415 settings: settings.clone(),
2416 },
2417 )
2418 .ok();
2419 }
2420 *self.language_server_settings.lock() = settings;
2421 }
2422
2423 pub fn language_server_statuses(
2424 &self,
2425 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2426 self.language_server_statuses.values()
2427 }
2428
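    // Converts a `textDocument/publishDiagnostics` payload into diagnostic entries, grouping
    // each primary diagnostic with the supporting diagnostics referenced via its related
    // information, then applies them to the corresponding worktree path.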
2429 pub fn update_diagnostics(
2430 &mut self,
2431 language_server_id: usize,
2432 params: lsp::PublishDiagnosticsParams,
2433 disk_based_sources: &[&str],
2434 cx: &mut ModelContext<Self>,
2435 ) -> Result<()> {
2436 let abs_path = params
2437 .uri
2438 .to_file_path()
2439 .map_err(|_| anyhow!("URI is not a file"))?;
2440 let mut diagnostics = Vec::default();
2441 let mut primary_diagnostic_group_ids = HashMap::default();
2442 let mut sources_by_group_id = HashMap::default();
2443 let mut supporting_diagnostics = HashMap::default();
2444         for diagnostic in &params.diagnostics {
2445 let source = diagnostic.source.as_ref();
2446 let code = diagnostic.code.as_ref().map(|code| match code {
2447 lsp::NumberOrString::Number(code) => code.to_string(),
2448 lsp::NumberOrString::String(code) => code.clone(),
2449 });
2450 let range = range_from_lsp(diagnostic.range);
2451 let is_supporting = diagnostic
2452 .related_information
2453 .as_ref()
2454 .map_or(false, |infos| {
2455 infos.iter().any(|info| {
2456 primary_diagnostic_group_ids.contains_key(&(
2457 source,
2458 code.clone(),
2459 range_from_lsp(info.location.range),
2460 ))
2461 })
2462 });
2463
2464 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2465 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2466 });
2467
2468 if is_supporting {
2469 supporting_diagnostics.insert(
2470 (source, code.clone(), range),
2471 (diagnostic.severity, is_unnecessary),
2472 );
2473 } else {
2474 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2475 let is_disk_based = source.map_or(false, |source| {
2476 disk_based_sources.contains(&source.as_str())
2477 });
2478
2479 sources_by_group_id.insert(group_id, source);
2480 primary_diagnostic_group_ids
2481 .insert((source, code.clone(), range.clone()), group_id);
2482
2483 diagnostics.push(DiagnosticEntry {
2484 range,
2485 diagnostic: Diagnostic {
2486 code: code.clone(),
2487 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2488 message: diagnostic.message.clone(),
2489 group_id,
2490 is_primary: true,
2491 is_valid: true,
2492 is_disk_based,
2493 is_unnecessary,
2494 },
2495 });
2496 if let Some(infos) = &diagnostic.related_information {
2497 for info in infos {
2498 if info.location.uri == params.uri && !info.message.is_empty() {
2499 let range = range_from_lsp(info.location.range);
2500 diagnostics.push(DiagnosticEntry {
2501 range,
2502 diagnostic: Diagnostic {
2503 code: code.clone(),
2504 severity: DiagnosticSeverity::INFORMATION,
2505 message: info.message.clone(),
2506 group_id,
2507 is_primary: false,
2508 is_valid: true,
2509 is_disk_based,
2510 is_unnecessary: false,
2511 },
2512 });
2513 }
2514 }
2515 }
2516 }
2517 }
2518
2519 for entry in &mut diagnostics {
2520 let diagnostic = &mut entry.diagnostic;
2521 if !diagnostic.is_primary {
2522 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2523 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2524 source,
2525 diagnostic.code.clone(),
2526 entry.range.clone(),
2527 )) {
2528 if let Some(severity) = severity {
2529 diagnostic.severity = severity;
2530 }
2531 diagnostic.is_unnecessary = is_unnecessary;
2532 }
2533 }
2534 }
2535
2536 self.update_diagnostic_entries(
2537 language_server_id,
2538 abs_path,
2539 params.version,
2540 diagnostics,
2541 cx,
2542 )?;
2543 Ok(())
2544 }
2545
2546 pub fn update_diagnostic_entries(
2547 &mut self,
2548 language_server_id: usize,
2549 abs_path: PathBuf,
2550 version: Option<i32>,
2551 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2552 cx: &mut ModelContext<Project>,
2553 ) -> Result<(), anyhow::Error> {
2554 let (worktree, relative_path) = self
2555 .find_local_worktree(&abs_path, cx)
2556 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2557 if !worktree.read(cx).is_visible() {
2558 return Ok(());
2559 }
2560
2561 let project_path = ProjectPath {
2562 worktree_id: worktree.read(cx).id(),
2563 path: relative_path.into(),
2564 };
2565 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2566 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2567 }
2568
2569 let updated = worktree.update(cx, |worktree, cx| {
2570 worktree
2571 .as_local_mut()
2572 .ok_or_else(|| anyhow!("not a local worktree"))?
2573 .update_diagnostics(
2574 language_server_id,
2575 project_path.path.clone(),
2576 diagnostics,
2577 cx,
2578 )
2579 })?;
2580 if updated {
2581 cx.emit(Event::DiagnosticsUpdated {
2582 language_server_id,
2583 path: project_path,
2584 });
2585 }
2586 Ok(())
2587 }
2588
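    // Applies diagnostics to an open buffer. Disk-based diagnostics are translated through any
    // edits made since the last save, all ranges are clipped to the buffer, and empty ranges
    // are widened to cover at least one character.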
2589 fn update_buffer_diagnostics(
2590 &mut self,
2591 buffer: &ModelHandle<Buffer>,
2592 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2593 version: Option<i32>,
2594 cx: &mut ModelContext<Self>,
2595 ) -> Result<()> {
2596 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2597 Ordering::Equal
2598 .then_with(|| b.is_primary.cmp(&a.is_primary))
2599 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2600 .then_with(|| a.severity.cmp(&b.severity))
2601 .then_with(|| a.message.cmp(&b.message))
2602 }
2603
2604 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2605
2606 diagnostics.sort_unstable_by(|a, b| {
2607 Ordering::Equal
2608 .then_with(|| a.range.start.cmp(&b.range.start))
2609 .then_with(|| b.range.end.cmp(&a.range.end))
2610 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2611 });
2612
2613 let mut sanitized_diagnostics = Vec::new();
2614 let edits_since_save = Patch::new(
2615 snapshot
2616 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2617 .collect(),
2618 );
2619 for entry in diagnostics {
2620 let start;
2621 let end;
2622 if entry.diagnostic.is_disk_based {
2623 // Some diagnostics are based on files on disk instead of buffers'
2624 // current contents. Adjust these diagnostics' ranges to reflect
2625 // any unsaved edits.
2626 start = edits_since_save.old_to_new(entry.range.start);
2627 end = edits_since_save.old_to_new(entry.range.end);
2628 } else {
2629 start = entry.range.start;
2630 end = entry.range.end;
2631 }
2632
2633 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2634 ..snapshot.clip_point_utf16(end, Bias::Right);
2635
2636 // Expand empty ranges by one character
2637 if range.start == range.end {
2638 range.end.column += 1;
2639 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2640 if range.start == range.end && range.end.column > 0 {
2641 range.start.column -= 1;
2642 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2643 }
2644 }
2645
2646 sanitized_diagnostics.push(DiagnosticEntry {
2647 range,
2648 diagnostic: entry.diagnostic,
2649 });
2650 }
2651 drop(edits_since_save);
2652
2653 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2654 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2655 Ok(())
2656 }
2657
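    // Reloads the given buffers from disk, skipping buffers that aren't dirty. Remote buffers
    // are reloaded via a request to the host project, local buffers directly, and all resulting
    // transactions are collected into a single `ProjectTransaction`.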
2658 pub fn reload_buffers(
2659 &self,
2660 buffers: HashSet<ModelHandle<Buffer>>,
2661 push_to_history: bool,
2662 cx: &mut ModelContext<Self>,
2663 ) -> Task<Result<ProjectTransaction>> {
2664 let mut local_buffers = Vec::new();
2665 let mut remote_buffers = None;
2666 for buffer_handle in buffers {
2667 let buffer = buffer_handle.read(cx);
2668 if buffer.is_dirty() {
2669 if let Some(file) = File::from_dyn(buffer.file()) {
2670 if file.is_local() {
2671 local_buffers.push(buffer_handle);
2672 } else {
2673 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2674 }
2675 }
2676 }
2677 }
2678
2679 let remote_buffers = self.remote_id().zip(remote_buffers);
2680 let client = self.client.clone();
2681
2682 cx.spawn(|this, mut cx| async move {
2683 let mut project_transaction = ProjectTransaction::default();
2684
2685 if let Some((project_id, remote_buffers)) = remote_buffers {
2686 let response = client
2687 .request(proto::ReloadBuffers {
2688 project_id,
2689 buffer_ids: remote_buffers
2690 .iter()
2691 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2692 .collect(),
2693 })
2694 .await?
2695 .transaction
2696 .ok_or_else(|| anyhow!("missing transaction"))?;
2697 project_transaction = this
2698 .update(&mut cx, |this, cx| {
2699 this.deserialize_project_transaction(response, push_to_history, cx)
2700 })
2701 .await?;
2702 }
2703
2704 for buffer in local_buffers {
2705 let transaction = buffer
2706 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2707 .await?;
2708 buffer.update(&mut cx, |buffer, cx| {
2709 if let Some(transaction) = transaction {
2710 if !push_to_history {
2711 buffer.forget_transaction(transaction.id);
2712 }
2713 project_transaction.0.insert(cx.handle(), transaction);
2714 }
2715 });
2716 }
2717
2718 Ok(project_transaction)
2719 })
2720 }
2721
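    // Formats the given buffers. Remote buffers are formatted by the host project; for local
    // buffers, the language server's document or range formatting request is used and the
    // resulting LSP edits are applied inside a single transaction per buffer.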
2722 pub fn format(
2723 &self,
2724 buffers: HashSet<ModelHandle<Buffer>>,
2725 push_to_history: bool,
2726 cx: &mut ModelContext<Project>,
2727 ) -> Task<Result<ProjectTransaction>> {
2728 let mut local_buffers = Vec::new();
2729 let mut remote_buffers = None;
2730 for buffer_handle in buffers {
2731 let buffer = buffer_handle.read(cx);
2732 if let Some(file) = File::from_dyn(buffer.file()) {
2733 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2734 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2735 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2736 }
2737 } else {
2738 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2739 }
2740 } else {
2741 return Task::ready(Ok(Default::default()));
2742 }
2743 }
2744
2745 let remote_buffers = self.remote_id().zip(remote_buffers);
2746 let client = self.client.clone();
2747
2748 cx.spawn(|this, mut cx| async move {
2749 let mut project_transaction = ProjectTransaction::default();
2750
2751 if let Some((project_id, remote_buffers)) = remote_buffers {
2752 let response = client
2753 .request(proto::FormatBuffers {
2754 project_id,
2755 buffer_ids: remote_buffers
2756 .iter()
2757 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2758 .collect(),
2759 })
2760 .await?
2761 .transaction
2762 .ok_or_else(|| anyhow!("missing transaction"))?;
2763 project_transaction = this
2764 .update(&mut cx, |this, cx| {
2765 this.deserialize_project_transaction(response, push_to_history, cx)
2766 })
2767 .await?;
2768 }
2769
2770 for (buffer, buffer_abs_path, language_server) in local_buffers {
2771 let text_document = lsp::TextDocumentIdentifier::new(
2772 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2773 );
2774 let capabilities = &language_server.capabilities();
2775 let tab_size = cx.update(|cx| {
2776 let language_name = buffer.read(cx).language().map(|language| language.name());
2777 cx.global::<Settings>().tab_size(language_name.as_deref())
2778 });
2779 let lsp_edits = if capabilities
2780 .document_formatting_provider
2781 .as_ref()
2782 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2783 {
2784 language_server
2785 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2786 text_document,
2787 options: lsp::FormattingOptions {
2788 tab_size,
2789 insert_spaces: true,
2790 insert_final_newline: Some(true),
2791 ..Default::default()
2792 },
2793 work_done_progress_params: Default::default(),
2794 })
2795 .await?
2796 } else if capabilities
2797 .document_range_formatting_provider
2798 .as_ref()
2799 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2800 {
2801 let buffer_start = lsp::Position::new(0, 0);
2802 let buffer_end =
2803 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2804 language_server
2805 .request::<lsp::request::RangeFormatting>(
2806 lsp::DocumentRangeFormattingParams {
2807 text_document,
2808 range: lsp::Range::new(buffer_start, buffer_end),
2809 options: lsp::FormattingOptions {
2810 tab_size,
2811 insert_spaces: true,
2812 insert_final_newline: Some(true),
2813 ..Default::default()
2814 },
2815 work_done_progress_params: Default::default(),
2816 },
2817 )
2818 .await?
2819 } else {
2820 continue;
2821 };
2822
2823 if let Some(lsp_edits) = lsp_edits {
2824 let edits = this
2825 .update(&mut cx, |this, cx| {
2826 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2827 })
2828 .await?;
2829 buffer.update(&mut cx, |buffer, cx| {
2830 buffer.finalize_last_transaction();
2831 buffer.start_transaction();
2832 for (range, text) in edits {
2833 buffer.edit([(range, text)], cx);
2834 }
2835 if buffer.end_transaction(cx).is_some() {
2836 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2837 if !push_to_history {
2838 buffer.forget_transaction(transaction.id);
2839 }
2840 project_transaction.0.insert(cx.handle(), transaction);
2841 }
2842 });
2843 }
2844 }
2845
2846 Ok(project_transaction)
2847 })
2848 }
2849
2850 pub fn definition<T: ToPointUtf16>(
2851 &self,
2852 buffer: &ModelHandle<Buffer>,
2853 position: T,
2854 cx: &mut ModelContext<Self>,
2855 ) -> Task<Result<Vec<Location>>> {
2856 let position = position.to_point_utf16(buffer.read(cx));
2857 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2858 }
2859
2860 pub fn references<T: ToPointUtf16>(
2861 &self,
2862 buffer: &ModelHandle<Buffer>,
2863 position: T,
2864 cx: &mut ModelContext<Self>,
2865 ) -> Task<Result<Vec<Location>>> {
2866 let position = position.to_point_utf16(buffer.read(cx));
2867 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2868 }
2869
2870 pub fn document_highlights<T: ToPointUtf16>(
2871 &self,
2872 buffer: &ModelHandle<Buffer>,
2873 position: T,
2874 cx: &mut ModelContext<Self>,
2875 ) -> Task<Result<Vec<DocumentHighlight>>> {
2876 let position = position.to_point_utf16(buffer.read(cx));
2877
2878 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2879 }
2880
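    // Searches for workspace symbols matching the query across all running language servers,
    // resolving each result to a worktree-relative path and a display label. Remote projects
    // forward the query to the host.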
2881 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2882 if self.is_local() {
2883 let mut requests = Vec::new();
2884 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2885 let worktree_id = *worktree_id;
2886 if let Some(worktree) = self
2887 .worktree_for_id(worktree_id, cx)
2888 .and_then(|worktree| worktree.read(cx).as_local())
2889 {
2890 let lsp_adapter = lsp_adapter.clone();
2891 let worktree_abs_path = worktree.abs_path().clone();
2892 requests.push(
2893 language_server
2894 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2895 query: query.to_string(),
2896 ..Default::default()
2897 })
2898 .log_err()
2899 .map(move |response| {
2900 (
2901 lsp_adapter,
2902 worktree_id,
2903 worktree_abs_path,
2904 response.unwrap_or_default(),
2905 )
2906 }),
2907 );
2908 }
2909 }
2910
2911 cx.spawn_weak(|this, cx| async move {
2912 let responses = futures::future::join_all(requests).await;
2913 let this = if let Some(this) = this.upgrade(&cx) {
2914 this
2915 } else {
2916 return Ok(Default::default());
2917 };
2918 this.read_with(&cx, |this, cx| {
2919 let mut symbols = Vec::new();
2920 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2921 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2922 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2923 let mut worktree_id = source_worktree_id;
2924 let path;
2925 if let Some((worktree, rel_path)) =
2926 this.find_local_worktree(&abs_path, cx)
2927 {
2928 worktree_id = worktree.read(cx).id();
2929 path = rel_path;
2930 } else {
2931 path = relativize_path(&worktree_abs_path, &abs_path);
2932 }
2933
2934 let label = this
2935 .languages
2936 .select_language(&path)
2937 .and_then(|language| {
2938 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2939 })
2940 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2941 let signature = this.symbol_signature(worktree_id, &path);
2942
2943 Some(Symbol {
2944 source_worktree_id,
2945 worktree_id,
2946 language_server_name: adapter.name(),
2947 name: lsp_symbol.name,
2948 kind: lsp_symbol.kind,
2949 label,
2950 path,
2951 range: range_from_lsp(lsp_symbol.location.range),
2952 signature,
2953 })
2954 }));
2955 }
2956 Ok(symbols)
2957 })
2958 })
2959 } else if let Some(project_id) = self.remote_id() {
2960 let request = self.client.request(proto::GetProjectSymbols {
2961 project_id,
2962 query: query.to_string(),
2963 });
2964 cx.spawn_weak(|this, cx| async move {
2965 let response = request.await?;
2966 let mut symbols = Vec::new();
2967 if let Some(this) = this.upgrade(&cx) {
2968 this.read_with(&cx, |this, _| {
2969 symbols.extend(
2970 response
2971 .symbols
2972 .into_iter()
2973 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2974 );
2975 })
2976 }
2977 Ok(symbols)
2978 })
2979 } else {
2980 Task::ready(Ok(Default::default()))
2981 }
2982 }
2983
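    // Opens the buffer containing the given symbol, either via the language server that
    // produced it or, for remote projects, by asking the host.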
2984 pub fn open_buffer_for_symbol(
2985 &mut self,
2986 symbol: &Symbol,
2987 cx: &mut ModelContext<Self>,
2988 ) -> Task<Result<ModelHandle<Buffer>>> {
2989 if self.is_local() {
2990 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2991 symbol.source_worktree_id,
2992 symbol.language_server_name.clone(),
2993 )) {
2994 server.clone()
2995 } else {
2996 return Task::ready(Err(anyhow!(
2997 "language server for worktree and language not found"
2998 )));
2999 };
3000
3001 let worktree_abs_path = if let Some(worktree_abs_path) = self
3002 .worktree_for_id(symbol.worktree_id, cx)
3003 .and_then(|worktree| worktree.read(cx).as_local())
3004 .map(|local_worktree| local_worktree.abs_path())
3005 {
3006 worktree_abs_path
3007 } else {
3008 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3009 };
3010 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3011 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3012 uri
3013 } else {
3014 return Task::ready(Err(anyhow!("invalid symbol path")));
3015 };
3016
3017 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
3018 } else if let Some(project_id) = self.remote_id() {
3019 let request = self.client.request(proto::OpenBufferForSymbol {
3020 project_id,
3021 symbol: Some(serialize_symbol(symbol)),
3022 });
3023 cx.spawn(|this, mut cx| async move {
3024 let response = request.await?;
3025 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3026 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3027 .await
3028 })
3029 } else {
3030 Task::ready(Err(anyhow!("project does not have a remote id")))
3031 }
3032 }
3033
3034 pub fn hover<T: ToPointUtf16>(
3035 &self,
3036 buffer: &ModelHandle<Buffer>,
3037 position: T,
3038 cx: &mut ModelContext<Self>,
3039 ) -> Task<Result<Option<Hover>>> {
3040 let position = position.to_point_utf16(buffer.read(cx));
3041 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3042 }
3043
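    // Requests completions at the given position. For local buffers the language server is
    // queried directly and each completion's replacement range is validated (or inferred from
    // the word under the cursor when the server omits one); remote projects fetch completions
    // from the host.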
3044 pub fn completions<T: ToPointUtf16>(
3045 &self,
3046 source_buffer_handle: &ModelHandle<Buffer>,
3047 position: T,
3048 cx: &mut ModelContext<Self>,
3049 ) -> Task<Result<Vec<Completion>>> {
3050 let source_buffer_handle = source_buffer_handle.clone();
3051 let source_buffer = source_buffer_handle.read(cx);
3052 let buffer_id = source_buffer.remote_id();
3053 let language = source_buffer.language().cloned();
3054 let worktree;
3055 let buffer_abs_path;
3056 if let Some(file) = File::from_dyn(source_buffer.file()) {
3057 worktree = file.worktree.clone();
3058 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3059 } else {
3060 return Task::ready(Ok(Default::default()));
3061 };
3062
3063 let position = position.to_point_utf16(source_buffer);
3064 let anchor = source_buffer.anchor_after(position);
3065
3066 if worktree.read(cx).as_local().is_some() {
3067 let buffer_abs_path = buffer_abs_path.unwrap();
3068 let (_, lang_server) =
3069 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
3070 server.clone()
3071 } else {
3072 return Task::ready(Ok(Default::default()));
3073 };
3074
3075 cx.spawn(|_, cx| async move {
3076 let completions = lang_server
3077 .request::<lsp::request::Completion>(lsp::CompletionParams {
3078 text_document_position: lsp::TextDocumentPositionParams::new(
3079 lsp::TextDocumentIdentifier::new(
3080 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3081 ),
3082 point_to_lsp(position),
3083 ),
3084 context: Default::default(),
3085 work_done_progress_params: Default::default(),
3086 partial_result_params: Default::default(),
3087 })
3088 .await
3089 .context("lsp completion request failed")?;
3090
3091 let completions = if let Some(completions) = completions {
3092 match completions {
3093 lsp::CompletionResponse::Array(completions) => completions,
3094 lsp::CompletionResponse::List(list) => list.items,
3095 }
3096 } else {
3097 Default::default()
3098 };
3099
3100 source_buffer_handle.read_with(&cx, |this, _| {
3101 let snapshot = this.snapshot();
3102 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3103 let mut range_for_token = None;
3104 Ok(completions
3105 .into_iter()
3106 .filter_map(|lsp_completion| {
3107 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3108 // If the language server provides a range to overwrite, then
3109 // check that the range is valid.
3110 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3111 let range = range_from_lsp(edit.range);
3112 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3113 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3114 if start != range.start || end != range.end {
3115 log::info!("completion out of expected range");
3116 return None;
3117 }
3118 (
3119 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3120 edit.new_text.clone(),
3121 )
3122 }
3123 // If the language server does not provide a range, then infer
3124 // the range based on the syntax tree.
3125 None => {
3126 if position != clipped_position {
3127 log::info!("completion out of expected range");
3128 return None;
3129 }
3130 let Range { start, end } = range_for_token
3131 .get_or_insert_with(|| {
3132 let offset = position.to_offset(&snapshot);
3133 snapshot
3134 .range_for_word_token_at(offset)
3135 .unwrap_or_else(|| offset..offset)
3136 })
3137 .clone();
3138 let text = lsp_completion
3139 .insert_text
3140 .as_ref()
3141 .unwrap_or(&lsp_completion.label)
3142 .clone();
3143 (
3144 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3145 text.clone(),
3146 )
3147 }
3148 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3149 log::info!("unsupported insert/replace completion");
3150 return None;
3151 }
3152 };
3153
3154 Some(Completion {
3155 old_range,
3156 new_text,
3157 label: language
3158 .as_ref()
3159 .and_then(|l| l.label_for_completion(&lsp_completion))
3160 .unwrap_or_else(|| {
3161 CodeLabel::plain(
3162 lsp_completion.label.clone(),
3163 lsp_completion.filter_text.as_deref(),
3164 )
3165 }),
3166 lsp_completion,
3167 })
3168 })
3169 .collect())
3170 })
3171 })
3172 } else if let Some(project_id) = self.remote_id() {
3173 let rpc = self.client.clone();
3174 let message = proto::GetCompletions {
3175 project_id,
3176 buffer_id,
3177 position: Some(language::proto::serialize_anchor(&anchor)),
3178 version: serialize_version(&source_buffer.version()),
3179 };
3180 cx.spawn_weak(|_, mut cx| async move {
3181 let response = rpc.request(message).await?;
3182
3183 source_buffer_handle
3184 .update(&mut cx, |buffer, _| {
3185 buffer.wait_for_version(deserialize_version(response.version))
3186 })
3187 .await;
3188
3189 response
3190 .completions
3191 .into_iter()
3192 .map(|completion| {
3193 language::proto::deserialize_completion(completion, language.as_ref())
3194 })
3195 .collect()
3196 })
3197 } else {
3198 Task::ready(Ok(Default::default()))
3199 }
3200 }
3201
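    // Resolves a completion item and applies any additional text edits it carries (e.g. an
    // auto-inserted import), returning the transaction that was applied, if any.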
3202 pub fn apply_additional_edits_for_completion(
3203 &self,
3204 buffer_handle: ModelHandle<Buffer>,
3205 completion: Completion,
3206 push_to_history: bool,
3207 cx: &mut ModelContext<Self>,
3208 ) -> Task<Result<Option<Transaction>>> {
3209 let buffer = buffer_handle.read(cx);
3210 let buffer_id = buffer.remote_id();
3211
3212 if self.is_local() {
3213 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3214 {
3215 server.clone()
3216 } else {
3217 return Task::ready(Ok(Default::default()));
3218 };
3219
3220 cx.spawn(|this, mut cx| async move {
3221 let resolved_completion = lang_server
3222 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3223 .await?;
3224 if let Some(edits) = resolved_completion.additional_text_edits {
3225 let edits = this
3226 .update(&mut cx, |this, cx| {
3227 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3228 })
3229 .await?;
3230 buffer_handle.update(&mut cx, |buffer, cx| {
3231 buffer.finalize_last_transaction();
3232 buffer.start_transaction();
3233 for (range, text) in edits {
3234 buffer.edit([(range, text)], cx);
3235 }
3236 let transaction = if buffer.end_transaction(cx).is_some() {
3237 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3238 if !push_to_history {
3239 buffer.forget_transaction(transaction.id);
3240 }
3241 Some(transaction)
3242 } else {
3243 None
3244 };
3245 Ok(transaction)
3246 })
3247 } else {
3248 Ok(None)
3249 }
3250 })
3251 } else if let Some(project_id) = self.remote_id() {
3252 let client = self.client.clone();
3253 cx.spawn(|_, mut cx| async move {
3254 let response = client
3255 .request(proto::ApplyCompletionAdditionalEdits {
3256 project_id,
3257 buffer_id,
3258 completion: Some(language::proto::serialize_completion(&completion)),
3259 })
3260 .await?;
3261
3262 if let Some(transaction) = response.transaction {
3263 let transaction = language::proto::deserialize_transaction(transaction)?;
3264 buffer_handle
3265 .update(&mut cx, |buffer, _| {
3266 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3267 })
3268 .await;
3269 if push_to_history {
3270 buffer_handle.update(&mut cx, |buffer, _| {
3271 buffer.push_transaction(transaction.clone(), Instant::now());
3272 });
3273 }
3274 Ok(Some(transaction))
3275 } else {
3276 Ok(None)
3277 }
3278 })
3279 } else {
3280 Task::ready(Err(anyhow!("project does not have a remote id")))
3281 }
3282 }
3283
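    // Requests code actions for the given range, passing along the diagnostics that overlap it.
    // Local buffers query the language server directly; remote projects ask the host.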
3284 pub fn code_actions<T: Clone + ToOffset>(
3285 &self,
3286 buffer_handle: &ModelHandle<Buffer>,
3287 range: Range<T>,
3288 cx: &mut ModelContext<Self>,
3289 ) -> Task<Result<Vec<CodeAction>>> {
3290 let buffer_handle = buffer_handle.clone();
3291 let buffer = buffer_handle.read(cx);
3292 let snapshot = buffer.snapshot();
3293 let relevant_diagnostics = snapshot
3294 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3295 .map(|entry| entry.to_lsp_diagnostic_stub())
3296 .collect();
3297 let buffer_id = buffer.remote_id();
3298 let worktree;
3299 let buffer_abs_path;
3300 if let Some(file) = File::from_dyn(buffer.file()) {
3301 worktree = file.worktree.clone();
3302 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3303 } else {
3304 return Task::ready(Ok(Default::default()));
3305 };
3306 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3307
3308 if worktree.read(cx).as_local().is_some() {
3309 let buffer_abs_path = buffer_abs_path.unwrap();
3310 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3311 {
3312 server.clone()
3313 } else {
3314 return Task::ready(Ok(Default::default()));
3315 };
3316
3317 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3318 cx.foreground().spawn(async move {
3319                 if lang_server.capabilities().code_action_provider.is_none() {
3320 return Ok(Default::default());
3321 }
3322
3323 Ok(lang_server
3324 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3325 text_document: lsp::TextDocumentIdentifier::new(
3326 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3327 ),
3328 range: lsp_range,
3329 work_done_progress_params: Default::default(),
3330 partial_result_params: Default::default(),
3331 context: lsp::CodeActionContext {
3332 diagnostics: relevant_diagnostics,
3333 only: Some(vec![
3334 lsp::CodeActionKind::QUICKFIX,
3335 lsp::CodeActionKind::REFACTOR,
3336 lsp::CodeActionKind::REFACTOR_EXTRACT,
3337 lsp::CodeActionKind::SOURCE,
3338 ]),
3339 },
3340 })
3341 .await?
3342 .unwrap_or_default()
3343 .into_iter()
3344 .filter_map(|entry| {
3345 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3346 Some(CodeAction {
3347 range: range.clone(),
3348 lsp_action,
3349 })
3350 } else {
3351 None
3352 }
3353 })
3354 .collect())
3355 })
3356 } else if let Some(project_id) = self.remote_id() {
3357 let rpc = self.client.clone();
3358 let version = buffer.version();
3359 cx.spawn_weak(|_, mut cx| async move {
3360 let response = rpc
3361 .request(proto::GetCodeActions {
3362 project_id,
3363 buffer_id,
3364 start: Some(language::proto::serialize_anchor(&range.start)),
3365 end: Some(language::proto::serialize_anchor(&range.end)),
3366 version: serialize_version(&version),
3367 })
3368 .await?;
3369
3370 buffer_handle
3371 .update(&mut cx, |buffer, _| {
3372 buffer.wait_for_version(deserialize_version(response.version))
3373 })
3374 .await;
3375
3376 response
3377 .actions
3378 .into_iter()
3379 .map(language::proto::deserialize_code_action)
3380 .collect()
3381 })
3382 } else {
3383 Task::ready(Ok(Default::default()))
3384 }
3385 }
3386
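    // Applies a code action, resolving it first if the server supplied resolve data, or
    // re-requesting actions for the range when it didn't. The action's workspace edit (or the
    // workspace edits produced by executing its command) is returned as a project transaction.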
3387 pub fn apply_code_action(
3388 &self,
3389 buffer_handle: ModelHandle<Buffer>,
3390 mut action: CodeAction,
3391 push_to_history: bool,
3392 cx: &mut ModelContext<Self>,
3393 ) -> Task<Result<ProjectTransaction>> {
3394 if self.is_local() {
3395 let buffer = buffer_handle.read(cx);
3396 let (lsp_adapter, lang_server) =
3397 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3398 server.clone()
3399 } else {
3400 return Task::ready(Ok(Default::default()));
3401 };
3402 let range = action.range.to_point_utf16(buffer);
3403
3404 cx.spawn(|this, mut cx| async move {
3405 if let Some(lsp_range) = action
3406 .lsp_action
3407 .data
3408 .as_mut()
3409 .and_then(|d| d.get_mut("codeActionParams"))
3410 .and_then(|d| d.get_mut("range"))
3411 {
3412 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3413 action.lsp_action = lang_server
3414 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3415 .await?;
3416 } else {
3417 let actions = this
3418 .update(&mut cx, |this, cx| {
3419 this.code_actions(&buffer_handle, action.range, cx)
3420 })
3421 .await?;
3422 action.lsp_action = actions
3423 .into_iter()
3424 .find(|a| a.lsp_action.title == action.lsp_action.title)
3425 .ok_or_else(|| anyhow!("code action is outdated"))?
3426 .lsp_action;
3427 }
3428
3429 if let Some(edit) = action.lsp_action.edit {
3430 Self::deserialize_workspace_edit(
3431 this,
3432 edit,
3433 push_to_history,
3434 lsp_adapter,
3435 lang_server,
3436 &mut cx,
3437 )
3438 .await
3439 } else if let Some(command) = action.lsp_action.command {
3440 this.update(&mut cx, |this, _| {
3441 this.last_workspace_edits_by_language_server
3442 .remove(&lang_server.server_id());
3443 });
3444 lang_server
3445 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3446 command: command.command,
3447 arguments: command.arguments.unwrap_or_default(),
3448 ..Default::default()
3449 })
3450 .await?;
3451 Ok(this.update(&mut cx, |this, _| {
3452 this.last_workspace_edits_by_language_server
3453 .remove(&lang_server.server_id())
3454 .unwrap_or_default()
3455 }))
3456 } else {
3457 Ok(ProjectTransaction::default())
3458 }
3459 })
3460 } else if let Some(project_id) = self.remote_id() {
3461 let client = self.client.clone();
3462 let request = proto::ApplyCodeAction {
3463 project_id,
3464 buffer_id: buffer_handle.read(cx).remote_id(),
3465 action: Some(language::proto::serialize_code_action(&action)),
3466 };
3467 cx.spawn(|this, mut cx| async move {
3468 let response = client
3469 .request(request)
3470 .await?
3471 .transaction
3472 .ok_or_else(|| anyhow!("missing transaction"))?;
3473 this.update(&mut cx, |this, cx| {
3474 this.deserialize_project_transaction(response, push_to_history, cx)
3475 })
3476 .await
3477 })
3478 } else {
3479 Task::ready(Err(anyhow!("project does not have a remote id")))
3480 }
3481 }
3482
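    // Applies an LSP workspace edit: file creations, renames, and deletions are performed via
    // the project's `Fs`, and text edits are applied to the affected buffers, producing one
    // transaction per edited buffer.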
3483 async fn deserialize_workspace_edit(
3484 this: ModelHandle<Self>,
3485 edit: lsp::WorkspaceEdit,
3486 push_to_history: bool,
3487 lsp_adapter: Arc<dyn LspAdapter>,
3488 language_server: Arc<LanguageServer>,
3489 cx: &mut AsyncAppContext,
3490 ) -> Result<ProjectTransaction> {
3491 let fs = this.read_with(cx, |this, _| this.fs.clone());
3492 let mut operations = Vec::new();
3493 if let Some(document_changes) = edit.document_changes {
3494 match document_changes {
3495 lsp::DocumentChanges::Edits(edits) => {
3496 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3497 }
3498 lsp::DocumentChanges::Operations(ops) => operations = ops,
3499 }
3500 } else if let Some(changes) = edit.changes {
3501 operations.extend(changes.into_iter().map(|(uri, edits)| {
3502 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3503 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3504 uri,
3505 version: None,
3506 },
3507 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3508 })
3509 }));
3510 }
3511
3512 let mut project_transaction = ProjectTransaction::default();
3513 for operation in operations {
3514 match operation {
3515 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3516 let abs_path = op
3517 .uri
3518 .to_file_path()
3519 .map_err(|_| anyhow!("can't convert URI to path"))?;
3520
3521 if let Some(parent_path) = abs_path.parent() {
3522 fs.create_dir(parent_path).await?;
3523 }
3524 if abs_path.ends_with("/") {
3525 fs.create_dir(&abs_path).await?;
3526 } else {
3527 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3528 .await?;
3529 }
3530 }
3531 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3532 let source_abs_path = op
3533 .old_uri
3534 .to_file_path()
3535 .map_err(|_| anyhow!("can't convert URI to path"))?;
3536 let target_abs_path = op
3537 .new_uri
3538 .to_file_path()
3539 .map_err(|_| anyhow!("can't convert URI to path"))?;
3540 fs.rename(
3541 &source_abs_path,
3542 &target_abs_path,
3543 op.options.map(Into::into).unwrap_or_default(),
3544 )
3545 .await?;
3546 }
3547 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3548 let abs_path = op
3549 .uri
3550 .to_file_path()
3551 .map_err(|_| anyhow!("can't convert URI to path"))?;
3552 let options = op.options.map(Into::into).unwrap_or_default();
3553 if abs_path.ends_with("/") {
3554 fs.remove_dir(&abs_path, options).await?;
3555 } else {
3556 fs.remove_file(&abs_path, options).await?;
3557 }
3558 }
3559 lsp::DocumentChangeOperation::Edit(op) => {
3560 let buffer_to_edit = this
3561 .update(cx, |this, cx| {
3562 this.open_local_buffer_via_lsp(
3563 op.text_document.uri,
3564 lsp_adapter.clone(),
3565 language_server.clone(),
3566 cx,
3567 )
3568 })
3569 .await?;
3570
3571 let edits = this
3572 .update(cx, |this, cx| {
3573 let edits = op.edits.into_iter().map(|edit| match edit {
3574 lsp::OneOf::Left(edit) => edit,
3575 lsp::OneOf::Right(edit) => edit.text_edit,
3576 });
3577 this.edits_from_lsp(
3578 &buffer_to_edit,
3579 edits,
3580 op.text_document.version,
3581 cx,
3582 )
3583 })
3584 .await?;
3585
3586 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3587 buffer.finalize_last_transaction();
3588 buffer.start_transaction();
3589 for (range, text) in edits {
3590 buffer.edit([(range, text)], cx);
3591 }
3592 let transaction = if buffer.end_transaction(cx).is_some() {
3593 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3594 if !push_to_history {
3595 buffer.forget_transaction(transaction.id);
3596 }
3597 Some(transaction)
3598 } else {
3599 None
3600 };
3601
3602 transaction
3603 });
3604 if let Some(transaction) = transaction {
3605 project_transaction.0.insert(buffer_to_edit, transaction);
3606 }
3607 }
3608 }
3609 }
3610
3611 Ok(project_transaction)
3612 }
3613
3614 pub fn prepare_rename<T: ToPointUtf16>(
3615 &self,
3616 buffer: ModelHandle<Buffer>,
3617 position: T,
3618 cx: &mut ModelContext<Self>,
3619 ) -> Task<Result<Option<Range<Anchor>>>> {
3620 let position = position.to_point_utf16(buffer.read(cx));
3621 self.request_lsp(buffer, PrepareRename { position }, cx)
3622 }
3623
3624 pub fn perform_rename<T: ToPointUtf16>(
3625 &self,
3626 buffer: ModelHandle<Buffer>,
3627 position: T,
3628 new_name: String,
3629 push_to_history: bool,
3630 cx: &mut ModelContext<Self>,
3631 ) -> Task<Result<ProjectTransaction>> {
3632 let position = position.to_point_utf16(buffer.read(cx));
3633 self.request_lsp(
3634 buffer,
3635 PerformRename {
3636 position,
3637 new_name,
3638 push_to_history,
3639 },
3640 cx,
3641 )
3642 }
3643
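    // Searches the project. Visible worktrees are scanned in parallel on background threads to
    // find candidate files, matching files are opened as buffers, and each buffer is searched
    // for the query's match ranges. Remote projects delegate the search to the host.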
3644 pub fn search(
3645 &self,
3646 query: SearchQuery,
3647 cx: &mut ModelContext<Self>,
3648 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3649 if self.is_local() {
3650 let snapshots = self
3651 .visible_worktrees(cx)
3652 .filter_map(|tree| {
3653 let tree = tree.read(cx).as_local()?;
3654 Some(tree.snapshot())
3655 })
3656 .collect::<Vec<_>>();
3657
3658 let background = cx.background().clone();
3659 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3660 if path_count == 0 {
3661 return Task::ready(Ok(Default::default()));
3662 }
3663 let workers = background.num_cpus().min(path_count);
3664 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3665 cx.background()
3666 .spawn({
3667 let fs = self.fs.clone();
3668 let background = cx.background().clone();
3669 let query = query.clone();
3670 async move {
3671 let fs = &fs;
3672 let query = &query;
3673 let matching_paths_tx = &matching_paths_tx;
3674 let paths_per_worker = (path_count + workers - 1) / workers;
3675 let snapshots = &snapshots;
3676 background
3677 .scoped(|scope| {
3678 for worker_ix in 0..workers {
3679 let worker_start_ix = worker_ix * paths_per_worker;
3680 let worker_end_ix = worker_start_ix + paths_per_worker;
3681 scope.spawn(async move {
3682 let mut snapshot_start_ix = 0;
3683 let mut abs_path = PathBuf::new();
3684 for snapshot in snapshots {
3685 let snapshot_end_ix =
3686 snapshot_start_ix + snapshot.visible_file_count();
3687 if worker_end_ix <= snapshot_start_ix {
3688 break;
3689 } else if worker_start_ix > snapshot_end_ix {
3690 snapshot_start_ix = snapshot_end_ix;
3691 continue;
3692 } else {
3693 let start_in_snapshot = worker_start_ix
3694 .saturating_sub(snapshot_start_ix);
3695 let end_in_snapshot =
3696 cmp::min(worker_end_ix, snapshot_end_ix)
3697 - snapshot_start_ix;
3698
3699 for entry in snapshot
3700 .files(false, start_in_snapshot)
3701 .take(end_in_snapshot - start_in_snapshot)
3702 {
3703 if matching_paths_tx.is_closed() {
3704 break;
3705 }
3706
3707 abs_path.clear();
3708 abs_path.push(&snapshot.abs_path());
3709 abs_path.push(&entry.path);
3710 let matches = if let Some(file) =
3711 fs.open_sync(&abs_path).await.log_err()
3712 {
3713 query.detect(file).unwrap_or(false)
3714 } else {
3715 false
3716 };
3717
3718 if matches {
3719 let project_path =
3720 (snapshot.id(), entry.path.clone());
3721 if matching_paths_tx
3722 .send(project_path)
3723 .await
3724 .is_err()
3725 {
3726 break;
3727 }
3728 }
3729 }
3730
3731 snapshot_start_ix = snapshot_end_ix;
3732 }
3733 }
3734 });
3735 }
3736 })
3737 .await;
3738 }
3739 })
3740 .detach();
3741
3742 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3743 let open_buffers = self
3744 .opened_buffers
3745 .values()
3746 .filter_map(|b| b.upgrade(cx))
3747 .collect::<HashSet<_>>();
3748 cx.spawn(|this, cx| async move {
3749 for buffer in &open_buffers {
3750 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3751 buffers_tx.send((buffer.clone(), snapshot)).await?;
3752 }
3753
3754 let open_buffers = Rc::new(RefCell::new(open_buffers));
3755 while let Some(project_path) = matching_paths_rx.next().await {
3756 if buffers_tx.is_closed() {
3757 break;
3758 }
3759
3760 let this = this.clone();
3761 let open_buffers = open_buffers.clone();
3762 let buffers_tx = buffers_tx.clone();
3763 cx.spawn(|mut cx| async move {
3764 if let Some(buffer) = this
3765 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3766 .await
3767 .log_err()
3768 {
3769 if open_buffers.borrow_mut().insert(buffer.clone()) {
3770 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3771 buffers_tx.send((buffer, snapshot)).await?;
3772 }
3773 }
3774
3775 Ok::<_, anyhow::Error>(())
3776 })
3777 .detach();
3778 }
3779
3780 Ok::<_, anyhow::Error>(())
3781 })
3782 .detach_and_log_err(cx);
3783
3784 let background = cx.background().clone();
3785 cx.background().spawn(async move {
3786 let query = &query;
3787 let mut matched_buffers = Vec::new();
3788 for _ in 0..workers {
3789 matched_buffers.push(HashMap::default());
3790 }
3791 background
3792 .scoped(|scope| {
3793 for worker_matched_buffers in matched_buffers.iter_mut() {
3794 let mut buffers_rx = buffers_rx.clone();
3795 scope.spawn(async move {
3796 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3797 let buffer_matches = query
3798 .search(snapshot.as_rope())
3799 .await
3800 .iter()
3801 .map(|range| {
3802 snapshot.anchor_before(range.start)
3803 ..snapshot.anchor_after(range.end)
3804 })
3805 .collect::<Vec<_>>();
3806 if !buffer_matches.is_empty() {
3807 worker_matched_buffers
3808 .insert(buffer.clone(), buffer_matches);
3809 }
3810 }
3811 });
3812 }
3813 })
3814 .await;
3815 Ok(matched_buffers.into_iter().flatten().collect())
3816 })
3817 } else if let Some(project_id) = self.remote_id() {
3818 let request = self.client.request(query.to_proto(project_id));
3819 cx.spawn(|this, mut cx| async move {
3820 let response = request.await?;
3821 let mut result = HashMap::default();
3822 for location in response.locations {
3823 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3824 let target_buffer = this
3825 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3826 .await?;
3827 let start = location
3828 .start
3829 .and_then(deserialize_anchor)
3830 .ok_or_else(|| anyhow!("missing target start"))?;
3831 let end = location
3832 .end
3833 .and_then(deserialize_anchor)
3834 .ok_or_else(|| anyhow!("missing target end"))?;
3835                     result
3836                         .entry(target_buffer)
3837                         .or_default()
3838                         .push(start..end);
3839 }
3840 Ok(result)
3841 })
3842 } else {
3843 Task::ready(Ok(Default::default()))
3844 }
3845 }
3846
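    /// Dispatches an LSP request for the given buffer. On a local project the
    /// request is sent to the buffer's language server (returning a default
    /// response if the server lacks the required capability); on a remote project
    /// it is forwarded to the host over RPC. Otherwise a default response is
    /// returned immediately.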
3847 fn request_lsp<R: LspCommand>(
3848 &self,
3849 buffer_handle: ModelHandle<Buffer>,
3850 request: R,
3851 cx: &mut ModelContext<Self>,
3852 ) -> Task<Result<R::Response>>
3853 where
3854 <R::LspRequest as lsp::request::Request>::Result: Send,
3855 {
3856 let buffer = buffer_handle.read(cx);
3857 if self.is_local() {
3858 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3859 if let Some((file, (_, language_server))) =
3860 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3861 {
3862 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3863 return cx.spawn(|this, cx| async move {
3864 if !request.check_capabilities(&language_server.capabilities()) {
3865 return Ok(Default::default());
3866 }
3867
3868 let response = language_server
3869 .request::<R::LspRequest>(lsp_params)
3870 .await
3871 .context("lsp request failed")?;
3872 request
3873 .response_from_lsp(response, this, buffer_handle, cx)
3874 .await
3875 });
3876 }
3877 } else if let Some(project_id) = self.remote_id() {
3878 let rpc = self.client.clone();
3879 let message = request.to_proto(project_id, buffer);
3880 return cx.spawn(|this, cx| async move {
3881 let response = rpc.request(message).await?;
3882 request
3883 .response_from_proto(response, this, buffer_handle, cx)
3884 .await
3885 });
3886 }
3887 Task::ready(Ok(Default::default()))
3888 }
3889
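    /// Returns the worktree containing `abs_path` along with the path relative to
    /// that worktree's root, creating a new local worktree rooted at `abs_path`
    /// when no existing worktree contains it.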
3890 pub fn find_or_create_local_worktree(
3891 &mut self,
3892 abs_path: impl AsRef<Path>,
3893 visible: bool,
3894 cx: &mut ModelContext<Self>,
3895 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3896 let abs_path = abs_path.as_ref();
3897 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3898 Task::ready(Ok((tree.clone(), relative_path.into())))
3899 } else {
3900 let worktree = self.create_local_worktree(abs_path, visible, cx);
3901 cx.foreground()
3902 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3903 }
3904 }
3905
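    /// Returns the local worktree whose root is an ancestor of (or equal to)
    /// `abs_path`, together with `abs_path` relative to that root.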
3906 pub fn find_local_worktree(
3907 &self,
3908 abs_path: &Path,
3909 cx: &AppContext,
3910 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3911 for tree in self.worktrees(cx) {
3912 if let Some(relative_path) = tree
3913 .read(cx)
3914 .as_local()
3915 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3916 {
3917 return Some((tree.clone(), relative_path.into()));
3918 }
3919 }
3920 None
3921 }
3922
3923 pub fn is_shared(&self) -> bool {
3924 match &self.client_state {
3925 ProjectClientState::Local { is_shared, .. } => *is_shared,
3926 ProjectClientState::Remote { .. } => false,
3927 }
3928 }
3929
3930 fn create_local_worktree(
3931 &mut self,
3932 abs_path: impl AsRef<Path>,
3933 visible: bool,
3934 cx: &mut ModelContext<Self>,
3935 ) -> Task<Result<ModelHandle<Worktree>>> {
3936 let fs = self.fs.clone();
3937 let client = self.client.clone();
3938 let next_entry_id = self.next_entry_id.clone();
3939 let path: Arc<Path> = abs_path.as_ref().into();
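        // Concurrent requests for the same path share a single loading task, which
        // removes itself from `loading_local_worktrees` once it completes.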
3940 let task = self
3941 .loading_local_worktrees
3942 .entry(path.clone())
3943 .or_insert_with(|| {
3944 cx.spawn(|project, mut cx| {
3945 async move {
3946 let worktree = Worktree::local(
3947 client.clone(),
3948 path.clone(),
3949 visible,
3950 fs,
3951 next_entry_id,
3952 &mut cx,
3953 )
3954 .await;
3955 project.update(&mut cx, |project, _| {
3956 project.loading_local_worktrees.remove(&path);
3957 });
3958 let worktree = worktree?;
3959
3960 let project_id = project.update(&mut cx, |project, cx| {
3961 project.add_worktree(&worktree, cx);
3962 project.shared_remote_id()
3963 });
3964
3965 if let Some(project_id) = project_id {
3966 worktree
3967 .update(&mut cx, |worktree, cx| {
3968 worktree.as_local_mut().unwrap().share(project_id, cx)
3969 })
3970 .await
3971 .log_err();
3972 }
3973
3974 Ok(worktree)
3975 }
3976                     .map_err(Arc::new)
3977 })
3978 .shared()
3979 })
3980 .clone();
3981 cx.foreground().spawn(async move {
3982 match task.await {
3983 Ok(worktree) => Ok(worktree),
3984 Err(err) => Err(anyhow!("{}", err)),
3985 }
3986 })
3987 }
3988
3989 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3990 self.worktrees.retain(|worktree| {
3991 if let Some(worktree) = worktree.upgrade(cx) {
3992 let id = worktree.read(cx).id();
3993 if id == id_to_remove {
3994 cx.emit(Event::WorktreeRemoved(id));
3995 false
3996 } else {
3997 true
3998 }
3999 } else {
4000 false
4001 }
4002 });
4003 self.metadata_changed(true, cx);
4004 cx.notify();
4005 }
4006
4007 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4008 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4009 if worktree.read(cx).is_local() {
4010 cx.subscribe(&worktree, |this, worktree, _, cx| {
4011 this.update_local_worktree_buffers(worktree, cx);
4012 })
4013 .detach();
4014 }
4015
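        // Shared, visible, and remote worktrees are held strongly; other worktrees
        // are held weakly and pruned from the list when they are released.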
4016 let push_strong_handle = {
4017 let worktree = worktree.read(cx);
4018 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4019 };
4020 if push_strong_handle {
4021 self.worktrees
4022 .push(WorktreeHandle::Strong(worktree.clone()));
4023 } else {
4024 cx.observe_release(&worktree, |this, _, cx| {
4025 this.worktrees
4026 .retain(|worktree| worktree.upgrade(cx).is_some());
4027 cx.notify();
4028 })
4029 .detach();
4030 self.worktrees
4031 .push(WorktreeHandle::Weak(worktree.downgrade()));
4032 }
4033 self.metadata_changed(true, cx);
4034 cx.emit(Event::WorktreeAdded);
4035 cx.notify();
4036 }
4037
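    /// Refreshes the `File` associated with every open buffer in the given local
    /// worktree after its snapshot changes: dropped buffers are removed, peers are
    /// notified of updated file metadata when the project is shared, and renamed
    /// buffers are re-registered with their language servers.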
4038 fn update_local_worktree_buffers(
4039 &mut self,
4040 worktree_handle: ModelHandle<Worktree>,
4041 cx: &mut ModelContext<Self>,
4042 ) {
4043 let snapshot = worktree_handle.read(cx).snapshot();
4044 let mut buffers_to_delete = Vec::new();
4045 let mut renamed_buffers = Vec::new();
4046 for (buffer_id, buffer) in &self.opened_buffers {
4047 if let Some(buffer) = buffer.upgrade(cx) {
4048 buffer.update(cx, |buffer, cx| {
4049 if let Some(old_file) = File::from_dyn(buffer.file()) {
4050 if old_file.worktree != worktree_handle {
4051 return;
4052 }
4053
4054 let new_file = if let Some(entry) = old_file
4055 .entry_id
4056 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4057 {
4058 File {
4059 is_local: true,
4060 entry_id: Some(entry.id),
4061 mtime: entry.mtime,
4062 path: entry.path.clone(),
4063 worktree: worktree_handle.clone(),
4064 }
4065 } else if let Some(entry) =
4066 snapshot.entry_for_path(old_file.path().as_ref())
4067 {
4068 File {
4069 is_local: true,
4070 entry_id: Some(entry.id),
4071 mtime: entry.mtime,
4072 path: entry.path.clone(),
4073 worktree: worktree_handle.clone(),
4074 }
4075 } else {
4076 File {
4077 is_local: true,
4078 entry_id: None,
4079 path: old_file.path().clone(),
4080 mtime: old_file.mtime(),
4081 worktree: worktree_handle.clone(),
4082 }
4083 };
4084
4085 let old_path = old_file.abs_path(cx);
4086 if new_file.abs_path(cx) != old_path {
4087 renamed_buffers.push((cx.handle(), old_path));
4088 }
4089
4090 if let Some(project_id) = self.shared_remote_id() {
4091 self.client
4092 .send(proto::UpdateBufferFile {
4093 project_id,
4094 buffer_id: *buffer_id as u64,
4095 file: Some(new_file.to_proto()),
4096 })
4097 .log_err();
4098 }
4099 buffer.file_updated(Arc::new(new_file), cx).detach();
4100 }
4101 });
4102 } else {
4103 buffers_to_delete.push(*buffer_id);
4104 }
4105 }
4106
4107 for buffer_id in buffers_to_delete {
4108 self.opened_buffers.remove(&buffer_id);
4109 }
4110
4111 for (buffer, old_path) in renamed_buffers {
4112 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4113 self.assign_language_to_buffer(&buffer, cx);
4114 self.register_buffer_with_language_server(&buffer, cx);
4115 }
4116 }
4117
4118 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4119 let new_active_entry = entry.and_then(|project_path| {
4120 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4121 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4122 Some(entry.id)
4123 });
4124 if new_active_entry != self.active_entry {
4125 self.active_entry = new_active_entry;
4126 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4127 }
4128 }
4129
4130 pub fn language_servers_running_disk_based_diagnostics<'a>(
4131 &'a self,
4132 ) -> impl 'a + Iterator<Item = usize> {
4133 self.language_server_statuses
4134 .iter()
4135 .filter_map(|(id, status)| {
4136 if status.pending_diagnostic_updates > 0 {
4137 Some(*id)
4138 } else {
4139 None
4140 }
4141 })
4142 }
4143
4144 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4145 let mut summary = DiagnosticSummary::default();
4146 for (_, path_summary) in self.diagnostic_summaries(cx) {
4147 summary.error_count += path_summary.error_count;
4148 summary.warning_count += path_summary.warning_count;
4149 }
4150 summary
4151 }
4152
4153 pub fn diagnostic_summaries<'a>(
4154 &'a self,
4155 cx: &'a AppContext,
4156 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4157 self.worktrees(cx).flat_map(move |worktree| {
4158 let worktree = worktree.read(cx);
4159 let worktree_id = worktree.id();
4160 worktree
4161 .diagnostic_summaries()
4162 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4163 })
4164 }
4165
4166 pub fn disk_based_diagnostics_started(
4167 &mut self,
4168 language_server_id: usize,
4169 cx: &mut ModelContext<Self>,
4170 ) {
4171 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4172 }
4173
4174 pub fn disk_based_diagnostics_finished(
4175 &mut self,
4176 language_server_id: usize,
4177 cx: &mut ModelContext<Self>,
4178 ) {
4179 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4180 }
4181
4182 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4183 self.active_entry
4184 }
4185
4186 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4187 self.worktree_for_id(path.worktree_id, cx)?
4188 .read(cx)
4189 .entry_for_path(&path.path)
4190 .map(|entry| entry.id)
4191 }
4192
4193 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4194 let worktree = self.worktree_for_entry(entry_id, cx)?;
4195 let worktree = worktree.read(cx);
4196 let worktree_id = worktree.id();
4197 let path = worktree.entry_for_id(entry_id)?.path.clone();
4198 Some(ProjectPath { worktree_id, path })
4199 }
4200
4201 // RPC message handlers
4202
4203 async fn handle_request_join_project(
4204 this: ModelHandle<Self>,
4205 message: TypedEnvelope<proto::RequestJoinProject>,
4206 _: Arc<Client>,
4207 mut cx: AsyncAppContext,
4208 ) -> Result<()> {
4209 let user_id = message.payload.requester_id;
4210 if this.read_with(&cx, |project, _| {
4211 project.collaborators.values().any(|c| c.user.id == user_id)
4212 }) {
4213 this.update(&mut cx, |this, cx| {
4214 this.respond_to_join_request(user_id, true, cx)
4215 });
4216 } else {
4217 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4218 let user = user_store
4219 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4220 .await?;
4221 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4222 }
4223 Ok(())
4224 }
4225
4226 async fn handle_unregister_project(
4227 this: ModelHandle<Self>,
4228 _: TypedEnvelope<proto::UnregisterProject>,
4229 _: Arc<Client>,
4230 mut cx: AsyncAppContext,
4231 ) -> Result<()> {
4232 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4233 Ok(())
4234 }
4235
4236 async fn handle_project_unshared(
4237 this: ModelHandle<Self>,
4238 _: TypedEnvelope<proto::ProjectUnshared>,
4239 _: Arc<Client>,
4240 mut cx: AsyncAppContext,
4241 ) -> Result<()> {
4242 this.update(&mut cx, |this, cx| this.unshared(cx));
4243 Ok(())
4244 }
4245
4246 async fn handle_add_collaborator(
4247 this: ModelHandle<Self>,
4248 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4249 _: Arc<Client>,
4250 mut cx: AsyncAppContext,
4251 ) -> Result<()> {
4252 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4253 let collaborator = envelope
4254 .payload
4255 .collaborator
4256 .take()
4257 .ok_or_else(|| anyhow!("empty collaborator"))?;
4258
4259 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4260 this.update(&mut cx, |this, cx| {
4261 this.collaborators
4262 .insert(collaborator.peer_id, collaborator);
4263 cx.notify();
4264 });
4265
4266 Ok(())
4267 }
4268
4269 async fn handle_remove_collaborator(
4270 this: ModelHandle<Self>,
4271 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4272 _: Arc<Client>,
4273 mut cx: AsyncAppContext,
4274 ) -> Result<()> {
4275 this.update(&mut cx, |this, cx| {
4276 let peer_id = PeerId(envelope.payload.peer_id);
4277 let replica_id = this
4278 .collaborators
4279 .remove(&peer_id)
4280 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4281 .replica_id;
4282 for (_, buffer) in &this.opened_buffers {
4283 if let Some(buffer) = buffer.upgrade(cx) {
4284 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4285 }
4286 }
4287
4288 cx.emit(Event::CollaboratorLeft(peer_id));
4289 cx.notify();
4290 Ok(())
4291 })
4292 }
4293
4294 async fn handle_join_project_request_cancelled(
4295 this: ModelHandle<Self>,
4296 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4297 _: Arc<Client>,
4298 mut cx: AsyncAppContext,
4299 ) -> Result<()> {
4300 let user = this
4301 .update(&mut cx, |this, cx| {
4302 this.user_store.update(cx, |user_store, cx| {
4303 user_store.fetch_user(envelope.payload.requester_id, cx)
4304 })
4305 })
4306 .await?;
4307
4308 this.update(&mut cx, |_, cx| {
4309 cx.emit(Event::ContactCancelledJoinRequest(user));
4310 });
4311
4312 Ok(())
4313 }
4314
4315 async fn handle_update_project(
4316 this: ModelHandle<Self>,
4317 envelope: TypedEnvelope<proto::UpdateProject>,
4318 client: Arc<Client>,
4319 mut cx: AsyncAppContext,
4320 ) -> Result<()> {
4321 this.update(&mut cx, |this, cx| {
4322 let replica_id = this.replica_id();
4323 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4324
4325 let mut old_worktrees_by_id = this
4326 .worktrees
4327 .drain(..)
4328 .filter_map(|worktree| {
4329 let worktree = worktree.upgrade(cx)?;
4330 Some((worktree.read(cx).id(), worktree))
4331 })
4332 .collect::<HashMap<_, _>>();
4333
4334 for worktree in envelope.payload.worktrees {
4335 if let Some(old_worktree) =
4336 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4337 {
4338 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4339 } else {
4340 let worktree = proto::Worktree {
4341 id: worktree.id,
4342 root_name: worktree.root_name,
4343 entries: Default::default(),
4344 diagnostic_summaries: Default::default(),
4345 visible: worktree.visible,
4346 scan_id: 0,
4347 };
4348 let (worktree, load_task) =
4349 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4350 this.add_worktree(&worktree, cx);
4351 load_task.detach();
4352 }
4353 }
4354
4355 this.metadata_changed(true, cx);
4356 for (id, _) in old_worktrees_by_id {
4357 cx.emit(Event::WorktreeRemoved(id));
4358 }
4359
4360 Ok(())
4361 })
4362 }
4363
4364 async fn handle_update_worktree(
4365 this: ModelHandle<Self>,
4366 envelope: TypedEnvelope<proto::UpdateWorktree>,
4367 _: Arc<Client>,
4368 mut cx: AsyncAppContext,
4369 ) -> Result<()> {
4370 this.update(&mut cx, |this, cx| {
4371 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4372 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4373 worktree.update(cx, |worktree, _| {
4374 let worktree = worktree.as_remote_mut().unwrap();
4375 worktree.update_from_remote(envelope)
4376 })?;
4377 }
4378 Ok(())
4379 })
4380 }
4381
4382 async fn handle_create_project_entry(
4383 this: ModelHandle<Self>,
4384 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4385 _: Arc<Client>,
4386 mut cx: AsyncAppContext,
4387 ) -> Result<proto::ProjectEntryResponse> {
4388 let worktree = this.update(&mut cx, |this, cx| {
4389 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4390 this.worktree_for_id(worktree_id, cx)
4391 .ok_or_else(|| anyhow!("worktree not found"))
4392 })?;
4393 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4394 let entry = worktree
4395 .update(&mut cx, |worktree, cx| {
4396 let worktree = worktree.as_local_mut().unwrap();
4397 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4398 worktree.create_entry(path, envelope.payload.is_directory, cx)
4399 })
4400 .await?;
4401 Ok(proto::ProjectEntryResponse {
4402 entry: Some((&entry).into()),
4403 worktree_scan_id: worktree_scan_id as u64,
4404 })
4405 }
4406
4407 async fn handle_rename_project_entry(
4408 this: ModelHandle<Self>,
4409 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4410 _: Arc<Client>,
4411 mut cx: AsyncAppContext,
4412 ) -> Result<proto::ProjectEntryResponse> {
4413 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4414 let worktree = this.read_with(&cx, |this, cx| {
4415 this.worktree_for_entry(entry_id, cx)
4416 .ok_or_else(|| anyhow!("worktree not found"))
4417 })?;
4418 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4419 let entry = worktree
4420 .update(&mut cx, |worktree, cx| {
4421 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4422 worktree
4423 .as_local_mut()
4424 .unwrap()
4425 .rename_entry(entry_id, new_path, cx)
4426 .ok_or_else(|| anyhow!("invalid entry"))
4427 })?
4428 .await?;
4429 Ok(proto::ProjectEntryResponse {
4430 entry: Some((&entry).into()),
4431 worktree_scan_id: worktree_scan_id as u64,
4432 })
4433 }
4434
4435 async fn handle_copy_project_entry(
4436 this: ModelHandle<Self>,
4437 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4438 _: Arc<Client>,
4439 mut cx: AsyncAppContext,
4440 ) -> Result<proto::ProjectEntryResponse> {
4441 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4442 let worktree = this.read_with(&cx, |this, cx| {
4443 this.worktree_for_entry(entry_id, cx)
4444 .ok_or_else(|| anyhow!("worktree not found"))
4445 })?;
4446 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4447 let entry = worktree
4448 .update(&mut cx, |worktree, cx| {
4449 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4450 worktree
4451 .as_local_mut()
4452 .unwrap()
4453 .copy_entry(entry_id, new_path, cx)
4454 .ok_or_else(|| anyhow!("invalid entry"))
4455 })?
4456 .await?;
4457 Ok(proto::ProjectEntryResponse {
4458 entry: Some((&entry).into()),
4459 worktree_scan_id: worktree_scan_id as u64,
4460 })
4461 }
4462
4463 async fn handle_delete_project_entry(
4464 this: ModelHandle<Self>,
4465 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4466 _: Arc<Client>,
4467 mut cx: AsyncAppContext,
4468 ) -> Result<proto::ProjectEntryResponse> {
4469 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4470 let worktree = this.read_with(&cx, |this, cx| {
4471 this.worktree_for_entry(entry_id, cx)
4472 .ok_or_else(|| anyhow!("worktree not found"))
4473 })?;
4474 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4475 worktree
4476 .update(&mut cx, |worktree, cx| {
4477 worktree
4478 .as_local_mut()
4479 .unwrap()
4480 .delete_entry(entry_id, cx)
4481 .ok_or_else(|| anyhow!("invalid entry"))
4482 })?
4483 .await?;
4484 Ok(proto::ProjectEntryResponse {
4485 entry: None,
4486 worktree_scan_id: worktree_scan_id as u64,
4487 })
4488 }
4489
4490 async fn handle_update_diagnostic_summary(
4491 this: ModelHandle<Self>,
4492 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4493 _: Arc<Client>,
4494 mut cx: AsyncAppContext,
4495 ) -> Result<()> {
4496 this.update(&mut cx, |this, cx| {
4497 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4498 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4499 if let Some(summary) = envelope.payload.summary {
4500 let project_path = ProjectPath {
4501 worktree_id,
4502 path: Path::new(&summary.path).into(),
4503 };
4504 worktree.update(cx, |worktree, _| {
4505 worktree
4506 .as_remote_mut()
4507 .unwrap()
4508 .update_diagnostic_summary(project_path.path.clone(), &summary);
4509 });
4510 cx.emit(Event::DiagnosticsUpdated {
4511 language_server_id: summary.language_server_id as usize,
4512 path: project_path,
4513 });
4514 }
4515 }
4516 Ok(())
4517 })
4518 }
4519
4520 async fn handle_start_language_server(
4521 this: ModelHandle<Self>,
4522 envelope: TypedEnvelope<proto::StartLanguageServer>,
4523 _: Arc<Client>,
4524 mut cx: AsyncAppContext,
4525 ) -> Result<()> {
4526 let server = envelope
4527 .payload
4528 .server
4529 .ok_or_else(|| anyhow!("invalid server"))?;
4530 this.update(&mut cx, |this, cx| {
4531 this.language_server_statuses.insert(
4532 server.id as usize,
4533 LanguageServerStatus {
4534 name: server.name,
4535 pending_work: Default::default(),
4536 pending_diagnostic_updates: 0,
4537 },
4538 );
4539 cx.notify();
4540 });
4541 Ok(())
4542 }
4543
4544 async fn handle_update_language_server(
4545 this: ModelHandle<Self>,
4546 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4547 _: Arc<Client>,
4548 mut cx: AsyncAppContext,
4549 ) -> Result<()> {
4550 let language_server_id = envelope.payload.language_server_id as usize;
4551 match envelope
4552 .payload
4553 .variant
4554 .ok_or_else(|| anyhow!("invalid variant"))?
4555 {
4556 proto::update_language_server::Variant::WorkStart(payload) => {
4557 this.update(&mut cx, |this, cx| {
4558 this.on_lsp_work_start(language_server_id, payload.token, cx);
4559 })
4560 }
4561 proto::update_language_server::Variant::WorkProgress(payload) => {
4562 this.update(&mut cx, |this, cx| {
4563 this.on_lsp_work_progress(
4564 language_server_id,
4565 payload.token,
4566 LanguageServerProgress {
4567 message: payload.message,
4568 percentage: payload.percentage.map(|p| p as usize),
4569 last_update_at: Instant::now(),
4570 },
4571 cx,
4572 );
4573 })
4574 }
4575 proto::update_language_server::Variant::WorkEnd(payload) => {
4576 this.update(&mut cx, |this, cx| {
4577 this.on_lsp_work_end(language_server_id, payload.token, cx);
4578 })
4579 }
4580 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4581 this.update(&mut cx, |this, cx| {
4582 this.disk_based_diagnostics_started(language_server_id, cx);
4583 })
4584 }
4585 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4586 this.update(&mut cx, |this, cx| {
4587 this.disk_based_diagnostics_finished(language_server_id, cx)
4588 });
4589 }
4590 }
4591
4592 Ok(())
4593 }
4594
4595 async fn handle_update_buffer(
4596 this: ModelHandle<Self>,
4597 envelope: TypedEnvelope<proto::UpdateBuffer>,
4598 _: Arc<Client>,
4599 mut cx: AsyncAppContext,
4600 ) -> Result<()> {
4601 this.update(&mut cx, |this, cx| {
4602 let payload = envelope.payload.clone();
4603 let buffer_id = payload.buffer_id;
4604 let ops = payload
4605 .operations
4606 .into_iter()
4607                 .map(language::proto::deserialize_operation)
4608 .collect::<Result<Vec<_>, _>>()?;
4609 let is_remote = this.is_remote();
4610 match this.opened_buffers.entry(buffer_id) {
4611 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4612 OpenBuffer::Strong(buffer) => {
4613 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4614 }
4615 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4616 OpenBuffer::Weak(_) => {}
4617 },
4618 hash_map::Entry::Vacant(e) => {
4619 assert!(
4620 is_remote,
4621 "received buffer update from {:?}",
4622 envelope.original_sender_id
4623 );
4624 e.insert(OpenBuffer::Loading(ops));
4625 }
4626 }
4627 Ok(())
4628 })
4629 }
4630
4631 async fn handle_update_buffer_file(
4632 this: ModelHandle<Self>,
4633 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4634 _: Arc<Client>,
4635 mut cx: AsyncAppContext,
4636 ) -> Result<()> {
4637 this.update(&mut cx, |this, cx| {
4638 let payload = envelope.payload.clone();
4639 let buffer_id = payload.buffer_id;
4640 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4641 let worktree = this
4642 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4643 .ok_or_else(|| anyhow!("no such worktree"))?;
4644 let file = File::from_proto(file, worktree.clone(), cx)?;
4645 let buffer = this
4646 .opened_buffers
4647 .get_mut(&buffer_id)
4648 .and_then(|b| b.upgrade(cx))
4649 .ok_or_else(|| anyhow!("no such buffer"))?;
4650 buffer.update(cx, |buffer, cx| {
4651 buffer.file_updated(Arc::new(file), cx).detach();
4652 });
4653 Ok(())
4654 })
4655 }
4656
4657 async fn handle_save_buffer(
4658 this: ModelHandle<Self>,
4659 envelope: TypedEnvelope<proto::SaveBuffer>,
4660 _: Arc<Client>,
4661 mut cx: AsyncAppContext,
4662 ) -> Result<proto::BufferSaved> {
4663 let buffer_id = envelope.payload.buffer_id;
4664 let requested_version = deserialize_version(envelope.payload.version);
4665
4666 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4667 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4668 let buffer = this
4669 .opened_buffers
4670 .get(&buffer_id)
4671 .and_then(|buffer| buffer.upgrade(cx))
4672 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4673 Ok::<_, anyhow::Error>((project_id, buffer))
4674 })?;
4675 buffer
4676 .update(&mut cx, |buffer, _| {
4677 buffer.wait_for_version(requested_version)
4678 })
4679 .await;
4680
4681 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4682 Ok(proto::BufferSaved {
4683 project_id,
4684 buffer_id,
4685 version: serialize_version(&saved_version),
4686 mtime: Some(mtime.into()),
4687 })
4688 }
4689
4690 async fn handle_reload_buffers(
4691 this: ModelHandle<Self>,
4692 envelope: TypedEnvelope<proto::ReloadBuffers>,
4693 _: Arc<Client>,
4694 mut cx: AsyncAppContext,
4695 ) -> Result<proto::ReloadBuffersResponse> {
4696 let sender_id = envelope.original_sender_id()?;
4697 let reload = this.update(&mut cx, |this, cx| {
4698 let mut buffers = HashSet::default();
4699 for buffer_id in &envelope.payload.buffer_ids {
4700 buffers.insert(
4701 this.opened_buffers
4702 .get(buffer_id)
4703 .and_then(|buffer| buffer.upgrade(cx))
4704 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4705 );
4706 }
4707 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4708 })?;
4709
4710 let project_transaction = reload.await?;
4711 let project_transaction = this.update(&mut cx, |this, cx| {
4712 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4713 });
4714 Ok(proto::ReloadBuffersResponse {
4715 transaction: Some(project_transaction),
4716 })
4717 }
4718
4719 async fn handle_format_buffers(
4720 this: ModelHandle<Self>,
4721 envelope: TypedEnvelope<proto::FormatBuffers>,
4722 _: Arc<Client>,
4723 mut cx: AsyncAppContext,
4724 ) -> Result<proto::FormatBuffersResponse> {
4725 let sender_id = envelope.original_sender_id()?;
4726 let format = this.update(&mut cx, |this, cx| {
4727 let mut buffers = HashSet::default();
4728 for buffer_id in &envelope.payload.buffer_ids {
4729 buffers.insert(
4730 this.opened_buffers
4731 .get(buffer_id)
4732 .and_then(|buffer| buffer.upgrade(cx))
4733 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4734 );
4735 }
4736 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4737 })?;
4738
4739 let project_transaction = format.await?;
4740 let project_transaction = this.update(&mut cx, |this, cx| {
4741 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4742 });
4743 Ok(proto::FormatBuffersResponse {
4744 transaction: Some(project_transaction),
4745 })
4746 }
4747
4748 async fn handle_get_completions(
4749 this: ModelHandle<Self>,
4750 envelope: TypedEnvelope<proto::GetCompletions>,
4751 _: Arc<Client>,
4752 mut cx: AsyncAppContext,
4753 ) -> Result<proto::GetCompletionsResponse> {
4754 let position = envelope
4755 .payload
4756 .position
4757 .and_then(language::proto::deserialize_anchor)
4758 .ok_or_else(|| anyhow!("invalid position"))?;
4759 let version = deserialize_version(envelope.payload.version);
4760 let buffer = this.read_with(&cx, |this, cx| {
4761 this.opened_buffers
4762 .get(&envelope.payload.buffer_id)
4763 .and_then(|buffer| buffer.upgrade(cx))
4764 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4765 })?;
4766 buffer
4767 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4768 .await;
4769 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4770 let completions = this
4771 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4772 .await?;
4773
4774 Ok(proto::GetCompletionsResponse {
4775 completions: completions
4776 .iter()
4777 .map(language::proto::serialize_completion)
4778 .collect(),
4779 version: serialize_version(&version),
4780 })
4781 }
4782
4783 async fn handle_apply_additional_edits_for_completion(
4784 this: ModelHandle<Self>,
4785 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4786 _: Arc<Client>,
4787 mut cx: AsyncAppContext,
4788 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4789 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4790 let buffer = this
4791 .opened_buffers
4792 .get(&envelope.payload.buffer_id)
4793 .and_then(|buffer| buffer.upgrade(cx))
4794 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4795 let language = buffer.read(cx).language();
4796 let completion = language::proto::deserialize_completion(
4797 envelope
4798 .payload
4799 .completion
4800 .ok_or_else(|| anyhow!("invalid completion"))?,
4801 language,
4802 )?;
4803 Ok::<_, anyhow::Error>(
4804 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4805 )
4806 })?;
4807
4808 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4809 transaction: apply_additional_edits
4810 .await?
4811 .as_ref()
4812 .map(language::proto::serialize_transaction),
4813 })
4814 }
4815
4816 async fn handle_get_code_actions(
4817 this: ModelHandle<Self>,
4818 envelope: TypedEnvelope<proto::GetCodeActions>,
4819 _: Arc<Client>,
4820 mut cx: AsyncAppContext,
4821 ) -> Result<proto::GetCodeActionsResponse> {
4822 let start = envelope
4823 .payload
4824 .start
4825 .and_then(language::proto::deserialize_anchor)
4826 .ok_or_else(|| anyhow!("invalid start"))?;
4827 let end = envelope
4828 .payload
4829 .end
4830 .and_then(language::proto::deserialize_anchor)
4831 .ok_or_else(|| anyhow!("invalid end"))?;
4832 let buffer = this.update(&mut cx, |this, cx| {
4833 this.opened_buffers
4834 .get(&envelope.payload.buffer_id)
4835 .and_then(|buffer| buffer.upgrade(cx))
4836 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4837 })?;
4838 buffer
4839 .update(&mut cx, |buffer, _| {
4840 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4841 })
4842 .await;
4843
4844 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4845 let code_actions = this.update(&mut cx, |this, cx| {
4846 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4847 })?;
4848
4849 Ok(proto::GetCodeActionsResponse {
4850 actions: code_actions
4851 .await?
4852 .iter()
4853 .map(language::proto::serialize_code_action)
4854 .collect(),
4855 version: serialize_version(&version),
4856 })
4857 }
4858
4859 async fn handle_apply_code_action(
4860 this: ModelHandle<Self>,
4861 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4862 _: Arc<Client>,
4863 mut cx: AsyncAppContext,
4864 ) -> Result<proto::ApplyCodeActionResponse> {
4865 let sender_id = envelope.original_sender_id()?;
4866 let action = language::proto::deserialize_code_action(
4867 envelope
4868 .payload
4869 .action
4870 .ok_or_else(|| anyhow!("invalid action"))?,
4871 )?;
4872 let apply_code_action = this.update(&mut cx, |this, cx| {
4873 let buffer = this
4874 .opened_buffers
4875 .get(&envelope.payload.buffer_id)
4876 .and_then(|buffer| buffer.upgrade(cx))
4877 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4878 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4879 })?;
4880
4881 let project_transaction = apply_code_action.await?;
4882 let project_transaction = this.update(&mut cx, |this, cx| {
4883 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4884 });
4885 Ok(proto::ApplyCodeActionResponse {
4886 transaction: Some(project_transaction),
4887 })
4888 }
4889
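    /// Generic handler for LSP-backed requests arriving over RPC: deserializes the
    /// request against the referenced buffer, re-issues it via `request_lsp`, and
    /// serializes the response for the original sender.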
4890 async fn handle_lsp_command<T: LspCommand>(
4891 this: ModelHandle<Self>,
4892 envelope: TypedEnvelope<T::ProtoRequest>,
4893 _: Arc<Client>,
4894 mut cx: AsyncAppContext,
4895 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4896 where
4897 <T::LspRequest as lsp::request::Request>::Result: Send,
4898 {
4899 let sender_id = envelope.original_sender_id()?;
4900 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4901 let buffer_handle = this.read_with(&cx, |this, _| {
4902 this.opened_buffers
4903 .get(&buffer_id)
4904 .and_then(|buffer| buffer.upgrade(&cx))
4905 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4906 })?;
4907 let request = T::from_proto(
4908 envelope.payload,
4909 this.clone(),
4910 buffer_handle.clone(),
4911 cx.clone(),
4912 )
4913 .await?;
4914 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4915 let response = this
4916 .update(&mut cx, |this, cx| {
4917 this.request_lsp(buffer_handle, request, cx)
4918 })
4919 .await?;
4920 this.update(&mut cx, |this, cx| {
4921 Ok(T::response_to_proto(
4922 response,
4923 this,
4924 sender_id,
4925 &buffer_version,
4926 cx,
4927 ))
4928 })
4929 }
4930
4931 async fn handle_get_project_symbols(
4932 this: ModelHandle<Self>,
4933 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4934 _: Arc<Client>,
4935 mut cx: AsyncAppContext,
4936 ) -> Result<proto::GetProjectSymbolsResponse> {
4937 let symbols = this
4938 .update(&mut cx, |this, cx| {
4939 this.symbols(&envelope.payload.query, cx)
4940 })
4941 .await?;
4942
4943 Ok(proto::GetProjectSymbolsResponse {
4944 symbols: symbols.iter().map(serialize_symbol).collect(),
4945 })
4946 }
4947
4948 async fn handle_search_project(
4949 this: ModelHandle<Self>,
4950 envelope: TypedEnvelope<proto::SearchProject>,
4951 _: Arc<Client>,
4952 mut cx: AsyncAppContext,
4953 ) -> Result<proto::SearchProjectResponse> {
4954 let peer_id = envelope.original_sender_id()?;
4955 let query = SearchQuery::from_proto(envelope.payload)?;
4956 let result = this
4957 .update(&mut cx, |this, cx| this.search(query, cx))
4958 .await?;
4959
4960 this.update(&mut cx, |this, cx| {
4961 let mut locations = Vec::new();
4962 for (buffer, ranges) in result {
4963 for range in ranges {
4964 let start = serialize_anchor(&range.start);
4965 let end = serialize_anchor(&range.end);
4966 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4967 locations.push(proto::Location {
4968 buffer: Some(buffer),
4969 start: Some(start),
4970 end: Some(end),
4971 });
4972 }
4973 }
4974 Ok(proto::SearchProjectResponse { locations })
4975 })
4976 }
4977
4978 async fn handle_open_buffer_for_symbol(
4979 this: ModelHandle<Self>,
4980 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4981 _: Arc<Client>,
4982 mut cx: AsyncAppContext,
4983 ) -> Result<proto::OpenBufferForSymbolResponse> {
4984 let peer_id = envelope.original_sender_id()?;
4985 let symbol = envelope
4986 .payload
4987 .symbol
4988 .ok_or_else(|| anyhow!("invalid symbol"))?;
4989 let symbol = this.read_with(&cx, |this, _| {
4990 let symbol = this.deserialize_symbol(symbol)?;
4991 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4992 if signature == symbol.signature {
4993 Ok(symbol)
4994 } else {
4995 Err(anyhow!("invalid symbol signature"))
4996 }
4997 })?;
4998 let buffer = this
4999 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5000 .await?;
5001
5002 Ok(proto::OpenBufferForSymbolResponse {
5003 buffer: Some(this.update(&mut cx, |this, cx| {
5004 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5005 })),
5006 })
5007 }
5008
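    /// Hashes the worktree id, symbol path, and this project's nonce with SHA-256.
    /// The signature travels with serialized symbols so that, when a peer asks to
    /// open a buffer for a symbol, the host can verify the symbol is one it
    /// previously produced.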
5009 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5010 let mut hasher = Sha256::new();
5011 hasher.update(worktree_id.to_proto().to_be_bytes());
5012 hasher.update(path.to_string_lossy().as_bytes());
5013 hasher.update(self.nonce.to_be_bytes());
5014 hasher.finalize().as_slice().try_into().unwrap()
5015 }
5016
5017 async fn handle_open_buffer_by_id(
5018 this: ModelHandle<Self>,
5019 envelope: TypedEnvelope<proto::OpenBufferById>,
5020 _: Arc<Client>,
5021 mut cx: AsyncAppContext,
5022 ) -> Result<proto::OpenBufferResponse> {
5023 let peer_id = envelope.original_sender_id()?;
5024 let buffer = this
5025 .update(&mut cx, |this, cx| {
5026 this.open_buffer_by_id(envelope.payload.id, cx)
5027 })
5028 .await?;
5029 this.update(&mut cx, |this, cx| {
5030 Ok(proto::OpenBufferResponse {
5031 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5032 })
5033 })
5034 }
5035
5036 async fn handle_open_buffer_by_path(
5037 this: ModelHandle<Self>,
5038 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5039 _: Arc<Client>,
5040 mut cx: AsyncAppContext,
5041 ) -> Result<proto::OpenBufferResponse> {
5042 let peer_id = envelope.original_sender_id()?;
5043 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5044 let open_buffer = this.update(&mut cx, |this, cx| {
5045 this.open_buffer(
5046 ProjectPath {
5047 worktree_id,
5048 path: PathBuf::from(envelope.payload.path).into(),
5049 },
5050 cx,
5051 )
5052 });
5053
5054 let buffer = open_buffer.await?;
5055 this.update(&mut cx, |this, cx| {
5056 Ok(proto::OpenBufferResponse {
5057 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5058 })
5059 })
5060 }
5061
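    /// Converts a `ProjectTransaction` into its protobuf form, serializing each
    /// buffer for the given peer alongside its transaction.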
5062 fn serialize_project_transaction_for_peer(
5063 &mut self,
5064 project_transaction: ProjectTransaction,
5065 peer_id: PeerId,
5066 cx: &AppContext,
5067 ) -> proto::ProjectTransaction {
5068 let mut serialized_transaction = proto::ProjectTransaction {
5069 buffers: Default::default(),
5070 transactions: Default::default(),
5071 };
5072 for (buffer, transaction) in project_transaction.0 {
5073 serialized_transaction
5074 .buffers
5075 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5076 serialized_transaction
5077 .transactions
5078 .push(language::proto::serialize_transaction(&transaction));
5079 }
5080 serialized_transaction
5081 }
5082
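    /// Rebuilds a `ProjectTransaction` from its protobuf form: deserializes each
    /// buffer, waits for the corresponding edits to arrive, and optionally pushes
    /// each transaction onto its buffer's undo history.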
5083 fn deserialize_project_transaction(
5084 &mut self,
5085 message: proto::ProjectTransaction,
5086 push_to_history: bool,
5087 cx: &mut ModelContext<Self>,
5088 ) -> Task<Result<ProjectTransaction>> {
5089 cx.spawn(|this, mut cx| async move {
5090 let mut project_transaction = ProjectTransaction::default();
5091 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5092 let buffer = this
5093 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5094 .await?;
5095 let transaction = language::proto::deserialize_transaction(transaction)?;
5096 project_transaction.0.insert(buffer, transaction);
5097 }
5098
5099 for (buffer, transaction) in &project_transaction.0 {
5100 buffer
5101 .update(&mut cx, |buffer, _| {
5102 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5103 })
5104 .await;
5105
5106 if push_to_history {
5107 buffer.update(&mut cx, |buffer, _| {
5108 buffer.push_transaction(transaction.clone(), Instant::now());
5109 });
5110 }
5111 }
5112
5113 Ok(project_transaction)
5114 })
5115 }
5116
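    /// Serializes a buffer for the given peer, sending the full buffer state the
    /// first time the buffer is shared with that peer and only the buffer id on
    /// subsequent sends.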
5117 fn serialize_buffer_for_peer(
5118 &mut self,
5119 buffer: &ModelHandle<Buffer>,
5120 peer_id: PeerId,
5121 cx: &AppContext,
5122 ) -> proto::Buffer {
5123 let buffer_id = buffer.read(cx).remote_id();
5124 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5125 if shared_buffers.insert(buffer_id) {
5126 proto::Buffer {
5127 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5128 }
5129 } else {
5130 proto::Buffer {
5131 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5132 }
5133 }
5134 }
5135
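    /// Resolves a `proto::Buffer`: an id-only variant waits until a buffer with
    /// that id has been opened locally, while a full state variant constructs the
    /// buffer (and its file, if any) and registers it with the project.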
5136 fn deserialize_buffer(
5137 &mut self,
5138 buffer: proto::Buffer,
5139 cx: &mut ModelContext<Self>,
5140 ) -> Task<Result<ModelHandle<Buffer>>> {
5141 let replica_id = self.replica_id();
5142
5143 let opened_buffer_tx = self.opened_buffer.0.clone();
5144 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5145 cx.spawn(|this, mut cx| async move {
5146 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5147 proto::buffer::Variant::Id(id) => {
5148 let buffer = loop {
5149 let buffer = this.read_with(&cx, |this, cx| {
5150 this.opened_buffers
5151 .get(&id)
5152 .and_then(|buffer| buffer.upgrade(cx))
5153 });
5154 if let Some(buffer) = buffer {
5155 break buffer;
5156 }
5157 opened_buffer_rx
5158 .next()
5159 .await
5160 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5161 };
5162 Ok(buffer)
5163 }
5164 proto::buffer::Variant::State(mut buffer) => {
5165 let mut buffer_worktree = None;
5166 let mut buffer_file = None;
5167 if let Some(file) = buffer.file.take() {
5168 this.read_with(&cx, |this, cx| {
5169 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5170 let worktree =
5171 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5172 anyhow!("no worktree found for id {}", file.worktree_id)
5173 })?;
5174 buffer_file =
5175 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5176 as Arc<dyn language::File>);
5177 buffer_worktree = Some(worktree);
5178 Ok::<_, anyhow::Error>(())
5179 })?;
5180 }
5181
5182 let buffer = cx.add_model(|cx| {
5183 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5184 });
5185
5186 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5187
5188 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5189 Ok(buffer)
5190 }
5191 }
5192 })
5193 }
5194
5195 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5196 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5197 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5198 let start = serialized_symbol
5199 .start
5200 .ok_or_else(|| anyhow!("invalid start"))?;
5201 let end = serialized_symbol
5202 .end
5203 .ok_or_else(|| anyhow!("invalid end"))?;
5204 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5205 let path = PathBuf::from(serialized_symbol.path);
5206 let language = self.languages.select_language(&path);
5207 Ok(Symbol {
5208 source_worktree_id,
5209 worktree_id,
5210 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5211 label: language
5212 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5213 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5214 name: serialized_symbol.name,
5215 path,
5216 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5217 kind,
5218 signature: serialized_symbol
5219 .signature
5220 .try_into()
5221 .map_err(|_| anyhow!("invalid signature"))?,
5222 })
5223 }
5224
5225 async fn handle_buffer_saved(
5226 this: ModelHandle<Self>,
5227 envelope: TypedEnvelope<proto::BufferSaved>,
5228 _: Arc<Client>,
5229 mut cx: AsyncAppContext,
5230 ) -> Result<()> {
5231 let version = deserialize_version(envelope.payload.version);
5232 let mtime = envelope
5233 .payload
5234 .mtime
5235 .ok_or_else(|| anyhow!("missing mtime"))?
5236 .into();
5237
5238 this.update(&mut cx, |this, cx| {
5239 let buffer = this
5240 .opened_buffers
5241 .get(&envelope.payload.buffer_id)
5242 .and_then(|buffer| buffer.upgrade(cx));
5243 if let Some(buffer) = buffer {
5244 buffer.update(cx, |buffer, cx| {
5245 buffer.did_save(version, mtime, None, cx);
5246 });
5247 }
5248 Ok(())
5249 })
5250 }
5251
5252 async fn handle_buffer_reloaded(
5253 this: ModelHandle<Self>,
5254 envelope: TypedEnvelope<proto::BufferReloaded>,
5255 _: Arc<Client>,
5256 mut cx: AsyncAppContext,
5257 ) -> Result<()> {
5258 let payload = envelope.payload.clone();
5259 let version = deserialize_version(payload.version);
5260 let mtime = payload
5261 .mtime
5262 .ok_or_else(|| anyhow!("missing mtime"))?
5263 .into();
5264 this.update(&mut cx, |this, cx| {
5265 let buffer = this
5266 .opened_buffers
5267 .get(&payload.buffer_id)
5268 .and_then(|buffer| buffer.upgrade(cx));
5269 if let Some(buffer) = buffer {
5270 buffer.update(cx, |buffer, cx| {
5271 buffer.did_reload(version, mtime, cx);
5272 });
5273 }
5274 Ok(())
5275 })
5276 }
5277
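    /// Fuzzy-matches `query` against the file paths of all visible worktrees on
    /// the background executor, honoring `cancel_flag` and returning at most
    /// `max_results` matches.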
5278 pub fn match_paths<'a>(
5279 &self,
5280 query: &'a str,
5281 include_ignored: bool,
5282 smart_case: bool,
5283 max_results: usize,
5284 cancel_flag: &'a AtomicBool,
5285 cx: &AppContext,
5286 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5287 let worktrees = self
5288 .worktrees(cx)
5289 .filter(|worktree| worktree.read(cx).is_visible())
5290 .collect::<Vec<_>>();
5291 let include_root_name = worktrees.len() > 1;
5292 let candidate_sets = worktrees
5293 .into_iter()
5294 .map(|worktree| CandidateSet {
5295 snapshot: worktree.read(cx).snapshot(),
5296 include_ignored,
5297 include_root_name,
5298 })
5299 .collect::<Vec<_>>();
5300
5301 let background = cx.background().clone();
5302 async move {
5303 fuzzy::match_paths(
5304 candidate_sets.as_slice(),
5305 query,
5306 smart_case,
5307 max_results,
5308 cancel_flag,
5309 background,
5310 )
5311 .await
5312 }
5313 }
5314
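    /// Converts LSP text edits into anchored buffer edits against the snapshot
    /// corresponding to `version`. Adjacent and newline-separated edits are
    /// coalesced, and multiline replacements are diffed against the old text so
    /// that anchors in unchanged regions keep their positions.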
5315 fn edits_from_lsp(
5316 &mut self,
5317 buffer: &ModelHandle<Buffer>,
5318 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5319 version: Option<i32>,
5320 cx: &mut ModelContext<Self>,
5321 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5322 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5323 cx.background().spawn(async move {
5324 let snapshot = snapshot?;
5325 let mut lsp_edits = lsp_edits
5326 .into_iter()
5327 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5328 .collect::<Vec<_>>();
5329 lsp_edits.sort_by_key(|(range, _)| range.start);
5330
5331 let mut lsp_edits = lsp_edits.into_iter().peekable();
5332 let mut edits = Vec::new();
5333 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5334 // Combine any LSP edits that are adjacent.
5335 //
5336 // Also, combine LSP edits that are separated from each other by only
5337 // a newline. This is important because for some code actions,
5338 // Rust-analyzer rewrites the entire buffer via a series of edits that
5339 // are separated by unchanged newline characters.
5340 //
5341 // In order for the diffing logic below to work properly, any edits that
5342 // cancel each other out must be combined into one.
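                //
                // For example, when a server rewrites a file line by line, edit N
                // ends at the end of line N and edit N + 1 starts at column 0 of
                // line N + 1; the loop below joins such edits with the intervening
                // newline into a single edit spanning both lines.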
5343 while let Some((next_range, next_text)) = lsp_edits.peek() {
5344 if next_range.start > range.end {
5345 if next_range.start.row > range.end.row + 1
5346 || next_range.start.column > 0
5347 || snapshot.clip_point_utf16(
5348 PointUtf16::new(range.end.row, u32::MAX),
5349 Bias::Left,
5350 ) > range.end
5351 {
5352 break;
5353 }
5354 new_text.push('\n');
5355 }
5356 range.end = next_range.end;
5357 new_text.push_str(&next_text);
5358 lsp_edits.next();
5359 }
5360
5361 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5362 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5363 {
5364 return Err(anyhow!("invalid edits received from language server"));
5365 }
5366
5367 // For multiline edits, perform a diff of the old and new text so that
5368 // we can identify the changes more precisely, preserving the locations
5369 // of any anchors positioned in the unchanged regions.
5370 if range.end.row > range.start.row {
5371 let mut offset = range.start.to_offset(&snapshot);
5372 let old_text = snapshot.text_for_range(range).collect::<String>();
5373
5374 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5375 let mut moved_since_edit = true;
5376 for change in diff.iter_all_changes() {
5377 let tag = change.tag();
5378 let value = change.value();
5379 match tag {
5380 ChangeTag::Equal => {
5381 offset += value.len();
5382 moved_since_edit = true;
5383 }
5384 ChangeTag::Delete => {
5385 let start = snapshot.anchor_after(offset);
5386 let end = snapshot.anchor_before(offset + value.len());
5387 if moved_since_edit {
5388 edits.push((start..end, String::new()));
5389 } else {
5390 edits.last_mut().unwrap().0.end = end;
5391 }
5392 offset += value.len();
5393 moved_since_edit = false;
5394 }
5395 ChangeTag::Insert => {
5396 if moved_since_edit {
5397 let anchor = snapshot.anchor_after(offset);
5398 edits.push((anchor.clone()..anchor, value.to_string()));
5399 } else {
5400 edits.last_mut().unwrap().1.push_str(value);
5401 }
5402 moved_since_edit = false;
5403 }
5404 }
5405 }
5406 } else if range.end == range.start {
5407 let anchor = snapshot.anchor_after(range.start);
5408 edits.push((anchor.clone()..anchor, new_text));
5409 } else {
5410 let edit_start = snapshot.anchor_after(range.start);
5411 let edit_end = snapshot.anchor_before(range.end);
5412 edits.push((edit_start..edit_end, new_text));
5413 }
5414 }
5415
5416 Ok(edits)
5417 })
5418 }
5419
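    /// Returns the text snapshot that corresponds to the LSP document `version`,
    /// discarding snapshots that are more than `OLD_VERSIONS_TO_RETAIN` versions
    /// old. When no version is given, the buffer's current text snapshot is used.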
5420 fn buffer_snapshot_for_lsp_version(
5421 &mut self,
5422 buffer: &ModelHandle<Buffer>,
5423 version: Option<i32>,
5424 cx: &AppContext,
5425 ) -> Result<TextBufferSnapshot> {
5426 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5427
5428 if let Some(version) = version {
5429 let buffer_id = buffer.read(cx).remote_id();
5430 let snapshots = self
5431 .buffer_snapshots
5432 .get_mut(&buffer_id)
5433 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5434 let mut found_snapshot = None;
5435 snapshots.retain(|(snapshot_version, snapshot)| {
5436 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5437 false
5438 } else {
5439 if *snapshot_version == version {
5440 found_snapshot = Some(snapshot.clone());
5441 }
5442 true
5443 }
5444 });
5445
5446 found_snapshot.ok_or_else(|| {
5447 anyhow!(
5448 "snapshot not found for buffer {} at version {}",
5449 buffer_id,
5450 version
5451 )
5452 })
5453 } else {
5454             Ok(buffer.read(cx).text_snapshot())
5455 }
5456 }
5457
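    /// Returns the LSP adapter and language server registered for the buffer's
    /// worktree and language, if a server has been started for that pair.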
5458 fn language_server_for_buffer(
5459 &self,
5460 buffer: &Buffer,
5461 cx: &AppContext,
5462 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5463 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5464 let worktree_id = file.worktree_id(cx);
5465 self.language_servers
5466 .get(&(worktree_id, language.lsp_adapter()?.name()))
5467 } else {
5468 None
5469 }
5470 }
5471}
5472
5473impl ProjectStore {
5474 pub fn new(db: Arc<Db>) -> Self {
5475 Self {
5476 db,
5477 projects: Default::default(),
5478 }
5479 }
5480
5481 pub fn projects<'a>(
5482 &'a self,
5483 cx: &'a AppContext,
5484 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5485 self.projects
5486 .iter()
5487 .filter_map(|project| project.upgrade(cx))
5488 }
5489
5490 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5491 if let Err(ix) = self
5492 .projects
5493 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5494 {
5495 self.projects.insert(ix, project);
5496 }
5497 cx.notify();
5498 }
5499
5500 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5501 let mut did_change = false;
5502 self.projects.retain(|project| {
5503 if project.is_upgradable(cx) {
5504 true
5505 } else {
5506 did_change = true;
5507 false
5508 }
5509 });
5510 if did_change {
5511 cx.notify();
5512 }
5513 }
5514}
5515
5516impl WorktreeHandle {
5517 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5518 match self {
5519 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5520 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5521 }
5522 }
5523}
5524
5525impl OpenBuffer {
5526 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5527 match self {
5528 OpenBuffer::Strong(handle) => Some(handle.clone()),
5529 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5530 OpenBuffer::Loading(_) => None,
5531 }
5532 }
5533}
5534
5535struct CandidateSet {
5536 snapshot: Snapshot,
5537 include_ignored: bool,
5538 include_root_name: bool,
5539}
5540
5541impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5542 type Candidates = CandidateSetIter<'a>;
5543
5544 fn id(&self) -> usize {
5545 self.snapshot.id().to_usize()
5546 }
5547
5548 fn len(&self) -> usize {
5549 if self.include_ignored {
5550 self.snapshot.file_count()
5551 } else {
5552 self.snapshot.visible_file_count()
5553 }
5554 }
5555
5556 fn prefix(&self) -> Arc<str> {
5557 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5558 self.snapshot.root_name().into()
5559 } else if self.include_root_name {
5560 format!("{}/", self.snapshot.root_name()).into()
5561 } else {
5562 "".into()
5563 }
5564 }
5565
5566 fn candidates(&'a self, start: usize) -> Self::Candidates {
5567 CandidateSetIter {
5568 traversal: self.snapshot.files(self.include_ignored, start),
5569 }
5570 }
5571}
5572
5573struct CandidateSetIter<'a> {
5574 traversal: Traversal<'a>,
5575}
5576
5577impl<'a> Iterator for CandidateSetIter<'a> {
5578 type Item = PathMatchCandidate<'a>;
5579
5580 fn next(&mut self) -> Option<Self::Item> {
5581 self.traversal.next().map(|entry| {
5582 if let EntryKind::File(char_bag) = entry.kind {
5583 PathMatchCandidate {
5584 path: &entry.path,
5585 char_bag,
5586 }
5587 } else {
5588 unreachable!()
5589 }
5590 })
5591 }
5592}
5593
5594impl Entity for ProjectStore {
5595 type Event = ();
5596}
5597
5598impl Entity for Project {
5599 type Event = Event;
5600
5601 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5602 self.project_store.update(cx, ProjectStore::prune_projects);
5603
5604 match &self.client_state {
5605 ProjectClientState::Local { remote_id_rx, .. } => {
5606 if let Some(project_id) = *remote_id_rx.borrow() {
5607 self.client
5608 .send(proto::UnregisterProject { project_id })
5609 .log_err();
5610 }
5611 }
5612 ProjectClientState::Remote { remote_id, .. } => {
5613 self.client
5614 .send(proto::LeaveProject {
5615 project_id: *remote_id,
5616 })
5617 .log_err();
5618 }
5619 }
5620 }
5621
5622 fn app_will_quit(
5623 &mut self,
5624 _: &mut MutableAppContext,
5625 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5626 let shutdown_futures = self
5627 .language_servers
5628 .drain()
5629 .filter_map(|(_, (_, server))| server.shutdown())
5630 .collect::<Vec<_>>();
5631 Some(
5632 async move {
5633 futures::future::join_all(shutdown_futures).await;
5634 }
5635 .boxed(),
5636 )
5637 }
5638}
5639
5640impl Collaborator {
5641 fn from_proto(
5642 message: proto::Collaborator,
5643 user_store: &ModelHandle<UserStore>,
5644 cx: &mut AsyncAppContext,
5645 ) -> impl Future<Output = Result<Self>> {
5646 let user = user_store.update(cx, |user_store, cx| {
5647 user_store.fetch_user(message.user_id, cx)
5648 });
5649
5650 async move {
5651 Ok(Self {
5652 peer_id: PeerId(message.peer_id),
5653 user: user.await?,
5654 replica_id: message.replica_id as ReplicaId,
5655 })
5656 }
5657 }
5658}
5659
5660impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5661 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5662 Self {
5663 worktree_id,
5664 path: path.as_ref().into(),
5665 }
5666 }
5667}
5668
5669impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5670 fn from(options: lsp::CreateFileOptions) -> Self {
5671 Self {
5672 overwrite: options.overwrite.unwrap_or(false),
5673 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5674 }
5675 }
5676}
5677
5678impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5679 fn from(options: lsp::RenameFileOptions) -> Self {
5680 Self {
5681 overwrite: options.overwrite.unwrap_or(false),
5682 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5683 }
5684 }
5685}
5686
5687impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5688 fn from(options: lsp::DeleteFileOptions) -> Self {
5689 Self {
5690 recursive: options.recursive.unwrap_or(false),
5691 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5692 }
5693 }
5694}
5695
5696fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5697 proto::Symbol {
5698 source_worktree_id: symbol.source_worktree_id.to_proto(),
5699 worktree_id: symbol.worktree_id.to_proto(),
5700 language_server_name: symbol.language_server_name.0.to_string(),
5701 name: symbol.name.clone(),
5702 kind: unsafe { mem::transmute(symbol.kind) },
5703 path: symbol.path.to_string_lossy().to_string(),
5704 start: Some(proto::Point {
5705 row: symbol.range.start.row,
5706 column: symbol.range.start.column,
5707 }),
5708 end: Some(proto::Point {
5709 row: symbol.range.end.row,
5710 column: symbol.range.end.column,
5711 }),
5712 signature: symbol.signature.to_vec(),
5713 }
5714}
5715
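// Computes `path` relative to `base`, emitting `..` components wherever the two paths diverge.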
5716fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5717 let mut path_components = path.components();
5718 let mut base_components = base.components();
5719 let mut components: Vec<Component> = Vec::new();
5720 loop {
5721 match (path_components.next(), base_components.next()) {
5722 (None, None) => break,
5723 (Some(a), None) => {
5724 components.push(a);
5725 components.extend(path_components.by_ref());
5726 break;
5727 }
5728 (None, _) => components.push(Component::ParentDir),
5729 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5730 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5731 (Some(a), Some(_)) => {
5732 components.push(Component::ParentDir);
5733 for _ in base_components {
5734 components.push(Component::ParentDir);
5735 }
5736 components.push(a);
5737 components.extend(path_components.by_ref());
5738 break;
5739 }
5740 }
5741 }
5742 components.iter().map(|c| c.as_os_str()).collect()
5743}
5744
5745impl Item for Buffer {
5746 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5747 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5748 }
5749}
5750
5751#[cfg(test)]
5752mod tests {
5753 use crate::worktree::WorktreeHandle;
5754
5755 use super::{Event, *};
5756 use fs::RealFs;
5757 use futures::{future, StreamExt};
5758 use gpui::{executor::Deterministic, test::subscribe};
5759 use language::{
5760 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5761 OffsetRangeExt, Point, ToPoint,
5762 };
5763 use lsp::Url;
5764 use serde_json::json;
5765 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5766 use unindent::Unindent as _;
5767 use util::{assert_set_eq, test::temp_tree};
5768
5769 #[gpui::test]
5770 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5771 let dir = temp_tree(json!({
5772 "root": {
5773 "apple": "",
5774 "banana": {
5775 "carrot": {
5776 "date": "",
5777 "endive": "",
5778 }
5779 },
5780 "fennel": {
5781 "grape": "",
5782 }
5783 }
5784 }));
5785
5786 let root_link_path = dir.path().join("root_link");
5787 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5788 unix::fs::symlink(
5789 &dir.path().join("root/fennel"),
5790 &dir.path().join("root/finnochio"),
5791 )
5792 .unwrap();
5793
5794 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5795
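// The worktree follows the `finnochio` symlink, so entries reached through it share inodes with the ones under `fennel`.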
5796 project.read_with(cx, |project, cx| {
5797 let tree = project.worktrees(cx).next().unwrap().read(cx);
5798 assert_eq!(tree.file_count(), 5);
5799 assert_eq!(
5800 tree.inode_for_path("fennel/grape"),
5801 tree.inode_for_path("finnochio/grape")
5802 );
5803 });
5804
5805 let cancel_flag = Default::default();
5806 let results = project
5807 .read_with(cx, |project, cx| {
5808 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5809 })
5810 .await;
5811 assert_eq!(
5812 results
5813 .into_iter()
5814 .map(|result| result.path)
5815 .collect::<Vec<Arc<Path>>>(),
5816 vec![
5817 PathBuf::from("banana/carrot/date").into(),
5818 PathBuf::from("banana/carrot/endive").into(),
5819 ]
5820 );
5821 }
5822
5823 #[gpui::test]
5824 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5825 cx.foreground().forbid_parking();
5826
5827 let mut rust_language = Language::new(
5828 LanguageConfig {
5829 name: "Rust".into(),
5830 path_suffixes: vec!["rs".to_string()],
5831 ..Default::default()
5832 },
5833 Some(tree_sitter_rust::language()),
5834 );
5835 let mut json_language = Language::new(
5836 LanguageConfig {
5837 name: "JSON".into(),
5838 path_suffixes: vec!["json".to_string()],
5839 ..Default::default()
5840 },
5841 None,
5842 );
5843 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5844 name: "the-rust-language-server",
5845 capabilities: lsp::ServerCapabilities {
5846 completion_provider: Some(lsp::CompletionOptions {
5847 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5848 ..Default::default()
5849 }),
5850 ..Default::default()
5851 },
5852 ..Default::default()
5853 });
5854 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5855 name: "the-json-language-server",
5856 capabilities: lsp::ServerCapabilities {
5857 completion_provider: Some(lsp::CompletionOptions {
5858 trigger_characters: Some(vec![":".to_string()]),
5859 ..Default::default()
5860 }),
5861 ..Default::default()
5862 },
5863 ..Default::default()
5864 });
5865
5866 let fs = FakeFs::new(cx.background());
5867 fs.insert_tree(
5868 "/the-root",
5869 json!({
5870 "test.rs": "const A: i32 = 1;",
5871 "test2.rs": "",
5872 "Cargo.toml": "a = 1",
5873 "package.json": "{\"a\": 1}",
5874 }),
5875 )
5876 .await;
5877
5878 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5879 project.update(cx, |project, _| {
5880 project.languages.add(Arc::new(rust_language));
5881 project.languages.add(Arc::new(json_language));
5882 });
5883
5884 // Open a buffer without an associated language server.
5885 let toml_buffer = project
5886 .update(cx, |project, cx| {
5887 project.open_local_buffer("/the-root/Cargo.toml", cx)
5888 })
5889 .await
5890 .unwrap();
5891
5892 // Open a buffer with an associated language server.
5893 let rust_buffer = project
5894 .update(cx, |project, cx| {
5895 project.open_local_buffer("/the-root/test.rs", cx)
5896 })
5897 .await
5898 .unwrap();
5899
5900 // A server is started up, and it is notified about Rust files.
5901 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5902 assert_eq!(
5903 fake_rust_server
5904 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5905 .await
5906 .text_document,
5907 lsp::TextDocumentItem {
5908 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5909 version: 0,
5910 text: "const A: i32 = 1;".to_string(),
5911 language_id: Default::default()
5912 }
5913 );
5914
5915 // The buffer is configured based on the language server's capabilities.
5916 rust_buffer.read_with(cx, |buffer, _| {
5917 assert_eq!(
5918 buffer.completion_triggers(),
5919 &[".".to_string(), "::".to_string()]
5920 );
5921 });
5922 toml_buffer.read_with(cx, |buffer, _| {
5923 assert!(buffer.completion_triggers().is_empty());
5924 });
5925
5926 // Edit a buffer. The changes are reported to the language server.
5927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5928 assert_eq!(
5929 fake_rust_server
5930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5931 .await
5932 .text_document,
5933 lsp::VersionedTextDocumentIdentifier::new(
5934 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5935 1
5936 )
5937 );
5938
5939 // Open a third buffer with a different associated language server.
5940 let json_buffer = project
5941 .update(cx, |project, cx| {
5942 project.open_local_buffer("/the-root/package.json", cx)
5943 })
5944 .await
5945 .unwrap();
5946
5947 // A JSON language server is started up and is notified only about the JSON buffer.
5948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5949 assert_eq!(
5950 fake_json_server
5951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5952 .await
5953 .text_document,
5954 lsp::TextDocumentItem {
5955 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5956 version: 0,
5957 text: "{\"a\": 1}".to_string(),
5958 language_id: Default::default()
5959 }
5960 );
5961
5962 // This buffer is configured based on the second language server's
5963 // capabilities.
5964 json_buffer.read_with(cx, |buffer, _| {
5965 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5966 });
5967
5968 // When opening another buffer whose language server is already running,
5969 // it is also configured based on the existing language server's capabilities.
5970 let rust_buffer2 = project
5971 .update(cx, |project, cx| {
5972 project.open_local_buffer("/the-root/test2.rs", cx)
5973 })
5974 .await
5975 .unwrap();
5976 rust_buffer2.read_with(cx, |buffer, _| {
5977 assert_eq!(
5978 buffer.completion_triggers(),
5979 &[".".to_string(), "::".to_string()]
5980 );
5981 });
5982
5983 // Changes are reported only to servers matching the buffer's language.
5984 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5985 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5986 assert_eq!(
5987 fake_rust_server
5988 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5989 .await
5990 .text_document,
5991 lsp::VersionedTextDocumentIdentifier::new(
5992 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5993 1
5994 )
5995 );
5996
5997 // Save notifications are reported to all servers.
5998 toml_buffer
5999 .update(cx, |buffer, cx| buffer.save(cx))
6000 .await
6001 .unwrap();
6002 assert_eq!(
6003 fake_rust_server
6004 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6005 .await
6006 .text_document,
6007 lsp::TextDocumentIdentifier::new(
6008 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6009 )
6010 );
6011 assert_eq!(
6012 fake_json_server
6013 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6014 .await
6015 .text_document,
6016 lsp::TextDocumentIdentifier::new(
6017 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6018 )
6019 );
6020
6021 // Renames are reported only to servers matching the buffer's language.
6022 fs.rename(
6023 Path::new("/the-root/test2.rs"),
6024 Path::new("/the-root/test3.rs"),
6025 Default::default(),
6026 )
6027 .await
6028 .unwrap();
6029 assert_eq!(
6030 fake_rust_server
6031 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6032 .await
6033 .text_document,
6034 lsp::TextDocumentIdentifier::new(
6035 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6036 ),
6037 );
6038 assert_eq!(
6039 fake_rust_server
6040 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6041 .await
6042 .text_document,
6043 lsp::TextDocumentItem {
6044 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6045 version: 0,
6046 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6047 language_id: Default::default()
6048 },
6049 );
6050
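// Attach a diagnostic to this buffer so we can verify below that it is cleared when the file's language changes.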
6051 rust_buffer2.update(cx, |buffer, cx| {
6052 buffer.update_diagnostics(
6053 DiagnosticSet::from_sorted_entries(
6054 vec![DiagnosticEntry {
6055 diagnostic: Default::default(),
6056 range: Anchor::MIN..Anchor::MAX,
6057 }],
6058 &buffer.snapshot(),
6059 ),
6060 cx,
6061 );
6062 assert_eq!(
6063 buffer
6064 .snapshot()
6065 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6066 .count(),
6067 1
6068 );
6069 });
6070
6071 // When the rename changes the file's extension, the buffer is closed on the old
6072 // language server and opened on the new one.
6073 fs.rename(
6074 Path::new("/the-root/test3.rs"),
6075 Path::new("/the-root/test3.json"),
6076 Default::default(),
6077 )
6078 .await
6079 .unwrap();
6080 assert_eq!(
6081 fake_rust_server
6082 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6083 .await
6084 .text_document,
6085 lsp::TextDocumentIdentifier::new(
6086 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6087 ),
6088 );
6089 assert_eq!(
6090 fake_json_server
6091 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6092 .await
6093 .text_document,
6094 lsp::TextDocumentItem {
6095 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6096 version: 0,
6097 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6098 language_id: Default::default()
6099 },
6100 );
6101
6102 // We clear the diagnostics, since the language has changed.
6103 rust_buffer2.read_with(cx, |buffer, _| {
6104 assert_eq!(
6105 buffer
6106 .snapshot()
6107 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6108 .count(),
6109 0
6110 );
6111 });
6112
6113 // The renamed file's version resets after changing language server.
6114 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6115 assert_eq!(
6116 fake_json_server
6117 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6118 .await
6119 .text_document,
6120 lsp::VersionedTextDocumentIdentifier::new(
6121 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6122 1
6123 )
6124 );
6125
6126 // Restart language servers
6127 project.update(cx, |project, cx| {
6128 project.restart_language_servers_for_buffers(
6129 vec![rust_buffer.clone(), json_buffer.clone()],
6130 cx,
6131 );
6132 });
6133
6134 let mut rust_shutdown_requests = fake_rust_server
6135 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6136 let mut json_shutdown_requests = fake_json_server
6137 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6138 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6139
6140 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6141 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6142
6143 // Ensure the Rust document is reopened in the new Rust language server
6144 assert_eq!(
6145 fake_rust_server
6146 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6147 .await
6148 .text_document,
6149 lsp::TextDocumentItem {
6150 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6151 version: 1,
6152 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6153 language_id: Default::default()
6154 }
6155 );
6156
6157 // Ensure the JSON documents are reopened in the new JSON language server
6158 assert_set_eq!(
6159 [
6160 fake_json_server
6161 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6162 .await
6163 .text_document,
6164 fake_json_server
6165 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6166 .await
6167 .text_document,
6168 ],
6169 [
6170 lsp::TextDocumentItem {
6171 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6172 version: 0,
6173 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6174 language_id: Default::default()
6175 },
6176 lsp::TextDocumentItem {
6177 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6178 version: 1,
6179 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6180 language_id: Default::default()
6181 }
6182 ]
6183 );
6184
6185 // Close notifications are reported only to servers matching the buffer's language.
6186 cx.update(|_| drop(json_buffer));
6187 let close_message = lsp::DidCloseTextDocumentParams {
6188 text_document: lsp::TextDocumentIdentifier::new(
6189 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6190 ),
6191 };
6192 assert_eq!(
6193 fake_json_server
6194 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6195 .await,
6196 close_message,
6197 );
6198 }
6199
6200 #[gpui::test]
6201 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6202 cx.foreground().forbid_parking();
6203
6204 let fs = FakeFs::new(cx.background());
6205 fs.insert_tree(
6206 "/dir",
6207 json!({
6208 "a.rs": "let a = 1;",
6209 "b.rs": "let b = 2;"
6210 }),
6211 )
6212 .await;
6213
6214 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6215
6216 let buffer_a = project
6217 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6218 .await
6219 .unwrap();
6220 let buffer_b = project
6221 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6222 .await
6223 .unwrap();
6224
6225 project.update(cx, |project, cx| {
6226 project
6227 .update_diagnostics(
6228 0,
6229 lsp::PublishDiagnosticsParams {
6230 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6231 version: None,
6232 diagnostics: vec![lsp::Diagnostic {
6233 range: lsp::Range::new(
6234 lsp::Position::new(0, 4),
6235 lsp::Position::new(0, 5),
6236 ),
6237 severity: Some(lsp::DiagnosticSeverity::ERROR),
6238 message: "error 1".to_string(),
6239 ..Default::default()
6240 }],
6241 },
6242 &[],
6243 cx,
6244 )
6245 .unwrap();
6246 project
6247 .update_diagnostics(
6248 0,
6249 lsp::PublishDiagnosticsParams {
6250 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6251 version: None,
6252 diagnostics: vec![lsp::Diagnostic {
6253 range: lsp::Range::new(
6254 lsp::Position::new(0, 4),
6255 lsp::Position::new(0, 5),
6256 ),
6257 severity: Some(lsp::DiagnosticSeverity::WARNING),
6258 message: "error 2".to_string(),
6259 ..Default::default()
6260 }],
6261 },
6262 &[],
6263 cx,
6264 )
6265 .unwrap();
6266 });
6267
6268 buffer_a.read_with(cx, |buffer, _| {
6269 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6270 assert_eq!(
6271 chunks
6272 .iter()
6273 .map(|(s, d)| (s.as_str(), *d))
6274 .collect::<Vec<_>>(),
6275 &[
6276 ("let ", None),
6277 ("a", Some(DiagnosticSeverity::ERROR)),
6278 (" = 1;", None),
6279 ]
6280 );
6281 });
6282 buffer_b.read_with(cx, |buffer, _| {
6283 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6284 assert_eq!(
6285 chunks
6286 .iter()
6287 .map(|(s, d)| (s.as_str(), *d))
6288 .collect::<Vec<_>>(),
6289 &[
6290 ("let ", None),
6291 ("b", Some(DiagnosticSeverity::WARNING)),
6292 (" = 2;", None),
6293 ]
6294 );
6295 });
6296 }
6297
6298 #[gpui::test]
6299 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6300 cx.foreground().forbid_parking();
6301
6302 let progress_token = "the-progress-token";
6303 let mut language = Language::new(
6304 LanguageConfig {
6305 name: "Rust".into(),
6306 path_suffixes: vec!["rs".to_string()],
6307 ..Default::default()
6308 },
6309 Some(tree_sitter_rust::language()),
6310 );
6311 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6312 disk_based_diagnostics_progress_token: Some(progress_token),
6313 disk_based_diagnostics_sources: &["disk"],
6314 ..Default::default()
6315 });
6316
6317 let fs = FakeFs::new(cx.background());
6318 fs.insert_tree(
6319 "/dir",
6320 json!({
6321 "a.rs": "fn a() { A }",
6322 "b.rs": "const y: i32 = 1",
6323 }),
6324 )
6325 .await;
6326
6327 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6328 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6329 let worktree_id =
6330 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6331
6332 // Cause the worktree to start the fake language server
6333 let _buffer = project
6334 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6335 .await
6336 .unwrap();
6337
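// Subscribe to project events before the fake server reports any progress.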
6338 let mut events = subscribe(&project, cx);
6339
6340 let mut fake_server = fake_servers.next().await.unwrap();
6341 fake_server.start_progress(progress_token).await;
6342 assert_eq!(
6343 events.next().await.unwrap(),
6344 Event::DiskBasedDiagnosticsStarted {
6345 language_server_id: 0,
6346 }
6347 );
6348
6349 fake_server.start_progress(progress_token).await;
6350 fake_server.end_progress(progress_token).await;
6351 fake_server.start_progress(progress_token).await;
6352
6353 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6354 lsp::PublishDiagnosticsParams {
6355 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6356 version: None,
6357 diagnostics: vec![lsp::Diagnostic {
6358 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6359 severity: Some(lsp::DiagnosticSeverity::ERROR),
6360 message: "undefined variable 'A'".to_string(),
6361 ..Default::default()
6362 }],
6363 },
6364 );
6365 assert_eq!(
6366 events.next().await.unwrap(),
6367 Event::DiagnosticsUpdated {
6368 language_server_id: 0,
6369 path: (worktree_id, Path::new("a.rs")).into()
6370 }
6371 );
6372
6373 fake_server.end_progress(progress_token).await;
6374 fake_server.end_progress(progress_token).await;
6375 assert_eq!(
6376 events.next().await.unwrap(),
6377 Event::DiskBasedDiagnosticsFinished {
6378 language_server_id: 0
6379 }
6380 );
6381
6382 let buffer = project
6383 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6384 .await
6385 .unwrap();
6386
6387 buffer.read_with(cx, |buffer, _| {
6388 let snapshot = buffer.snapshot();
6389 let diagnostics = snapshot
6390 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6391 .collect::<Vec<_>>();
6392 assert_eq!(
6393 diagnostics,
6394 &[DiagnosticEntry {
6395 range: Point::new(0, 9)..Point::new(0, 10),
6396 diagnostic: Diagnostic {
6397 severity: lsp::DiagnosticSeverity::ERROR,
6398 message: "undefined variable 'A'".to_string(),
6399 group_id: 0,
6400 is_primary: true,
6401 ..Default::default()
6402 }
6403 }]
6404 )
6405 });
6406
6407 // Ensure publishing empty diagnostics twice only results in one update event.
6408 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6409 lsp::PublishDiagnosticsParams {
6410 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6411 version: None,
6412 diagnostics: Default::default(),
6413 },
6414 );
6415 assert_eq!(
6416 events.next().await.unwrap(),
6417 Event::DiagnosticsUpdated {
6418 language_server_id: 0,
6419 path: (worktree_id, Path::new("a.rs")).into()
6420 }
6421 );
6422
6423 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6424 lsp::PublishDiagnosticsParams {
6425 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6426 version: None,
6427 diagnostics: Default::default(),
6428 },
6429 );
6430 cx.foreground().run_until_parked();
6431 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6432 }
6433
6434 #[gpui::test]
6435 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6436 cx.foreground().forbid_parking();
6437
6438 let progress_token = "the-progress-token";
6439 let mut language = Language::new(
6440 LanguageConfig {
6441 path_suffixes: vec!["rs".to_string()],
6442 ..Default::default()
6443 },
6444 None,
6445 );
6446 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6447 disk_based_diagnostics_sources: &["disk"],
6448 disk_based_diagnostics_progress_token: Some(progress_token),
6449 ..Default::default()
6450 });
6451
6452 let fs = FakeFs::new(cx.background());
6453 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6454
6455 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6456 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6457
6458 let buffer = project
6459 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6460 .await
6461 .unwrap();
6462
6463 // Simulate diagnostics starting to update.
6464 let mut fake_server = fake_servers.next().await.unwrap();
6465 fake_server.start_progress(progress_token).await;
6466
6467 // Restart the server before the diagnostics finish updating.
6468 project.update(cx, |project, cx| {
6469 project.restart_language_servers_for_buffers([buffer], cx);
6470 });
6471 let mut events = subscribe(&project, cx);
6472
6473 // Simulate the newly started server sending more diagnostics.
6474 let mut fake_server = fake_servers.next().await.unwrap();
6475 fake_server.start_progress(progress_token).await;
6476 assert_eq!(
6477 events.next().await.unwrap(),
6478 Event::DiskBasedDiagnosticsStarted {
6479 language_server_id: 1
6480 }
6481 );
6482 project.read_with(cx, |project, _| {
6483 assert_eq!(
6484 project
6485 .language_servers_running_disk_based_diagnostics()
6486 .collect::<Vec<_>>(),
6487 [1]
6488 );
6489 });
6490
6491 // All diagnostics are considered done, despite the old server's diagnostic
6492 // task never completing.
6493 fake_server.end_progress(progress_token).await;
6494 assert_eq!(
6495 events.next().await.unwrap(),
6496 Event::DiskBasedDiagnosticsFinished {
6497 language_server_id: 1
6498 }
6499 );
6500 project.read_with(cx, |project, _| {
6501 assert_eq!(
6502 project
6503 .language_servers_running_disk_based_diagnostics()
6504 .collect::<Vec<_>>(),
6505 [0; 0]
6506 );
6507 });
6508 }
6509
6510 #[gpui::test]
6511 async fn test_toggling_enable_language_server(
6512 deterministic: Arc<Deterministic>,
6513 cx: &mut gpui::TestAppContext,
6514 ) {
6515 deterministic.forbid_parking();
6516
6517 let mut rust = Language::new(
6518 LanguageConfig {
6519 name: Arc::from("Rust"),
6520 path_suffixes: vec!["rs".to_string()],
6521 ..Default::default()
6522 },
6523 None,
6524 );
6525 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6526 name: "rust-lsp",
6527 ..Default::default()
6528 });
6529 let mut js = Language::new(
6530 LanguageConfig {
6531 name: Arc::from("JavaScript"),
6532 path_suffixes: vec!["js".to_string()],
6533 ..Default::default()
6534 },
6535 None,
6536 );
6537 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6538 name: "js-lsp",
6539 ..Default::default()
6540 });
6541
6542 let fs = FakeFs::new(cx.background());
6543 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6544 .await;
6545
6546 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6547 project.update(cx, |project, _| {
6548 project.languages.add(Arc::new(rust));
6549 project.languages.add(Arc::new(js));
6550 });
6551
6552 let _rs_buffer = project
6553 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6554 .await
6555 .unwrap();
6556 let _js_buffer = project
6557 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6558 .await
6559 .unwrap();
6560
6561 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6562 assert_eq!(
6563 fake_rust_server_1
6564 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6565 .await
6566 .text_document
6567 .uri
6568 .as_str(),
6569 "file:///dir/a.rs"
6570 );
6571
6572 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6573 assert_eq!(
6574 fake_js_server
6575 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6576 .await
6577 .text_document
6578 .uri
6579 .as_str(),
6580 "file:///dir/b.js"
6581 );
6582
6583 // Disable the Rust language server, ensuring only that server gets stopped.
6584 cx.update(|cx| {
6585 cx.update_global(|settings: &mut Settings, _| {
6586 settings.language_overrides.insert(
6587 Arc::from("Rust"),
6588 settings::LanguageOverride {
6589 enable_language_server: Some(false),
6590 ..Default::default()
6591 },
6592 );
6593 })
6594 });
6595 fake_rust_server_1
6596 .receive_notification::<lsp::notification::Exit>()
6597 .await;
6598
6599 // Enable Rust and disable JavaScript language servers, ensuring that the
6600 // former gets started again and that the latter stops.
6601 cx.update(|cx| {
6602 cx.update_global(|settings: &mut Settings, _| {
6603 settings.language_overrides.insert(
6604 Arc::from("Rust"),
6605 settings::LanguageOverride {
6606 enable_language_server: Some(true),
6607 ..Default::default()
6608 },
6609 );
6610 settings.language_overrides.insert(
6611 Arc::from("JavaScript"),
6612 settings::LanguageOverride {
6613 enable_language_server: Some(false),
6614 ..Default::default()
6615 },
6616 );
6617 })
6618 });
6619 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6620 assert_eq!(
6621 fake_rust_server_2
6622 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6623 .await
6624 .text_document
6625 .uri
6626 .as_str(),
6627 "file:///dir/a.rs"
6628 );
6629 fake_js_server
6630 .receive_notification::<lsp::notification::Exit>()
6631 .await;
6632 }
6633
6634 #[gpui::test]
6635 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6636 cx.foreground().forbid_parking();
6637
6638 let mut language = Language::new(
6639 LanguageConfig {
6640 name: "Rust".into(),
6641 path_suffixes: vec!["rs".to_string()],
6642 ..Default::default()
6643 },
6644 Some(tree_sitter_rust::language()),
6645 );
6646 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6647 disk_based_diagnostics_sources: &["disk"],
6648 ..Default::default()
6649 });
6650
6651 let text = "
6652 fn a() { A }
6653 fn b() { BB }
6654 fn c() { CCC }
6655 "
6656 .unindent();
6657
6658 let fs = FakeFs::new(cx.background());
6659 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6660
6661 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6662 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6663
6664 let buffer = project
6665 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6666 .await
6667 .unwrap();
6668
6669 let mut fake_server = fake_servers.next().await.unwrap();
6670 let open_notification = fake_server
6671 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6672 .await;
6673
6674 // Edit the buffer, moving the content down
6675 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6676 let change_notification_1 = fake_server
6677 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6678 .await;
6679 assert!(
6680 change_notification_1.text_document.version > open_notification.text_document.version
6681 );
6682
6683 // Report some diagnostics for the initial version of the buffer
6684 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6685 lsp::PublishDiagnosticsParams {
6686 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6687 version: Some(open_notification.text_document.version),
6688 diagnostics: vec![
6689 lsp::Diagnostic {
6690 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6691 severity: Some(DiagnosticSeverity::ERROR),
6692 message: "undefined variable 'A'".to_string(),
6693 source: Some("disk".to_string()),
6694 ..Default::default()
6695 },
6696 lsp::Diagnostic {
6697 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6698 severity: Some(DiagnosticSeverity::ERROR),
6699 message: "undefined variable 'BB'".to_string(),
6700 source: Some("disk".to_string()),
6701 ..Default::default()
6702 },
6703 lsp::Diagnostic {
6704 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6705 severity: Some(DiagnosticSeverity::ERROR),
6706 source: Some("disk".to_string()),
6707 message: "undefined variable 'CCC'".to_string(),
6708 ..Default::default()
6709 },
6710 ],
6711 },
6712 );
6713
6714 // The diagnostics have moved down since they were created.
6715 buffer.next_notification(cx).await;
6716 buffer.read_with(cx, |buffer, _| {
6717 assert_eq!(
6718 buffer
6719 .snapshot()
6720 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6721 .collect::<Vec<_>>(),
6722 &[
6723 DiagnosticEntry {
6724 range: Point::new(3, 9)..Point::new(3, 11),
6725 diagnostic: Diagnostic {
6726 severity: DiagnosticSeverity::ERROR,
6727 message: "undefined variable 'BB'".to_string(),
6728 is_disk_based: true,
6729 group_id: 1,
6730 is_primary: true,
6731 ..Default::default()
6732 },
6733 },
6734 DiagnosticEntry {
6735 range: Point::new(4, 9)..Point::new(4, 12),
6736 diagnostic: Diagnostic {
6737 severity: DiagnosticSeverity::ERROR,
6738 message: "undefined variable 'CCC'".to_string(),
6739 is_disk_based: true,
6740 group_id: 2,
6741 is_primary: true,
6742 ..Default::default()
6743 }
6744 }
6745 ]
6746 );
6747 assert_eq!(
6748 chunks_with_diagnostics(buffer, 0..buffer.len()),
6749 [
6750 ("\n\nfn a() { ".to_string(), None),
6751 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6752 (" }\nfn b() { ".to_string(), None),
6753 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6754 (" }\nfn c() { ".to_string(), None),
6755 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6756 (" }\n".to_string(), None),
6757 ]
6758 );
6759 assert_eq!(
6760 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6761 [
6762 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6763 (" }\nfn c() { ".to_string(), None),
6764 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6765 ]
6766 );
6767 });
6768
6769 // Ensure overlapping diagnostics are highlighted correctly.
6770 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6771 lsp::PublishDiagnosticsParams {
6772 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6773 version: Some(open_notification.text_document.version),
6774 diagnostics: vec![
6775 lsp::Diagnostic {
6776 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6777 severity: Some(DiagnosticSeverity::ERROR),
6778 message: "undefined variable 'A'".to_string(),
6779 source: Some("disk".to_string()),
6780 ..Default::default()
6781 },
6782 lsp::Diagnostic {
6783 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6784 severity: Some(DiagnosticSeverity::WARNING),
6785 message: "unreachable statement".to_string(),
6786 source: Some("disk".to_string()),
6787 ..Default::default()
6788 },
6789 ],
6790 },
6791 );
6792
6793 buffer.next_notification(cx).await;
6794 buffer.read_with(cx, |buffer, _| {
6795 assert_eq!(
6796 buffer
6797 .snapshot()
6798 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6799 .collect::<Vec<_>>(),
6800 &[
6801 DiagnosticEntry {
6802 range: Point::new(2, 9)..Point::new(2, 12),
6803 diagnostic: Diagnostic {
6804 severity: DiagnosticSeverity::WARNING,
6805 message: "unreachable statement".to_string(),
6806 is_disk_based: true,
6807 group_id: 4,
6808 is_primary: true,
6809 ..Default::default()
6810 }
6811 },
6812 DiagnosticEntry {
6813 range: Point::new(2, 9)..Point::new(2, 10),
6814 diagnostic: Diagnostic {
6815 severity: DiagnosticSeverity::ERROR,
6816 message: "undefined variable 'A'".to_string(),
6817 is_disk_based: true,
6818 group_id: 3,
6819 is_primary: true,
6820 ..Default::default()
6821 },
6822 }
6823 ]
6824 );
6825 assert_eq!(
6826 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6827 [
6828 ("fn a() { ".to_string(), None),
6829 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6830 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6831 ("\n".to_string(), None),
6832 ]
6833 );
6834 assert_eq!(
6835 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6836 [
6837 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6838 ("\n".to_string(), None),
6839 ]
6840 );
6841 });
6842
6843 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6844 // changes since the last save.
6845 buffer.update(cx, |buffer, cx| {
6846 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6847 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6848 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6849 });
6850 let change_notification_2 = fake_server
6851 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6852 .await;
6853 assert!(
6854 change_notification_2.text_document.version
6855 > change_notification_1.text_document.version
6856 );
6857
6858 // Handle out-of-order diagnostics
6859 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6860 lsp::PublishDiagnosticsParams {
6861 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6862 version: Some(change_notification_2.text_document.version),
6863 diagnostics: vec![
6864 lsp::Diagnostic {
6865 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6866 severity: Some(DiagnosticSeverity::ERROR),
6867 message: "undefined variable 'BB'".to_string(),
6868 source: Some("disk".to_string()),
6869 ..Default::default()
6870 },
6871 lsp::Diagnostic {
6872 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6873 severity: Some(DiagnosticSeverity::WARNING),
6874 message: "undefined variable 'A'".to_string(),
6875 source: Some("disk".to_string()),
6876 ..Default::default()
6877 },
6878 ],
6879 },
6880 );
6881
6882 buffer.next_notification(cx).await;
6883 buffer.read_with(cx, |buffer, _| {
6884 assert_eq!(
6885 buffer
6886 .snapshot()
6887 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6888 .collect::<Vec<_>>(),
6889 &[
6890 DiagnosticEntry {
6891 range: Point::new(2, 21)..Point::new(2, 22),
6892 diagnostic: Diagnostic {
6893 severity: DiagnosticSeverity::WARNING,
6894 message: "undefined variable 'A'".to_string(),
6895 is_disk_based: true,
6896 group_id: 6,
6897 is_primary: true,
6898 ..Default::default()
6899 }
6900 },
6901 DiagnosticEntry {
6902 range: Point::new(3, 9)..Point::new(3, 14),
6903 diagnostic: Diagnostic {
6904 severity: DiagnosticSeverity::ERROR,
6905 message: "undefined variable 'BB'".to_string(),
6906 is_disk_based: true,
6907 group_id: 5,
6908 is_primary: true,
6909 ..Default::default()
6910 },
6911 }
6912 ]
6913 );
6914 });
6915 }
6916
6917 #[gpui::test]
6918 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6919 cx.foreground().forbid_parking();
6920
6921 let text = concat!(
6922 "let one = ;\n", //
6923 "let two = \n",
6924 "let three = 3;\n",
6925 );
6926
6927 let fs = FakeFs::new(cx.background());
6928 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6929
6930 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6931 let buffer = project
6932 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6933 .await
6934 .unwrap();
6935
6936 project.update(cx, |project, cx| {
6937 project
6938 .update_buffer_diagnostics(
6939 &buffer,
6940 vec![
6941 DiagnosticEntry {
6942 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6943 diagnostic: Diagnostic {
6944 severity: DiagnosticSeverity::ERROR,
6945 message: "syntax error 1".to_string(),
6946 ..Default::default()
6947 },
6948 },
6949 DiagnosticEntry {
6950 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6951 diagnostic: Diagnostic {
6952 severity: DiagnosticSeverity::ERROR,
6953 message: "syntax error 2".to_string(),
6954 ..Default::default()
6955 },
6956 },
6957 ],
6958 None,
6959 cx,
6960 )
6961 .unwrap();
6962 });
6963
6964 // An empty range is extended forward to include the following character.
6965 // At the end of a line, an empty range is extended backward to include
6966 // the preceding character.
6967 buffer.read_with(cx, |buffer, _| {
6968 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6969 assert_eq!(
6970 chunks
6971 .iter()
6972 .map(|(s, d)| (s.as_str(), *d))
6973 .collect::<Vec<_>>(),
6974 &[
6975 ("let one = ", None),
6976 (";", Some(DiagnosticSeverity::ERROR)),
6977 ("\nlet two =", None),
6978 (" ", Some(DiagnosticSeverity::ERROR)),
6979 ("\nlet three = 3;\n", None)
6980 ]
6981 );
6982 });
6983 }
6984
6985 #[gpui::test]
6986 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6987 cx.foreground().forbid_parking();
6988
6989 let mut language = Language::new(
6990 LanguageConfig {
6991 name: "Rust".into(),
6992 path_suffixes: vec!["rs".to_string()],
6993 ..Default::default()
6994 },
6995 Some(tree_sitter_rust::language()),
6996 );
6997 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6998
6999 let text = "
7000 fn a() {
7001 f1();
7002 }
7003 fn b() {
7004 f2();
7005 }
7006 fn c() {
7007 f3();
7008 }
7009 "
7010 .unindent();
7011
7012 let fs = FakeFs::new(cx.background());
7013 fs.insert_tree(
7014 "/dir",
7015 json!({
7016 "a.rs": text.clone(),
7017 }),
7018 )
7019 .await;
7020
7021 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7022 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7023 let buffer = project
7024 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7025 .await
7026 .unwrap();
7027
7028 let mut fake_server = fake_servers.next().await.unwrap();
7029 let lsp_document_version = fake_server
7030 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7031 .await
7032 .text_document
7033 .version;
7034
7035 // Simulate editing the buffer after the language server computes some edits.
7036 buffer.update(cx, |buffer, cx| {
7037 buffer.edit(
7038 [(
7039 Point::new(0, 0)..Point::new(0, 0),
7040 "// above first function\n",
7041 )],
7042 cx,
7043 );
7044 buffer.edit(
7045 [(
7046 Point::new(2, 0)..Point::new(2, 0),
7047 " // inside first function\n",
7048 )],
7049 cx,
7050 );
7051 buffer.edit(
7052 [(
7053 Point::new(6, 4)..Point::new(6, 4),
7054 "// inside second function ",
7055 )],
7056 cx,
7057 );
7058
7059 assert_eq!(
7060 buffer.text(),
7061 "
7062 // above first function
7063 fn a() {
7064 // inside first function
7065 f1();
7066 }
7067 fn b() {
7068 // inside second function f2();
7069 }
7070 fn c() {
7071 f3();
7072 }
7073 "
7074 .unindent()
7075 );
7076 });
7077
7078 let edits = project
7079 .update(cx, |project, cx| {
7080 project.edits_from_lsp(
7081 &buffer,
7082 vec![
7083 // replace body of first function
7084 lsp::TextEdit {
7085 range: lsp::Range::new(
7086 lsp::Position::new(0, 0),
7087 lsp::Position::new(3, 0),
7088 ),
7089 new_text: "
7090 fn a() {
7091 f10();
7092 }
7093 "
7094 .unindent(),
7095 },
7096 // edit inside second function
7097 lsp::TextEdit {
7098 range: lsp::Range::new(
7099 lsp::Position::new(4, 6),
7100 lsp::Position::new(4, 6),
7101 ),
7102 new_text: "00".into(),
7103 },
7104 // edit inside third function via two distinct edits
7105 lsp::TextEdit {
7106 range: lsp::Range::new(
7107 lsp::Position::new(7, 5),
7108 lsp::Position::new(7, 5),
7109 ),
7110 new_text: "4000".into(),
7111 },
7112 lsp::TextEdit {
7113 range: lsp::Range::new(
7114 lsp::Position::new(7, 5),
7115 lsp::Position::new(7, 6),
7116 ),
7117 new_text: "".into(),
7118 },
7119 ],
7120 Some(lsp_document_version),
7121 cx,
7122 )
7123 })
7124 .await
7125 .unwrap();
7126
7127 buffer.update(cx, |buffer, cx| {
7128 for (range, new_text) in edits {
7129 buffer.edit([(range, new_text)], cx);
7130 }
7131 assert_eq!(
7132 buffer.text(),
7133 "
7134 // above first function
7135 fn a() {
7136 // inside first function
7137 f10();
7138 }
7139 fn b() {
7140 // inside second function f200();
7141 }
7142 fn c() {
7143 f4000();
7144 }
7145 "
7146 .unindent()
7147 );
7148 });
7149 }
7150
7151 #[gpui::test]
7152 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7153 cx.foreground().forbid_parking();
7154
7155 let text = "
7156 use a::b;
7157 use a::c;
7158
7159 fn f() {
7160 b();
7161 c();
7162 }
7163 "
7164 .unindent();
7165
7166 let fs = FakeFs::new(cx.background());
7167 fs.insert_tree(
7168 "/dir",
7169 json!({
7170 "a.rs": text.clone(),
7171 }),
7172 )
7173 .await;
7174
7175 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7176 let buffer = project
7177 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7178 .await
7179 .unwrap();
7180
7181 // Simulate the language server sending us a small edit in the form of a very large diff.
7182 // Rust-analyzer does this when performing a merge-imports code action.
7183 let edits = project
7184 .update(cx, |project, cx| {
7185 project.edits_from_lsp(
7186 &buffer,
7187 [
7188 // Replace the first use statement without editing the semicolon.
7189 lsp::TextEdit {
7190 range: lsp::Range::new(
7191 lsp::Position::new(0, 4),
7192 lsp::Position::new(0, 8),
7193 ),
7194 new_text: "a::{b, c}".into(),
7195 },
7196 // Reinsert the remainder of the file between the semicolon and the final
7197 // newline of the file.
7198 lsp::TextEdit {
7199 range: lsp::Range::new(
7200 lsp::Position::new(0, 9),
7201 lsp::Position::new(0, 9),
7202 ),
7203 new_text: "\n\n".into(),
7204 },
7205 lsp::TextEdit {
7206 range: lsp::Range::new(
7207 lsp::Position::new(0, 9),
7208 lsp::Position::new(0, 9),
7209 ),
7210 new_text: "
7211 fn f() {
7212 b();
7213 c();
7214 }"
7215 .unindent(),
7216 },
7217 // Delete everything after the first newline of the file.
7218 lsp::TextEdit {
7219 range: lsp::Range::new(
7220 lsp::Position::new(1, 0),
7221 lsp::Position::new(7, 0),
7222 ),
7223 new_text: "".into(),
7224 },
7225 ],
7226 None,
7227 cx,
7228 )
7229 })
7230 .await
7231 .unwrap();
7232
7233 buffer.update(cx, |buffer, cx| {
7234 let edits = edits
7235 .into_iter()
7236 .map(|(range, text)| {
7237 (
7238 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7239 text,
7240 )
7241 })
7242 .collect::<Vec<_>>();
7243
7244 assert_eq!(
7245 edits,
7246 [
7247 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7248 (Point::new(1, 0)..Point::new(2, 0), "".into())
7249 ]
7250 );
7251
7252 for (range, new_text) in edits {
7253 buffer.edit([(range, new_text)], cx);
7254 }
7255 assert_eq!(
7256 buffer.text(),
7257 "
7258 use a::{b, c};
7259
7260 fn f() {
7261 b();
7262 c();
7263 }
7264 "
7265 .unindent()
7266 );
7267 });
7268 }
7269
7270 #[gpui::test]
7271 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7272 cx.foreground().forbid_parking();
7273
7274 let text = "
7275 use a::b;
7276 use a::c;
7277
7278 fn f() {
7279 b();
7280 c();
7281 }
7282 "
7283 .unindent();
7284
7285 let fs = FakeFs::new(cx.background());
7286 fs.insert_tree(
7287 "/dir",
7288 json!({
7289 "a.rs": text.clone(),
7290 }),
7291 )
7292 .await;
7293
7294 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7295 let buffer = project
7296 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7297 .await
7298 .unwrap();
7299
7300 // Simulate the language server sending us edits out of order,
7301 // with some ranges inverted.
7302 let edits = project
7303 .update(cx, |project, cx| {
7304 project.edits_from_lsp(
7305 &buffer,
7306 [
7307 lsp::TextEdit {
7308 range: lsp::Range::new(
7309 lsp::Position::new(0, 9),
7310 lsp::Position::new(0, 9),
7311 ),
7312 new_text: "\n\n".into(),
7313 },
7314 lsp::TextEdit {
7315 range: lsp::Range::new(
7316 lsp::Position::new(0, 8),
7317 lsp::Position::new(0, 4),
7318 ),
7319 new_text: "a::{b, c}".into(),
7320 },
7321 lsp::TextEdit {
7322 range: lsp::Range::new(
7323 lsp::Position::new(1, 0),
7324 lsp::Position::new(7, 0),
7325 ),
7326 new_text: "".into(),
7327 },
7328 lsp::TextEdit {
7329 range: lsp::Range::new(
7330 lsp::Position::new(0, 9),
7331 lsp::Position::new(0, 9),
7332 ),
7333 new_text: "
7334 fn f() {
7335 b();
7336 c();
7337 }"
7338 .unindent(),
7339 },
7340 ],
7341 None,
7342 cx,
7343 )
7344 })
7345 .await
7346 .unwrap();
7347
7348 buffer.update(cx, |buffer, cx| {
7349 let edits = edits
7350 .into_iter()
7351 .map(|(range, text)| {
7352 (
7353 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7354 text,
7355 )
7356 })
7357 .collect::<Vec<_>>();
7358
7359 assert_eq!(
7360 edits,
7361 [
7362 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7363 (Point::new(1, 0)..Point::new(2, 0), "".into())
7364 ]
7365 );
7366
7367 for (range, new_text) in edits {
7368 buffer.edit([(range, new_text)], cx);
7369 }
7370 assert_eq!(
7371 buffer.text(),
7372 "
7373 use a::{b, c};
7374
7375 fn f() {
7376 b();
7377 c();
7378 }
7379 "
7380 .unindent()
7381 );
7382 });
7383 }
7384
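// Collects the buffer's chunks in the given range, merging adjacent chunks that share the same diagnostic severity.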
7385 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7386 buffer: &Buffer,
7387 range: Range<T>,
7388 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7389 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7390 for chunk in buffer.snapshot().chunks(range, true) {
7391 if chunks.last().map_or(false, |prev_chunk| {
7392 prev_chunk.1 == chunk.diagnostic_severity
7393 }) {
7394 chunks.last_mut().unwrap().0.push_str(chunk.text);
7395 } else {
7396 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7397 }
7398 }
7399 chunks
7400 }
7401
7402 #[gpui::test]
7403 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7404 let dir = temp_tree(json!({
7405 "root": {
7406 "dir1": {},
7407 "dir2": {
7408 "dir3": {}
7409 }
7410 }
7411 }));
7412
7413 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7414 let cancel_flag = Default::default();
7415 let results = project
7416 .read_with(cx, |project, cx| {
7417 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7418 })
7419 .await;
7420
7421 assert!(results.is_empty());
7422 }
7423
7424 #[gpui::test(iterations = 10)]
7425 async fn test_definition(cx: &mut gpui::TestAppContext) {
7426 let mut language = Language::new(
7427 LanguageConfig {
7428 name: "Rust".into(),
7429 path_suffixes: vec!["rs".to_string()],
7430 ..Default::default()
7431 },
7432 Some(tree_sitter_rust::language()),
7433 );
7434 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7435
7436 let fs = FakeFs::new(cx.background());
7437 fs.insert_tree(
7438 "/dir",
7439 json!({
7440 "a.rs": "const fn a() { A }",
7441 "b.rs": "const y: i32 = crate::a()",
7442 }),
7443 )
7444 .await;
7445
7446 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7447 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7448
7449 let buffer = project
7450 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7451 .await
7452 .unwrap();
7453
7454 let fake_server = fake_servers.next().await.unwrap();
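// Respond to the go-to-definition request with a location in a file that lies outside the project's worktree.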
7455 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7456 let params = params.text_document_position_params;
7457 assert_eq!(
7458 params.text_document.uri.to_file_path().unwrap(),
7459 Path::new("/dir/b.rs"),
7460 );
7461 assert_eq!(params.position, lsp::Position::new(0, 22));
7462
7463 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7464 lsp::Location::new(
7465 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7466 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7467 ),
7468 )))
7469 });
7470
7471 let mut definitions = project
7472 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7473 .await
7474 .unwrap();
7475
7476 assert_eq!(definitions.len(), 1);
7477 let definition = definitions.pop().unwrap();
7478 cx.update(|cx| {
7479 let target_buffer = definition.buffer.read(cx);
7480 assert_eq!(
7481 target_buffer
7482 .file()
7483 .unwrap()
7484 .as_local()
7485 .unwrap()
7486 .abs_path(cx),
7487 Path::new("/dir/a.rs"),
7488 );
7489 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7490 assert_eq!(
7491 list_worktrees(&project, cx),
7492 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7493 );
7494
7495 drop(definition);
7496 });
7497 cx.read(|cx| {
7498 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7499 });
7500
7501 fn list_worktrees<'a>(
7502 project: &'a ModelHandle<Project>,
7503 cx: &'a AppContext,
7504 ) -> Vec<(&'a Path, bool)> {
7505 project
7506 .read(cx)
7507 .worktrees(cx)
7508 .map(|worktree| {
7509 let worktree = worktree.read(cx);
7510 (
7511 worktree.as_local().unwrap().abs_path().as_ref(),
7512 worktree.is_visible(),
7513 )
7514 })
7515 .collect::<Vec<_>>()
7516 }
7517 }
7518
7519 #[gpui::test]
7520 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7521 let mut language = Language::new(
7522 LanguageConfig {
7523 name: "TypeScript".into(),
7524 path_suffixes: vec!["ts".to_string()],
7525 ..Default::default()
7526 },
7527 Some(tree_sitter_typescript::language_typescript()),
7528 );
7529 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7530
7531 let fs = FakeFs::new(cx.background());
7532 fs.insert_tree(
7533 "/dir",
7534 json!({
7535 "a.ts": "",
7536 }),
7537 )
7538 .await;
7539
7540 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7541 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7542 let buffer = project
7543 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7544 .await
7545 .unwrap();
7546
7547 let fake_server = fake_language_servers.next().await.unwrap();
7548
7549 let text = "let a = b.fqn";
7550 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7551 let completions = project.update(cx, |project, cx| {
7552 project.completions(&buffer, text.len(), cx)
7553 });
7554
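// The completion items below carry no edit ranges, so the word preceding the cursor ("fqn") is used as the range to replace.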
7555 fake_server
7556 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7557 Ok(Some(lsp::CompletionResponse::Array(vec![
7558 lsp::CompletionItem {
7559 label: "fullyQualifiedName?".into(),
7560 insert_text: Some("fullyQualifiedName".into()),
7561 ..Default::default()
7562 },
7563 ])))
7564 })
7565 .next()
7566 .await;
7567 let completions = completions.await.unwrap();
7568 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7569 assert_eq!(completions.len(), 1);
7570 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7571 assert_eq!(
7572 completions[0].old_range.to_offset(&snapshot),
7573 text.len() - 3..text.len()
7574 );
7575 }
7576
7577 #[gpui::test(iterations = 10)]
7578 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7579 let mut language = Language::new(
7580 LanguageConfig {
7581 name: "TypeScript".into(),
7582 path_suffixes: vec!["ts".to_string()],
7583 ..Default::default()
7584 },
7585 None,
7586 );
7587 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7588
7589 let fs = FakeFs::new(cx.background());
7590 fs.insert_tree(
7591 "/dir",
7592 json!({
7593 "a.ts": "a",
7594 }),
7595 )
7596 .await;
7597
7598 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7599 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7600 let buffer = project
7601 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7602 .await
7603 .unwrap();
7604
7605 let fake_server = fake_language_servers.next().await.unwrap();
7606
7607 // The language server returns code actions that contain commands, not edits.
7608 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7609 fake_server
7610 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7611 Ok(Some(vec![
7612 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7613 title: "The code action".into(),
7614 command: Some(lsp::Command {
7615 title: "The command".into(),
7616 command: "_the/command".into(),
7617 arguments: Some(vec![json!("the-argument")]),
7618 }),
7619 ..Default::default()
7620 }),
7621 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7622 title: "two".into(),
7623 ..Default::default()
7624 }),
7625 ]))
7626 })
7627 .next()
7628 .await;
7629
7630 let action = actions.await.unwrap()[0].clone();
7631 let apply = project.update(cx, |project, cx| {
7632 project.apply_code_action(buffer.clone(), action, true, cx)
7633 });
7634
7635 // Resolving the code action does not populate its edits. In the absence of
7636 // edits, we must execute the action's command.
7637 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7638 |action, _| async move { Ok(action) },
7639 );
7640
7641 // While executing the command, the language server sends the editor
7642 // a `workspace/applyEdit` request.
7643 fake_server
7644 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7645 let fake = fake_server.clone();
7646 move |params, _| {
7647 assert_eq!(params.command, "_the/command");
7648 let fake = fake.clone();
7649 async move {
7650 fake.server
7651 .request::<lsp::request::ApplyWorkspaceEdit>(
7652 lsp::ApplyWorkspaceEditParams {
7653 label: None,
7654 edit: lsp::WorkspaceEdit {
7655 changes: Some(
7656 [(
7657 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7658 vec![lsp::TextEdit {
7659 range: lsp::Range::new(
7660 lsp::Position::new(0, 0),
7661 lsp::Position::new(0, 0),
7662 ),
7663 new_text: "X".into(),
7664 }],
7665 )]
7666 .into_iter()
7667 .collect(),
7668 ),
7669 ..Default::default()
7670 },
7671 },
7672 )
7673 .await
7674 .unwrap();
7675 Ok(Some(json!(null)))
7676 }
7677 }
7678 })
7679 .next()
7680 .await;
7681
7682 // Applying the code action returns a project transaction containing the edits
7683 // sent by the language server in its `workspace/applyEdit` request.
7684 let transaction = apply.await.unwrap();
7685 assert!(transaction.0.contains_key(&buffer));
7686 buffer.update(cx, |buffer, cx| {
7687 assert_eq!(buffer.text(), "Xa");
7688 buffer.undo(cx);
7689 assert_eq!(buffer.text(), "a");
7690 });
7691 }
7692
7693 #[gpui::test]
7694 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7695 let fs = FakeFs::new(cx.background());
7696 fs.insert_tree(
7697 "/dir",
7698 json!({
7699 "file1": "the old contents",
7700 }),
7701 )
7702 .await;
7703
7704 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7705 let buffer = project
7706 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7707 .await
7708 .unwrap();
7709 buffer
7710 .update(cx, |buffer, cx| {
7711 assert_eq!(buffer.text(), "the old contents");
7712 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7713 buffer.save(cx)
7714 })
7715 .await
7716 .unwrap();
7717
7718 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7719 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7720 }
7721
7722 #[gpui::test]
7723 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7724 let fs = FakeFs::new(cx.background());
7725 fs.insert_tree(
7726 "/dir",
7727 json!({
7728 "file1": "the old contents",
7729 }),
7730 )
7731 .await;
7732
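            // Open a worktree rooted at a single file rather than a directory.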
7733 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7734 let buffer = project
7735 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7736 .await
7737 .unwrap();
7738 buffer
7739 .update(cx, |buffer, cx| {
7740 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7741 buffer.save(cx)
7742 })
7743 .await
7744 .unwrap();
7745
7746 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7747 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7748 }
7749
7750 #[gpui::test]
7751 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7752 let fs = FakeFs::new(cx.background());
7753 fs.insert_tree("/dir", json!({})).await;
7754
7755 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
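            // Create an untitled buffer and edit it so that it becomes dirty.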
7756 let buffer = project.update(cx, |project, cx| {
7757 project.create_buffer("", None, cx).unwrap()
7758 });
7759 buffer.update(cx, |buffer, cx| {
7760 buffer.edit([(0..0, "abc")], cx);
7761 assert!(buffer.is_dirty());
7762 assert!(!buffer.has_conflict());
7763 });
7764 project
7765 .update(cx, |project, cx| {
7766 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7767 })
7768 .await
7769 .unwrap();
7770 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7771 buffer.read_with(cx, |buffer, cx| {
7772 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7773 assert!(!buffer.is_dirty());
7774 assert!(!buffer.has_conflict());
7775 });
7776
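            // Opening the path that was just saved should return the same buffer.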
7777 let opened_buffer = project
7778 .update(cx, |project, cx| {
7779 project.open_local_buffer("/dir/file1", cx)
7780 })
7781 .await
7782 .unwrap();
7783 assert_eq!(opened_buffer, buffer);
7784 }
7785
7786 #[gpui::test(retries = 5)]
7787 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7788 let dir = temp_tree(json!({
7789 "a": {
7790 "file1": "",
7791 "file2": "",
7792 "file3": "",
7793 },
7794 "b": {
7795 "c": {
7796 "file4": "",
7797 "file5": "",
7798 }
7799 }
7800 }));
7801
7802 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7803 let rpc = project.read_with(cx, |p, _| p.client.clone());
7804
7805 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7806 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7807 async move { buffer.await.unwrap() }
7808 };
7809 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7810 project.read_with(cx, |project, cx| {
7811 let tree = project.worktrees(cx).next().unwrap();
7812 tree.read(cx)
7813 .entry_for_path(path)
7814                .unwrap_or_else(|| panic!("no entry for path {}", path))
7815 .id
7816 })
7817 };
7818
7819 let buffer2 = buffer_for_path("a/file2", cx).await;
7820 let buffer3 = buffer_for_path("a/file3", cx).await;
7821 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7822 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7823
7824 let file2_id = id_for_path("a/file2", &cx);
7825 let file3_id = id_for_path("a/file3", &cx);
7826 let file4_id = id_for_path("b/c/file4", &cx);
7827
7828 // Create a remote copy of this worktree.
7829 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7830 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7831 let (remote, load_task) = cx.update(|cx| {
7832 Worktree::remote(
7833 1,
7834 1,
7835 initial_snapshot.to_proto(&Default::default(), true),
7836 rpc.clone(),
7837 cx,
7838 )
7839 });
7841 load_task.await;
7842
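            // Initially, none of the open buffers are dirty.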
7843 cx.read(|cx| {
7844 assert!(!buffer2.read(cx).is_dirty());
7845 assert!(!buffer3.read(cx).is_dirty());
7846 assert!(!buffer4.read(cx).is_dirty());
7847 assert!(!buffer5.read(cx).is_dirty());
7848 });
7849
7850 // Rename and delete files and directories.
7851 tree.flush_fs_events(&cx).await;
7852 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7853 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7854 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7855 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7856 tree.flush_fs_events(&cx).await;
7857
7858 let expected_paths = vec![
7859 "a",
7860 "a/file1",
7861 "a/file2.new",
7862 "b",
7863 "d",
7864 "d/file3",
7865 "d/file4",
7866 ];
7867
7868 cx.read(|app| {
7869 assert_eq!(
7870 tree.read(app)
7871 .paths()
7872 .map(|p| p.to_str().unwrap())
7873 .collect::<Vec<_>>(),
7874 expected_paths
7875 );
7876
7877 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7878 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7879 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7880
7881 assert_eq!(
7882 buffer2.read(app).file().unwrap().path().as_ref(),
7883 Path::new("a/file2.new")
7884 );
7885 assert_eq!(
7886 buffer3.read(app).file().unwrap().path().as_ref(),
7887 Path::new("d/file3")
7888 );
7889 assert_eq!(
7890 buffer4.read(app).file().unwrap().path().as_ref(),
7891 Path::new("d/file4")
7892 );
7893 assert_eq!(
7894 buffer5.read(app).file().unwrap().path().as_ref(),
7895 Path::new("b/c/file5")
7896 );
7897
7898 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7899 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7900 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7901 assert!(buffer5.read(app).file().unwrap().is_deleted());
7902 });
7903
7904 // Update the remote worktree. Check that it becomes consistent with the
7905 // local worktree.
7906 remote.update(cx, |remote, cx| {
7907 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7908 &initial_snapshot,
7909 1,
7910 1,
7911 true,
7912 );
7913 remote
7914 .as_remote_mut()
7915 .unwrap()
7916 .snapshot
7917 .apply_remote_update(update_message)
7918 .unwrap();
7919
7920 assert_eq!(
7921 remote
7922 .paths()
7923 .map(|p| p.to_str().unwrap())
7924 .collect::<Vec<_>>(),
7925 expected_paths
7926 );
7927 });
7928 }
7929
7930 #[gpui::test]
7931 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7932 let fs = FakeFs::new(cx.background());
7933 fs.insert_tree(
7934 "/dir",
7935 json!({
7936 "a.txt": "a-contents",
7937 "b.txt": "b-contents",
7938 }),
7939 )
7940 .await;
7941
7942 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7943
7944 // Spawn multiple tasks to open paths, repeating some paths.
7945 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7946 (
7947 p.open_local_buffer("/dir/a.txt", cx),
7948 p.open_local_buffer("/dir/b.txt", cx),
7949 p.open_local_buffer("/dir/a.txt", cx),
7950 )
7951 });
7952
7953 let buffer_a_1 = buffer_a_1.await.unwrap();
7954 let buffer_a_2 = buffer_a_2.await.unwrap();
7955 let buffer_b = buffer_b.await.unwrap();
7956 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7957 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7958
7959 // There is only one buffer per path.
7960 let buffer_a_id = buffer_a_1.id();
7961 assert_eq!(buffer_a_2.id(), buffer_a_id);
7962
7963 // Open the same path again while it is still open.
7964 drop(buffer_a_1);
7965 let buffer_a_3 = project
7966 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7967 .await
7968 .unwrap();
7969
7970 // There's still only one buffer per path.
7971 assert_eq!(buffer_a_3.id(), buffer_a_id);
7972 }
7973
7974 #[gpui::test]
7975 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7976 let fs = FakeFs::new(cx.background());
7977 fs.insert_tree(
7978 "/dir",
7979 json!({
7980 "file1": "abc",
7981 "file2": "def",
7982 "file3": "ghi",
7983 }),
7984 )
7985 .await;
7986
7987 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7988
7989 let buffer1 = project
7990 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7991 .await
7992 .unwrap();
7993 let events = Rc::new(RefCell::new(Vec::new()));
7994
7995 // initially, the buffer isn't dirty.
7996 buffer1.update(cx, |buffer, cx| {
7997 cx.subscribe(&buffer1, {
7998 let events = events.clone();
7999 move |_, _, event, _| match event {
8000 BufferEvent::Operation(_) => {}
8001 _ => events.borrow_mut().push(event.clone()),
8002 }
8003 })
8004 .detach();
8005
8006 assert!(!buffer.is_dirty());
8007 assert!(events.borrow().is_empty());
8008
8009 buffer.edit([(1..2, "")], cx);
8010 });
8011
8012 // after the first edit, the buffer is dirty, and emits a dirtied event.
8013 buffer1.update(cx, |buffer, cx| {
8014            assert_eq!(buffer.text(), "ac");
8015 assert!(buffer.is_dirty());
8016 assert_eq!(
8017 *events.borrow(),
8018 &[language::Event::Edited, language::Event::Dirtied]
8019 );
8020 events.borrow_mut().clear();
8021 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
8022 });
8023
8024 // after saving, the buffer is not dirty, and emits a saved event.
8025 buffer1.update(cx, |buffer, cx| {
8026 assert!(!buffer.is_dirty());
8027 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8028 events.borrow_mut().clear();
8029
8030 buffer.edit([(1..1, "B")], cx);
8031 buffer.edit([(2..2, "D")], cx);
8032 });
8033
8034 // after editing again, the buffer is dirty, and emits another dirty event.
8035 buffer1.update(cx, |buffer, cx| {
8036            assert_eq!(buffer.text(), "aBDc");
8037 assert!(buffer.is_dirty());
8038 assert_eq!(
8039 *events.borrow(),
8040 &[
8041 language::Event::Edited,
8042 language::Event::Dirtied,
8043 language::Event::Edited,
8044 ],
8045 );
8046 events.borrow_mut().clear();
8047
8048            // TODO - currently, after restoring the buffer to its
8049            // previously-saved state, the buffer is still considered dirty.
8050 buffer.edit([(1..3, "")], cx);
8051            assert_eq!(buffer.text(), "ac");
8052 assert!(buffer.is_dirty());
8053 });
8054
8055 assert_eq!(*events.borrow(), &[language::Event::Edited]);
8056
8057 // When a file is deleted, the buffer is considered dirty.
8058 let events = Rc::new(RefCell::new(Vec::new()));
8059 let buffer2 = project
8060 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8061 .await
8062 .unwrap();
8063 buffer2.update(cx, |_, cx| {
8064 cx.subscribe(&buffer2, {
8065 let events = events.clone();
8066 move |_, _, event, _| events.borrow_mut().push(event.clone())
8067 })
8068 .detach();
8069 });
8070
8071 fs.remove_file("/dir/file2".as_ref(), Default::default())
8072 .await
8073 .unwrap();
8074 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8075 assert_eq!(
8076 *events.borrow(),
8077 &[language::Event::Dirtied, language::Event::FileHandleChanged]
8078 );
8079
8080 // When a file is already dirty when deleted, we don't emit a Dirtied event.
8081 let events = Rc::new(RefCell::new(Vec::new()));
8082 let buffer3 = project
8083 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8084 .await
8085 .unwrap();
8086 buffer3.update(cx, |_, cx| {
8087 cx.subscribe(&buffer3, {
8088 let events = events.clone();
8089 move |_, _, event, _| events.borrow_mut().push(event.clone())
8090 })
8091 .detach();
8092 });
8093
8094 buffer3.update(cx, |buffer, cx| {
8095 buffer.edit([(0..0, "x")], cx);
8096 });
8097 events.borrow_mut().clear();
8098 fs.remove_file("/dir/file3".as_ref(), Default::default())
8099 .await
8100 .unwrap();
8101 buffer3
8102 .condition(&cx, |_, _| !events.borrow().is_empty())
8103 .await;
8104 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8105 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8106 }
8107
8108 #[gpui::test]
8109 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8110 let initial_contents = "aaa\nbbbbb\nc\n";
8111 let fs = FakeFs::new(cx.background());
8112 fs.insert_tree(
8113 "/dir",
8114 json!({
8115 "the-file": initial_contents,
8116 }),
8117 )
8118 .await;
8119 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8120 let buffer = project
8121 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8122 .await
8123 .unwrap();
8124
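            // Place anchors in the buffer so we can check where they end up after the file is reloaded.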
8125 let anchors = (0..3)
8126 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8127 .collect::<Vec<_>>();
8128
8129 // Change the file on disk, adding two new lines of text, and removing
8130 // one line.
8131 buffer.read_with(cx, |buffer, _| {
8132 assert!(!buffer.is_dirty());
8133 assert!(!buffer.has_conflict());
8134 });
8135 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8136 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8137 .await
8138 .unwrap();
8139
8140 // Because the buffer was not modified, it is reloaded from disk. Its
8141 // contents are edited according to the diff between the old and new
8142 // file contents.
8143 buffer
8144 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8145 .await;
8146
8147 buffer.update(cx, |buffer, _| {
8148 assert_eq!(buffer.text(), new_contents);
8149 assert!(!buffer.is_dirty());
8150 assert!(!buffer.has_conflict());
8151
8152 let anchor_positions = anchors
8153 .iter()
8154 .map(|anchor| anchor.to_point(&*buffer))
8155 .collect::<Vec<_>>();
8156 assert_eq!(
8157 anchor_positions,
8158 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8159 );
8160 });
8161
8162 // Modify the buffer
8163 buffer.update(cx, |buffer, cx| {
8164 buffer.edit([(0..0, " ")], cx);
8165 assert!(buffer.is_dirty());
8166 assert!(!buffer.has_conflict());
8167 });
8168
8169 // Change the file on disk again, adding blank lines to the beginning.
8170 fs.save(
8171 "/dir/the-file".as_ref(),
8172 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8173 )
8174 .await
8175 .unwrap();
8176
8177 // Because the buffer is modified, it doesn't reload from disk, but is
8178 // marked as having a conflict.
8179 buffer
8180 .condition(&cx, |buffer, _| buffer.has_conflict())
8181 .await;
8182 }
8183
8184 #[gpui::test]
8185 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8186 cx.foreground().forbid_parking();
8187
8188 let fs = FakeFs::new(cx.background());
8189 fs.insert_tree(
8190 "/the-dir",
8191 json!({
8192 "a.rs": "
8193 fn foo(mut v: Vec<usize>) {
8194 for x in &v {
8195 v.push(1);
8196 }
8197 }
8198 "
8199 .unindent(),
8200 }),
8201 )
8202 .await;
8203
8204 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8205 let buffer = project
8206 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8207 .await
8208 .unwrap();
8209
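            // Publish diagnostics in which hints and their primary diagnostics reference each other via relatedInformation.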
8210 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8211 let message = lsp::PublishDiagnosticsParams {
8212 uri: buffer_uri.clone(),
8213 diagnostics: vec![
8214 lsp::Diagnostic {
8215 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8216 severity: Some(DiagnosticSeverity::WARNING),
8217 message: "error 1".to_string(),
8218 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8219 location: lsp::Location {
8220 uri: buffer_uri.clone(),
8221 range: lsp::Range::new(
8222 lsp::Position::new(1, 8),
8223 lsp::Position::new(1, 9),
8224 ),
8225 },
8226 message: "error 1 hint 1".to_string(),
8227 }]),
8228 ..Default::default()
8229 },
8230 lsp::Diagnostic {
8231 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8232 severity: Some(DiagnosticSeverity::HINT),
8233 message: "error 1 hint 1".to_string(),
8234 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8235 location: lsp::Location {
8236 uri: buffer_uri.clone(),
8237 range: lsp::Range::new(
8238 lsp::Position::new(1, 8),
8239 lsp::Position::new(1, 9),
8240 ),
8241 },
8242 message: "original diagnostic".to_string(),
8243 }]),
8244 ..Default::default()
8245 },
8246 lsp::Diagnostic {
8247 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8248 severity: Some(DiagnosticSeverity::ERROR),
8249 message: "error 2".to_string(),
8250 related_information: Some(vec![
8251 lsp::DiagnosticRelatedInformation {
8252 location: lsp::Location {
8253 uri: buffer_uri.clone(),
8254 range: lsp::Range::new(
8255 lsp::Position::new(1, 13),
8256 lsp::Position::new(1, 15),
8257 ),
8258 },
8259 message: "error 2 hint 1".to_string(),
8260 },
8261 lsp::DiagnosticRelatedInformation {
8262 location: lsp::Location {
8263 uri: buffer_uri.clone(),
8264 range: lsp::Range::new(
8265 lsp::Position::new(1, 13),
8266 lsp::Position::new(1, 15),
8267 ),
8268 },
8269 message: "error 2 hint 2".to_string(),
8270 },
8271 ]),
8272 ..Default::default()
8273 },
8274 lsp::Diagnostic {
8275 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8276 severity: Some(DiagnosticSeverity::HINT),
8277 message: "error 2 hint 1".to_string(),
8278 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8279 location: lsp::Location {
8280 uri: buffer_uri.clone(),
8281 range: lsp::Range::new(
8282 lsp::Position::new(2, 8),
8283 lsp::Position::new(2, 17),
8284 ),
8285 },
8286 message: "original diagnostic".to_string(),
8287 }]),
8288 ..Default::default()
8289 },
8290 lsp::Diagnostic {
8291 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8292 severity: Some(DiagnosticSeverity::HINT),
8293 message: "error 2 hint 2".to_string(),
8294 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8295 location: lsp::Location {
8296 uri: buffer_uri.clone(),
8297 range: lsp::Range::new(
8298 lsp::Position::new(2, 8),
8299 lsp::Position::new(2, 17),
8300 ),
8301 },
8302 message: "original diagnostic".to_string(),
8303 }]),
8304 ..Default::default()
8305 },
8306 ],
8307 version: None,
8308 };
8309
8310 project
8311 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8312 .unwrap();
8313 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8314
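            // The diagnostics are returned in position order, and each hint shares a group id with its primary diagnostic.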
8315 assert_eq!(
8316 buffer
8317 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8318 .collect::<Vec<_>>(),
8319 &[
8320 DiagnosticEntry {
8321 range: Point::new(1, 8)..Point::new(1, 9),
8322 diagnostic: Diagnostic {
8323 severity: DiagnosticSeverity::WARNING,
8324 message: "error 1".to_string(),
8325 group_id: 0,
8326 is_primary: true,
8327 ..Default::default()
8328 }
8329 },
8330 DiagnosticEntry {
8331 range: Point::new(1, 8)..Point::new(1, 9),
8332 diagnostic: Diagnostic {
8333 severity: DiagnosticSeverity::HINT,
8334 message: "error 1 hint 1".to_string(),
8335 group_id: 0,
8336 is_primary: false,
8337 ..Default::default()
8338 }
8339 },
8340 DiagnosticEntry {
8341 range: Point::new(1, 13)..Point::new(1, 15),
8342 diagnostic: Diagnostic {
8343 severity: DiagnosticSeverity::HINT,
8344 message: "error 2 hint 1".to_string(),
8345 group_id: 1,
8346 is_primary: false,
8347 ..Default::default()
8348 }
8349 },
8350 DiagnosticEntry {
8351 range: Point::new(1, 13)..Point::new(1, 15),
8352 diagnostic: Diagnostic {
8353 severity: DiagnosticSeverity::HINT,
8354 message: "error 2 hint 2".to_string(),
8355 group_id: 1,
8356 is_primary: false,
8357 ..Default::default()
8358 }
8359 },
8360 DiagnosticEntry {
8361 range: Point::new(2, 8)..Point::new(2, 17),
8362 diagnostic: Diagnostic {
8363 severity: DiagnosticSeverity::ERROR,
8364 message: "error 2".to_string(),
8365 group_id: 1,
8366 is_primary: true,
8367 ..Default::default()
8368 }
8369 }
8370 ]
8371 );
8372
8373 assert_eq!(
8374 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8375 &[
8376 DiagnosticEntry {
8377 range: Point::new(1, 8)..Point::new(1, 9),
8378 diagnostic: Diagnostic {
8379 severity: DiagnosticSeverity::WARNING,
8380 message: "error 1".to_string(),
8381 group_id: 0,
8382 is_primary: true,
8383 ..Default::default()
8384 }
8385 },
8386 DiagnosticEntry {
8387 range: Point::new(1, 8)..Point::new(1, 9),
8388 diagnostic: Diagnostic {
8389 severity: DiagnosticSeverity::HINT,
8390 message: "error 1 hint 1".to_string(),
8391 group_id: 0,
8392 is_primary: false,
8393 ..Default::default()
8394 }
8395 },
8396 ]
8397 );
8398 assert_eq!(
8399 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8400 &[
8401 DiagnosticEntry {
8402 range: Point::new(1, 13)..Point::new(1, 15),
8403 diagnostic: Diagnostic {
8404 severity: DiagnosticSeverity::HINT,
8405 message: "error 2 hint 1".to_string(),
8406 group_id: 1,
8407 is_primary: false,
8408 ..Default::default()
8409 }
8410 },
8411 DiagnosticEntry {
8412 range: Point::new(1, 13)..Point::new(1, 15),
8413 diagnostic: Diagnostic {
8414 severity: DiagnosticSeverity::HINT,
8415 message: "error 2 hint 2".to_string(),
8416 group_id: 1,
8417 is_primary: false,
8418 ..Default::default()
8419 }
8420 },
8421 DiagnosticEntry {
8422 range: Point::new(2, 8)..Point::new(2, 17),
8423 diagnostic: Diagnostic {
8424 severity: DiagnosticSeverity::ERROR,
8425 message: "error 2".to_string(),
8426 group_id: 1,
8427 is_primary: true,
8428 ..Default::default()
8429 }
8430 }
8431 ]
8432 );
8433 }
8434
8435 #[gpui::test]
8436 async fn test_rename(cx: &mut gpui::TestAppContext) {
8437 cx.foreground().forbid_parking();
8438
8439 let mut language = Language::new(
8440 LanguageConfig {
8441 name: "Rust".into(),
8442 path_suffixes: vec!["rs".to_string()],
8443 ..Default::default()
8444 },
8445 Some(tree_sitter_rust::language()),
8446 );
8447 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8448 capabilities: lsp::ServerCapabilities {
8449 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8450 prepare_provider: Some(true),
8451 work_done_progress_options: Default::default(),
8452 })),
8453 ..Default::default()
8454 },
8455 ..Default::default()
8456 });
8457
8458 let fs = FakeFs::new(cx.background());
8459 fs.insert_tree(
8460 "/dir",
8461 json!({
8462 "one.rs": "const ONE: usize = 1;",
8463 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8464 }),
8465 )
8466 .await;
8467
8468 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8469 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8470 let buffer = project
8471 .update(cx, |project, cx| {
8472 project.open_local_buffer("/dir/one.rs", cx)
8473 })
8474 .await
8475 .unwrap();
8476
8477 let fake_server = fake_servers.next().await.unwrap();
8478
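            // Prepare the rename and verify the range that the language server reports for the symbol.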
8479 let response = project.update(cx, |project, cx| {
8480 project.prepare_rename(buffer.clone(), 7, cx)
8481 });
8482 fake_server
8483 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8484 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8485 assert_eq!(params.position, lsp::Position::new(0, 7));
8486 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8487 lsp::Position::new(0, 6),
8488 lsp::Position::new(0, 9),
8489 ))))
8490 })
8491 .next()
8492 .await
8493 .unwrap();
8494 let range = response.await.unwrap().unwrap();
8495 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8496 assert_eq!(range, 6..9);
8497
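            // Perform the rename. The server's workspace edit touches both files, so the resulting transaction should too.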
8498 let response = project.update(cx, |project, cx| {
8499 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8500 });
8501 fake_server
8502 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8503 assert_eq!(
8504 params.text_document_position.text_document.uri.as_str(),
8505 "file:///dir/one.rs"
8506 );
8507 assert_eq!(
8508 params.text_document_position.position,
8509 lsp::Position::new(0, 7)
8510 );
8511 assert_eq!(params.new_name, "THREE");
8512 Ok(Some(lsp::WorkspaceEdit {
8513 changes: Some(
8514 [
8515 (
8516 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8517 vec![lsp::TextEdit::new(
8518 lsp::Range::new(
8519 lsp::Position::new(0, 6),
8520 lsp::Position::new(0, 9),
8521 ),
8522 "THREE".to_string(),
8523 )],
8524 ),
8525 (
8526 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8527 vec![
8528 lsp::TextEdit::new(
8529 lsp::Range::new(
8530 lsp::Position::new(0, 24),
8531 lsp::Position::new(0, 27),
8532 ),
8533 "THREE".to_string(),
8534 ),
8535 lsp::TextEdit::new(
8536 lsp::Range::new(
8537 lsp::Position::new(0, 35),
8538 lsp::Position::new(0, 38),
8539 ),
8540 "THREE".to_string(),
8541 ),
8542 ],
8543 ),
8544 ]
8545 .into_iter()
8546 .collect(),
8547 ),
8548 ..Default::default()
8549 }))
8550 })
8551 .next()
8552 .await
8553 .unwrap();
8554 let mut transaction = response.await.unwrap().0;
8555 assert_eq!(transaction.len(), 2);
8556 assert_eq!(
8557 transaction
8558 .remove_entry(&buffer)
8559 .unwrap()
8560 .0
8561 .read_with(cx, |buffer, _| buffer.text()),
8562 "const THREE: usize = 1;"
8563 );
8564 assert_eq!(
8565 transaction
8566 .into_keys()
8567 .next()
8568 .unwrap()
8569 .read_with(cx, |buffer, _| buffer.text()),
8570 "const TWO: usize = one::THREE + one::THREE;"
8571 );
8572 }
8573
8574 #[gpui::test]
8575 async fn test_search(cx: &mut gpui::TestAppContext) {
8576 let fs = FakeFs::new(cx.background());
8577 fs.insert_tree(
8578 "/dir",
8579 json!({
8580 "one.rs": "const ONE: usize = 1;",
8581 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8582 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8583 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8584 }),
8585 )
8586 .await;
8587 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8588 assert_eq!(
8589 search(&project, SearchQuery::text("TWO", false, true), cx)
8590 .await
8591 .unwrap(),
8592 HashMap::from_iter([
8593 ("two.rs".to_string(), vec![6..9]),
8594 ("three.rs".to_string(), vec![37..40])
8595 ])
8596 );
8597
8598 let buffer_4 = project
8599 .update(cx, |project, cx| {
8600 project.open_local_buffer("/dir/four.rs", cx)
8601 })
8602 .await
8603 .unwrap();
8604 buffer_4.update(cx, |buffer, cx| {
8605 let text = "two::TWO";
8606 buffer.edit([(20..28, text), (31..43, text)], cx);
8607 });
8608
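            // The newly-added references in the unsaved buffer are included in the search results.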
8609 assert_eq!(
8610 search(&project, SearchQuery::text("TWO", false, true), cx)
8611 .await
8612 .unwrap(),
8613 HashMap::from_iter([
8614 ("two.rs".to_string(), vec![6..9]),
8615 ("three.rs".to_string(), vec![37..40]),
8616 ("four.rs".to_string(), vec![25..28, 36..39])
8617 ])
8618 );
8619
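            // Helper that runs a project-wide search and returns the matching ranges, keyed by file path.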
8620 async fn search(
8621 project: &ModelHandle<Project>,
8622 query: SearchQuery,
8623 cx: &mut gpui::TestAppContext,
8624 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8625 let results = project
8626 .update(cx, |project, cx| project.search(query, cx))
8627 .await?;
8628
8629 Ok(results
8630 .into_iter()
8631 .map(|(buffer, ranges)| {
8632 buffer.read_with(cx, |buffer, _| {
8633 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8634 let ranges = ranges
8635 .into_iter()
8636 .map(|range| range.to_offset(buffer))
8637 .collect::<Vec<_>>();
8638 (path, ranges)
8639 })
8640 })
8641 .collect())
8642 }
8643 }
8644}