mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

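/// An entity that may be associated with an entry in one of the project's
/// worktrees (for example, a buffer opened from a file).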
pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

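/// The state of a language server managed by the project: either still being
/// started in the background, or running alongside its adapter.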
pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),
    Running {
        adapter: Arc<dyn LspAdapter>,
        server: Arc<LanguageServer>,
    },
}

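/// Tracks every open project and owns the database used to persist
/// per-project state, such as each project's online setting.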
pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

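/// A collection of worktrees and the state needed to collaborate on them:
/// open buffers, language servers and their statuses, diagnostics, and the
/// connection to the collaboration server. A project is either local (this
/// instance is the host) or remote (a replica of somebody else's project).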
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<usize, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

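/// A buffer registered with the project. Buffers are retained strongly while
/// the project is shared or remote and weakly otherwise; `Loading` queues up
/// operations that arrive before the buffer itself has finished opening.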
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

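/// Whether this project is the authoritative local copy or a remote replica
/// joined over RPC, along with the state specific to each role.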
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

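/// The status of a language server as surfaced in the UI: its in-flight
/// progress work and whether a disk-based diagnostics update is pending.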
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    pub progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

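/// A path to a file or directory within the project, addressed by the
/// worktree containing it and its worktree-relative path.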
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

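/// A symbol returned from a language server's project-wide symbol search.
/// `source_worktree_id` identifies the worktree whose language server
/// produced the symbol, while `worktree_id` and `path` locate the file that
/// contains it.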
#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}

impl HoverBlock {
    fn try_new(marked_string: MarkedString) -> Option<Self> {
        let result = match marked_string {
            MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
                text: value,
                language: Some(language),
            },
            MarkedString::String(text) => HoverBlock {
                text,
                language: None,
            },
        };
        if result.text.is_empty() {
            None
        } else {
            Some(result)
        }
    }
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

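/// The buffer transactions produced by a single project-wide operation (such
/// as applying a workspace edit), keyed by the buffers they were applied to.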
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(
        language_server_id: usize,
        diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
    ) -> Self {
        let mut this = Self {
            language_server_id,
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: self.language_server_id as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

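/// A project-wide identifier for an entry (file or directory) in a worktree,
/// allocated from a monotonically increasing counter shared by all worktrees.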
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
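    /// Registers the RPC message and request handlers through which remote
    /// peers and the collaboration server drive this project.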
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

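    /// Creates a new local project backed by the given file system. A
    /// background task keeps the project registered with the collaboration
    /// server whenever it is online and the client is connected, and
    /// unregisters it otherwise.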
380 pub fn local(
381 online: bool,
382 client: Arc<Client>,
383 user_store: ModelHandle<UserStore>,
384 project_store: ModelHandle<ProjectStore>,
385 languages: Arc<LanguageRegistry>,
386 fs: Arc<dyn Fs>,
387 cx: &mut MutableAppContext,
388 ) -> ModelHandle<Self> {
389 cx.add_model(|cx: &mut ModelContext<Self>| {
390 let (online_tx, online_rx) = watch::channel_with(online);
391 let (remote_id_tx, remote_id_rx) = watch::channel();
392 let _maintain_remote_id_task = cx.spawn_weak({
393 let status_rx = client.clone().status();
394 let online_rx = online_rx.clone();
395 move |this, mut cx| async move {
396 let mut stream = Stream::map(status_rx.clone(), drop)
397 .merge(Stream::map(online_rx.clone(), drop));
398 while stream.recv().await.is_some() {
399 let this = this.upgrade(&cx)?;
400 if status_rx.borrow().is_connected() && *online_rx.borrow() {
401 this.update(&mut cx, |this, cx| this.register(cx))
402 .await
403 .log_err()?;
404 } else {
405 this.update(&mut cx, |this, cx| this.unregister(cx))
406 .await
407 .log_err();
408 }
409 }
410 None
411 }
412 });
413
414 let handle = cx.weak_handle();
415 project_store.update(cx, |store, cx| store.add_project(handle, cx));
416
417 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
418 Self {
419 worktrees: Default::default(),
420 collaborators: Default::default(),
421 opened_buffers: Default::default(),
422 shared_buffers: Default::default(),
423 loading_buffers: Default::default(),
424 loading_local_worktrees: Default::default(),
425 buffer_snapshots: Default::default(),
426 client_state: ProjectClientState::Local {
427 is_shared: false,
428 remote_id_tx,
429 remote_id_rx,
430 online_tx,
431 online_rx,
432 _maintain_remote_id_task,
433 },
434 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
435 client_subscriptions: Vec::new(),
436 _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
437 active_entry: None,
438 languages,
439 client,
440 user_store,
441 project_store,
442 fs,
443 next_entry_id: Default::default(),
444 next_diagnostic_group_id: Default::default(),
445 language_servers: Default::default(),
446 language_server_ids: Default::default(),
447 language_server_statuses: Default::default(),
448 last_workspace_edits_by_language_server: Default::default(),
449 language_server_settings: Default::default(),
450 next_language_server_id: 0,
451 nonce: StdRng::from_entropy().gen(),
452 initialized_persistent_state: false,
453 }
454 })
455 }
456
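    /// Joins another user's project by its remote id: authenticates and
    /// connects, asks the host to accept the join request, constructs remote
    /// replicas of the host's worktrees, and loads collaborator metadata.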
457 pub async fn remote(
458 remote_id: u64,
459 client: Arc<Client>,
460 user_store: ModelHandle<UserStore>,
461 project_store: ModelHandle<ProjectStore>,
462 languages: Arc<LanguageRegistry>,
463 fs: Arc<dyn Fs>,
464 mut cx: AsyncAppContext,
465 ) -> Result<ModelHandle<Self>, JoinProjectError> {
466 client.authenticate_and_connect(true, &cx).await?;
467
468 let response = client
469 .request(proto::JoinProject {
470 project_id: remote_id,
471 })
472 .await?;
473
474 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
475 proto::join_project_response::Variant::Accept(response) => response,
476 proto::join_project_response::Variant::Decline(decline) => {
477 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
478 Some(proto::join_project_response::decline::Reason::Declined) => {
479 Err(JoinProjectError::HostDeclined)?
480 }
481 Some(proto::join_project_response::decline::Reason::Closed) => {
482 Err(JoinProjectError::HostClosedProject)?
483 }
484 Some(proto::join_project_response::decline::Reason::WentOffline) => {
485 Err(JoinProjectError::HostWentOffline)?
486 }
487 None => Err(anyhow!("missing decline reason"))?,
488 }
489 }
490 };
491
492 let replica_id = response.replica_id as ReplicaId;
493
494 let mut worktrees = Vec::new();
495 for worktree in response.worktrees {
496 let (worktree, load_task) = cx
497 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
498 worktrees.push(worktree);
499 load_task.detach();
500 }
501
502 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
503 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
504 let handle = cx.weak_handle();
505 project_store.update(cx, |store, cx| store.add_project(handle, cx));
506
507 let mut this = Self {
508 worktrees: Vec::new(),
509 loading_buffers: Default::default(),
510 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
511 shared_buffers: Default::default(),
512 loading_local_worktrees: Default::default(),
513 active_entry: None,
514 collaborators: Default::default(),
515 languages,
516 user_store: user_store.clone(),
517 project_store,
518 fs,
519 next_entry_id: Default::default(),
520 next_diagnostic_group_id: Default::default(),
521 client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
522 _subscriptions: Default::default(),
523 client: client.clone(),
524 client_state: ProjectClientState::Remote {
525 sharing_has_stopped: false,
526 remote_id,
527 replica_id,
528 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
529 async move {
530 let mut status = client.status();
531 let is_connected =
532 status.next().await.map_or(false, |s| s.is_connected());
533 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
534 if !is_connected || status.next().await.is_some() {
535 if let Some(this) = this.upgrade(&cx) {
536 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
537 }
538 }
539 Ok(())
540 }
541 .log_err()
542 }),
543 },
544 language_servers: Default::default(),
545 language_server_ids: Default::default(),
546 language_server_settings: Default::default(),
547 language_server_statuses: response
548 .language_servers
549 .into_iter()
550 .map(|server| {
551 (
552 server.id as usize,
553 LanguageServerStatus {
554 name: server.name,
555 pending_work: Default::default(),
556 has_pending_diagnostic_updates: false,
557 progress_tokens: Default::default(),
558 },
559 )
560 })
561 .collect(),
562 last_workspace_edits_by_language_server: Default::default(),
563 next_language_server_id: 0,
564 opened_buffers: Default::default(),
565 buffer_snapshots: Default::default(),
566 nonce: StdRng::from_entropy().gen(),
567 initialized_persistent_state: false,
568 };
569 for worktree in worktrees {
570 this.add_worktree(&worktree, cx);
571 }
572 this
573 });
574
575 let user_ids = response
576 .collaborators
577 .iter()
578 .map(|peer| peer.user_id)
579 .collect();
580 user_store
581 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
582 .await?;
583 let mut collaborators = HashMap::default();
584 for message in response.collaborators {
585 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
586 collaborators.insert(collaborator.peer_id, collaborator);
587 }
588
589 this.update(&mut cx, |this, _| {
590 this.collaborators = collaborators;
591 });
592
593 Ok(this)
594 }
595
596 #[cfg(any(test, feature = "test-support"))]
597 pub async fn test(
598 fs: Arc<dyn Fs>,
599 root_paths: impl IntoIterator<Item = &Path>,
600 cx: &mut gpui::TestAppContext,
601 ) -> ModelHandle<Project> {
602 if !cx.read(|cx| cx.has_global::<Settings>()) {
603 cx.update(|cx| cx.set_global(Settings::test(cx)));
604 }
605
606 let languages = Arc::new(LanguageRegistry::test());
607 let http_client = client::test::FakeHttpClient::with_404_response();
608 let client = client::Client::new(http_client.clone());
609 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
610 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
611 let project = cx.update(|cx| {
612 Project::local(true, client, user_store, project_store, languages, fs, cx)
613 });
614 for path in root_paths {
615 let (tree, _) = project
616 .update(cx, |project, cx| {
617 project.find_or_create_local_worktree(path, true, cx)
618 })
619 .await
620 .unwrap();
621 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
622 .await;
623 }
624 project
625 }
626
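    /// Restores the project's persisted online state from the database,
    /// falling back to the `projects_online_by_default` setting. Only
    /// meaningful for local projects; remote projects resolve immediately.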
627 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
628 if self.is_remote() {
629 return Task::ready(Ok(()));
630 }
631
632 let db = self.project_store.read(cx).db.clone();
633 let keys = self.db_keys_for_online_state(cx);
634 let online_by_default = cx.global::<Settings>().projects_online_by_default;
635 let read_online = cx.background().spawn(async move {
636 let values = db.read(keys)?;
637 anyhow::Ok(
638 values
639 .into_iter()
640 .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
641 )
642 });
643 cx.spawn(|this, mut cx| async move {
644 let online = read_online.await.log_err().unwrap_or(false);
645 this.update(&mut cx, |this, cx| {
646 this.initialized_persistent_state = true;
647 if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
648 let mut online_tx = online_tx.borrow_mut();
649 if *online_tx != online {
650 *online_tx = online;
651 drop(online_tx);
652 this.metadata_changed(false, cx);
653 }
654 }
655 });
656 Ok(())
657 })
658 }
659
660 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
661 if self.is_remote() || !self.initialized_persistent_state {
662 return Task::ready(Ok(()));
663 }
664
665 let db = self.project_store.read(cx).db.clone();
666 let keys = self.db_keys_for_online_state(cx);
667 let is_online = self.is_online();
668 cx.background().spawn(async move {
669 let value = &[is_online as u8];
670 db.write(keys.into_iter().map(|key| (key, value)))
671 })
672 }
673
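    /// Reconciles running language servers with the current settings: starts
    /// servers for languages that were just enabled and stops servers for
    /// languages that were just disabled.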
674 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
675 let settings = cx.global::<Settings>();
676
677 let mut language_servers_to_start = Vec::new();
678 for buffer in self.opened_buffers.values() {
679 if let Some(buffer) = buffer.upgrade(cx) {
680 let buffer = buffer.read(cx);
681 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
682 {
683 if settings.enable_language_server(Some(&language.name())) {
684 let worktree = file.worktree.read(cx);
685 language_servers_to_start.push((
686 worktree.id(),
687 worktree.as_local().unwrap().abs_path().clone(),
688 language.clone(),
689 ));
690 }
691 }
692 }
693 }
694
695 let mut language_servers_to_stop = Vec::new();
696 for language in self.languages.to_vec() {
697 if let Some(lsp_adapter) = language.lsp_adapter() {
698 if !settings.enable_language_server(Some(&language.name())) {
699 let lsp_name = lsp_adapter.name();
700 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
701 if lsp_name == *started_lsp_name {
702 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
703 }
704 }
705 }
706 }
707 }
708
709 // Stop all newly-disabled language servers.
710 for (worktree_id, adapter_name) in language_servers_to_stop {
711 self.stop_language_server(worktree_id, adapter_name, cx)
712 .detach();
713 }
714
715 // Start all the newly-enabled language servers.
716 for (worktree_id, worktree_path, language) in language_servers_to_start {
717 self.start_language_server(worktree_id, worktree_path, language, cx);
718 }
719
720 cx.notify();
721 }
722
723 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
724 self.opened_buffers
725 .get(&remote_id)
726 .and_then(|buffer| buffer.upgrade(cx))
727 }
728
729 pub fn languages(&self) -> &Arc<LanguageRegistry> {
730 &self.languages
731 }
732
733 pub fn client(&self) -> Arc<Client> {
734 self.client.clone()
735 }
736
737 pub fn user_store(&self) -> ModelHandle<UserStore> {
738 self.user_store.clone()
739 }
740
741 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
742 self.project_store.clone()
743 }
744
745 #[cfg(any(test, feature = "test-support"))]
746 pub fn check_invariants(&self, cx: &AppContext) {
747 if self.is_local() {
748 let mut worktree_root_paths = HashMap::default();
749 for worktree in self.worktrees(cx) {
750 let worktree = worktree.read(cx);
751 let abs_path = worktree.as_local().unwrap().abs_path().clone();
752 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
753 assert_eq!(
754 prev_worktree_id,
755 None,
756 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
757 abs_path,
758 worktree.id(),
759 prev_worktree_id
760 )
761 }
762 } else {
763 let replica_id = self.replica_id();
764 for buffer in self.opened_buffers.values() {
765 if let Some(buffer) = buffer.upgrade(cx) {
766 let buffer = buffer.read(cx);
767 assert_eq!(
768 buffer.deferred_ops_len(),
769 0,
770 "replica {}, buffer {} has deferred operations",
771 replica_id,
772 buffer.remote_id()
773 );
774 }
775 }
776 }
777 }
778
779 #[cfg(any(test, feature = "test-support"))]
780 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
781 let path = path.into();
782 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
783 self.opened_buffers.iter().any(|(_, buffer)| {
784 if let Some(buffer) = buffer.upgrade(cx) {
785 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
786 if file.worktree == worktree && file.path() == &path.path {
787 return true;
788 }
789 }
790 }
791 false
792 })
793 } else {
794 false
795 }
796 }
797
798 pub fn fs(&self) -> &Arc<dyn Fs> {
799 &self.fs
800 }
801
802 pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
803 if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
804 let mut online_tx = online_tx.borrow_mut();
805 if *online_tx != online {
806 *online_tx = online;
807 drop(online_tx);
808 self.metadata_changed(true, cx);
809 }
810 }
811 }
812
813 pub fn is_online(&self) -> bool {
814 match &self.client_state {
815 ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
816 ProjectClientState::Remote { .. } => true,
817 }
818 }
819
820 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
821 self.unshared(cx);
822 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
823 if let Some(remote_id) = *remote_id_rx.borrow() {
824 let request = self.client.request(proto::UnregisterProject {
825 project_id: remote_id,
826 });
827 return cx.spawn(|this, mut cx| async move {
828 let response = request.await;
829
830 // Unregistering the project causes the server to send out a
831 // contact update removing this project from the host's list
832 // of online projects. Wait until this contact update has been
833 // processed before clearing out this project's remote id, so
834 // that there is no moment where this project appears in the
835 // contact metadata and *also* has no remote id.
836 this.update(&mut cx, |this, cx| {
837 this.user_store()
838 .update(cx, |store, _| store.contact_updates_done())
839 })
840 .await;
841
842 this.update(&mut cx, |this, cx| {
843 if let ProjectClientState::Local { remote_id_tx, .. } =
844 &mut this.client_state
845 {
846 *remote_id_tx.borrow_mut() = None;
847 }
848 this.client_subscriptions.clear();
849 this.metadata_changed(false, cx);
850 });
851 response.map(drop)
852 });
853 }
854 }
855 Task::ready(Ok(()))
856 }
857
858 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
859 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
860 if remote_id_rx.borrow().is_some() {
861 return Task::ready(Ok(()));
862 }
863 }
864
865 let response = self.client.request(proto::RegisterProject {});
866 cx.spawn(|this, mut cx| async move {
867 let remote_id = response.await?.project_id;
868 this.update(&mut cx, |this, cx| {
869 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
870 *remote_id_tx.borrow_mut() = Some(remote_id);
871 }
872
873 this.metadata_changed(false, cx);
874 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
875 this.client_subscriptions
876 .push(this.client.add_model_for_remote_entity(remote_id, cx));
877 Ok(())
878 })
879 })
880 }
881
882 pub fn remote_id(&self) -> Option<u64> {
883 match &self.client_state {
884 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
885 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
886 }
887 }
888
889 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
890 let mut id = None;
891 let mut watch = None;
892 match &self.client_state {
893 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
894 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
895 }
896
897 async move {
898 if let Some(id) = id {
899 return id;
900 }
901 let mut watch = watch.unwrap();
902 loop {
903 let id = *watch.borrow();
904 if let Some(id) = id {
905 return id;
906 }
907 watch.next().await;
908 }
909 }
910 }
911
912 pub fn shared_remote_id(&self) -> Option<u64> {
913 match &self.client_state {
914 ProjectClientState::Local {
915 remote_id_rx,
916 is_shared,
917 ..
918 } => {
919 if *is_shared {
920 *remote_id_rx.borrow()
921 } else {
922 None
923 }
924 }
925 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
926 }
927 }
928
929 pub fn replica_id(&self) -> ReplicaId {
930 match &self.client_state {
931 ProjectClientState::Local { .. } => 0,
932 ProjectClientState::Remote { replica_id, .. } => *replica_id,
933 }
934 }
935
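    /// Sends the project's current worktree metadata to the collaboration
    /// server (when the project is registered and online), notifies the
    /// project store, and optionally persists the online state to disk.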
936 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
937 if let ProjectClientState::Local {
938 remote_id_rx,
939 online_rx,
940 ..
941 } = &self.client_state
942 {
943 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
944 self.client
945 .send(proto::UpdateProject {
946 project_id,
947 worktrees: self
948 .worktrees
949 .iter()
950 .filter_map(|worktree| {
951 worktree.upgrade(&cx).map(|worktree| {
952 worktree.read(cx).as_local().unwrap().metadata_proto()
953 })
954 })
955 .collect(),
956 })
957 .log_err();
958 }
959
960 self.project_store.update(cx, |_, cx| cx.notify());
961 if persist {
962 self.persist_state(cx).detach_and_log_err(cx);
963 }
964 cx.notify();
965 }
966 }
967
968 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
969 &self.collaborators
970 }
971
972 pub fn worktrees<'a>(
973 &'a self,
974 cx: &'a AppContext,
975 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
976 self.worktrees
977 .iter()
978 .filter_map(move |worktree| worktree.upgrade(cx))
979 }
980
981 pub fn visible_worktrees<'a>(
982 &'a self,
983 cx: &'a AppContext,
984 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
985 self.worktrees.iter().filter_map(|worktree| {
986 worktree.upgrade(cx).and_then(|worktree| {
987 if worktree.read(cx).is_visible() {
988 Some(worktree)
989 } else {
990 None
991 }
992 })
993 })
994 }
995
996 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
997 self.visible_worktrees(cx)
998 .map(|tree| tree.read(cx).root_name())
999 }
1000
1001 fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
1002 self.worktrees
1003 .iter()
1004 .filter_map(|worktree| {
1005 let worktree = worktree.upgrade(&cx)?.read(cx);
1006 if worktree.is_visible() {
1007 Some(format!(
1008 "project-path-online:{}",
1009 worktree.as_local().unwrap().abs_path().to_string_lossy()
1010 ))
1011 } else {
1012 None
1013 }
1014 })
1015 .collect::<Vec<_>>()
1016 }
1017
1018 pub fn worktree_for_id(
1019 &self,
1020 id: WorktreeId,
1021 cx: &AppContext,
1022 ) -> Option<ModelHandle<Worktree>> {
1023 self.worktrees(cx)
1024 .find(|worktree| worktree.read(cx).id() == id)
1025 }
1026
1027 pub fn worktree_for_entry(
1028 &self,
1029 entry_id: ProjectEntryId,
1030 cx: &AppContext,
1031 ) -> Option<ModelHandle<Worktree>> {
1032 self.worktrees(cx)
1033 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1034 }
1035
1036 pub fn worktree_id_for_entry(
1037 &self,
1038 entry_id: ProjectEntryId,
1039 cx: &AppContext,
1040 ) -> Option<WorktreeId> {
1041 self.worktree_for_entry(entry_id, cx)
1042 .map(|worktree| worktree.read(cx).id())
1043 }
1044
1045 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1046 paths.iter().all(|path| self.contains_path(&path, cx))
1047 }
1048
1049 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1050 for worktree in self.worktrees(cx) {
1051 let worktree = worktree.read(cx).as_local();
1052 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1053 return true;
1054 }
1055 }
1056 false
1057 }
1058
1059 pub fn create_entry(
1060 &mut self,
1061 project_path: impl Into<ProjectPath>,
1062 is_directory: bool,
1063 cx: &mut ModelContext<Self>,
1064 ) -> Option<Task<Result<Entry>>> {
1065 let project_path = project_path.into();
1066 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1067 if self.is_local() {
1068 Some(worktree.update(cx, |worktree, cx| {
1069 worktree
1070 .as_local_mut()
1071 .unwrap()
1072 .create_entry(project_path.path, is_directory, cx)
1073 }))
1074 } else {
1075 let client = self.client.clone();
1076 let project_id = self.remote_id().unwrap();
1077 Some(cx.spawn_weak(|_, mut cx| async move {
1078 let response = client
1079 .request(proto::CreateProjectEntry {
1080 worktree_id: project_path.worktree_id.to_proto(),
1081 project_id,
1082 path: project_path.path.as_os_str().as_bytes().to_vec(),
1083 is_directory,
1084 })
1085 .await?;
1086 let entry = response
1087 .entry
1088 .ok_or_else(|| anyhow!("missing entry in response"))?;
1089 worktree
1090 .update(&mut cx, |worktree, cx| {
1091 worktree.as_remote().unwrap().insert_entry(
1092 entry,
1093 response.worktree_scan_id as usize,
1094 cx,
1095 )
1096 })
1097 .await
1098 }))
1099 }
1100 }
1101
1102 pub fn copy_entry(
1103 &mut self,
1104 entry_id: ProjectEntryId,
1105 new_path: impl Into<Arc<Path>>,
1106 cx: &mut ModelContext<Self>,
1107 ) -> Option<Task<Result<Entry>>> {
1108 let worktree = self.worktree_for_entry(entry_id, cx)?;
1109 let new_path = new_path.into();
1110 if self.is_local() {
1111 worktree.update(cx, |worktree, cx| {
1112 worktree
1113 .as_local_mut()
1114 .unwrap()
1115 .copy_entry(entry_id, new_path, cx)
1116 })
1117 } else {
1118 let client = self.client.clone();
1119 let project_id = self.remote_id().unwrap();
1120
1121 Some(cx.spawn_weak(|_, mut cx| async move {
1122 let response = client
1123 .request(proto::CopyProjectEntry {
1124 project_id,
1125 entry_id: entry_id.to_proto(),
1126 new_path: new_path.as_os_str().as_bytes().to_vec(),
1127 })
1128 .await?;
1129 let entry = response
1130 .entry
1131 .ok_or_else(|| anyhow!("missing entry in response"))?;
1132 worktree
1133 .update(&mut cx, |worktree, cx| {
1134 worktree.as_remote().unwrap().insert_entry(
1135 entry,
1136 response.worktree_scan_id as usize,
1137 cx,
1138 )
1139 })
1140 .await
1141 }))
1142 }
1143 }
1144
1145 pub fn rename_entry(
1146 &mut self,
1147 entry_id: ProjectEntryId,
1148 new_path: impl Into<Arc<Path>>,
1149 cx: &mut ModelContext<Self>,
1150 ) -> Option<Task<Result<Entry>>> {
1151 let worktree = self.worktree_for_entry(entry_id, cx)?;
1152 let new_path = new_path.into();
1153 if self.is_local() {
1154 worktree.update(cx, |worktree, cx| {
1155 worktree
1156 .as_local_mut()
1157 .unwrap()
1158 .rename_entry(entry_id, new_path, cx)
1159 })
1160 } else {
1161 let client = self.client.clone();
1162 let project_id = self.remote_id().unwrap();
1163
1164 Some(cx.spawn_weak(|_, mut cx| async move {
1165 let response = client
1166 .request(proto::RenameProjectEntry {
1167 project_id,
1168 entry_id: entry_id.to_proto(),
1169 new_path: new_path.as_os_str().as_bytes().to_vec(),
1170 })
1171 .await?;
1172 let entry = response
1173 .entry
1174 .ok_or_else(|| anyhow!("missing entry in response"))?;
1175 worktree
1176 .update(&mut cx, |worktree, cx| {
1177 worktree.as_remote().unwrap().insert_entry(
1178 entry,
1179 response.worktree_scan_id as usize,
1180 cx,
1181 )
1182 })
1183 .await
1184 }))
1185 }
1186 }
1187
1188 pub fn delete_entry(
1189 &mut self,
1190 entry_id: ProjectEntryId,
1191 cx: &mut ModelContext<Self>,
1192 ) -> Option<Task<Result<()>>> {
1193 let worktree = self.worktree_for_entry(entry_id, cx)?;
1194 if self.is_local() {
1195 worktree.update(cx, |worktree, cx| {
1196 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1197 })
1198 } else {
1199 let client = self.client.clone();
1200 let project_id = self.remote_id().unwrap();
1201 Some(cx.spawn_weak(|_, mut cx| async move {
1202 let response = client
1203 .request(proto::DeleteProjectEntry {
1204 project_id,
1205 entry_id: entry_id.to_proto(),
1206 })
1207 .await?;
1208 worktree
1209 .update(&mut cx, move |worktree, cx| {
1210 worktree.as_remote().unwrap().delete_entry(
1211 entry_id,
1212 response.worktree_scan_id as usize,
1213 cx,
1214 )
1215 })
1216 .await
1217 }))
1218 }
1219 }
1220
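    /// Shares this local project with collaborators: upgrades buffer and
    /// worktree handles to strong references so they stay alive, shares each
    /// worktree over RPC, and announces the currently running language
    /// servers.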
1221 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1222 let project_id;
1223 if let ProjectClientState::Local {
1224 remote_id_rx,
1225 is_shared,
1226 ..
1227 } = &mut self.client_state
1228 {
1229 if *is_shared {
1230 return Task::ready(Ok(()));
1231 }
1232 *is_shared = true;
1233 if let Some(id) = *remote_id_rx.borrow() {
1234 project_id = id;
1235 } else {
1236 return Task::ready(Err(anyhow!("project hasn't been registered")));
1237 }
1238 } else {
1239 return Task::ready(Err(anyhow!("can't share a remote project")));
1240 };
1241
1242 for open_buffer in self.opened_buffers.values_mut() {
1243 match open_buffer {
1244 OpenBuffer::Strong(_) => {}
1245 OpenBuffer::Weak(buffer) => {
1246 if let Some(buffer) = buffer.upgrade(cx) {
1247 *open_buffer = OpenBuffer::Strong(buffer);
1248 }
1249 }
1250 OpenBuffer::Loading(_) => unreachable!(),
1251 }
1252 }
1253
1254 for worktree_handle in self.worktrees.iter_mut() {
1255 match worktree_handle {
1256 WorktreeHandle::Strong(_) => {}
1257 WorktreeHandle::Weak(worktree) => {
1258 if let Some(worktree) = worktree.upgrade(cx) {
1259 *worktree_handle = WorktreeHandle::Strong(worktree);
1260 }
1261 }
1262 }
1263 }
1264
1265 let mut tasks = Vec::new();
1266 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1267 worktree.update(cx, |worktree, cx| {
1268 let worktree = worktree.as_local_mut().unwrap();
1269 tasks.push(worktree.share(project_id, cx));
1270 });
1271 }
1272
1273 for (server_id, status) in &self.language_server_statuses {
1274 self.client
1275 .send(proto::StartLanguageServer {
1276 project_id,
1277 server: Some(proto::LanguageServer {
1278 id: *server_id as u64,
1279 name: status.name.clone(),
1280 }),
1281 })
1282 .log_err();
1283 }
1284
1285 cx.spawn(|this, mut cx| async move {
1286 for task in tasks {
1287 task.await?;
1288 }
1289 this.update(&mut cx, |_, cx| cx.notify());
1290 Ok(())
1291 })
1292 }
1293
1294 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1295 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1296 if !*is_shared {
1297 return;
1298 }
1299
1300 *is_shared = false;
1301 self.collaborators.clear();
1302 self.shared_buffers.clear();
1303 for worktree_handle in self.worktrees.iter_mut() {
1304 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1305 let is_visible = worktree.update(cx, |worktree, _| {
1306 worktree.as_local_mut().unwrap().unshare();
1307 worktree.is_visible()
1308 });
1309 if !is_visible {
1310 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1311 }
1312 }
1313 }
1314
1315 for open_buffer in self.opened_buffers.values_mut() {
1316 match open_buffer {
1317 OpenBuffer::Strong(buffer) => {
1318 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1319 }
1320 _ => {}
1321 }
1322 }
1323
1324 cx.notify();
1325 } else {
1326 log::error!("attempted to unshare a remote project");
1327 }
1328 }
1329
1330 pub fn respond_to_join_request(
1331 &mut self,
1332 requester_id: u64,
1333 allow: bool,
1334 cx: &mut ModelContext<Self>,
1335 ) {
1336 if let Some(project_id) = self.remote_id() {
1337 let share = self.share(cx);
1338 let client = self.client.clone();
1339 cx.foreground()
1340 .spawn(async move {
1341 share.await?;
1342 client.send(proto::RespondToJoinProjectRequest {
1343 requester_id,
1344 project_id,
1345 allow,
1346 })
1347 })
1348 .detach_and_log_err(cx);
1349 }
1350 }
1351
1352 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1353 if let ProjectClientState::Remote {
1354 sharing_has_stopped,
1355 ..
1356 } = &mut self.client_state
1357 {
1358 *sharing_has_stopped = true;
1359 self.collaborators.clear();
1360 for worktree in &self.worktrees {
1361 if let Some(worktree) = worktree.upgrade(cx) {
1362 worktree.update(cx, |worktree, _| {
1363 if let Some(worktree) = worktree.as_remote_mut() {
1364 worktree.disconnected_from_host();
1365 }
1366 });
1367 }
1368 }
1369 cx.notify();
1370 }
1371 }
1372
1373 pub fn is_read_only(&self) -> bool {
1374 match &self.client_state {
1375 ProjectClientState::Local { .. } => false,
1376 ProjectClientState::Remote {
1377 sharing_has_stopped,
1378 ..
1379 } => *sharing_has_stopped,
1380 }
1381 }
1382
1383 pub fn is_local(&self) -> bool {
1384 match &self.client_state {
1385 ProjectClientState::Local { .. } => true,
1386 ProjectClientState::Remote { .. } => false,
1387 }
1388 }
1389
1390 pub fn is_remote(&self) -> bool {
1391 !self.is_local()
1392 }
1393
1394 pub fn create_buffer(
1395 &mut self,
1396 text: &str,
1397 language: Option<Arc<Language>>,
1398 cx: &mut ModelContext<Self>,
1399 ) -> Result<ModelHandle<Buffer>> {
1400 if self.is_remote() {
1401 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1402 }
1403
1404 let buffer = cx.add_model(|cx| {
1405 Buffer::new(self.replica_id(), text, cx)
1406 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1407 });
1408 self.register_buffer(&buffer, cx)?;
1409 Ok(buffer)
1410 }
1411
1412 pub fn open_path(
1413 &mut self,
1414 path: impl Into<ProjectPath>,
1415 cx: &mut ModelContext<Self>,
1416 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1417 let task = self.open_buffer(path, cx);
1418 cx.spawn_weak(|_, cx| async move {
1419 let buffer = task.await?;
1420 let project_entry_id = buffer
1421 .read_with(&cx, |buffer, cx| {
1422 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1423 })
1424 .ok_or_else(|| anyhow!("no project entry"))?;
1425 Ok((project_entry_id, buffer.into()))
1426 })
1427 }
1428
1429 pub fn open_local_buffer(
1430 &mut self,
1431 abs_path: impl AsRef<Path>,
1432 cx: &mut ModelContext<Self>,
1433 ) -> Task<Result<ModelHandle<Buffer>>> {
1434 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1435 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1436 } else {
1437 Task::ready(Err(anyhow!("no such path")))
1438 }
1439 }
1440
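    /// Opens the buffer at the given project path, returning the buffer that
    /// is already open for that path if there is one and deduplicating
    /// concurrent loads of the same path.
    ///
    /// A minimal sketch of the calling pattern (the `project` and
    /// `worktree_id` handles are illustrative, and a `(WorktreeId, &str)`
    /// pair is assumed to convert into a `ProjectPath`):
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     project.open_buffer((worktree_id, "src/main.rs"), cx)
    /// });
    /// let buffer = open_task.await?;
    /// ```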
1441 pub fn open_buffer(
1442 &mut self,
1443 path: impl Into<ProjectPath>,
1444 cx: &mut ModelContext<Self>,
1445 ) -> Task<Result<ModelHandle<Buffer>>> {
1446 let project_path = path.into();
1447 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1448 worktree
1449 } else {
1450 return Task::ready(Err(anyhow!("no such worktree")));
1451 };
1452
1453 // If there is already a buffer for the given path, then return it.
1454 let existing_buffer = self.get_open_buffer(&project_path, cx);
1455 if let Some(existing_buffer) = existing_buffer {
1456 return Task::ready(Ok(existing_buffer));
1457 }
1458
1459 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1460 // If the given path is already being loaded, then wait for that existing
1461 // task to complete and return the same buffer.
1462 hash_map::Entry::Occupied(e) => e.get().clone(),
1463
1464 // Otherwise, record the fact that this path is now being loaded.
1465 hash_map::Entry::Vacant(entry) => {
1466 let (mut tx, rx) = postage::watch::channel();
1467 entry.insert(rx.clone());
1468
1469 let load_buffer = if worktree.read(cx).is_local() {
1470 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1471 } else {
1472 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1473 };
1474
1475 cx.spawn(move |this, mut cx| async move {
1476 let load_result = load_buffer.await;
1477 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1478 // Record the fact that the buffer is no longer loading.
1479 this.loading_buffers.remove(&project_path);
1480 let buffer = load_result.map_err(Arc::new)?;
1481 Ok(buffer)
1482 }));
1483 })
1484 .detach();
1485 rx
1486 }
1487 };
1488
1489 cx.foreground().spawn(async move {
1490 loop {
1491 if let Some(result) = loading_watch.borrow().as_ref() {
1492 match result {
1493 Ok(buffer) => return Ok(buffer.clone()),
1494 Err(error) => return Err(anyhow!("{}", error)),
1495 }
1496 }
1497 loading_watch.next().await;
1498 }
1499 })
1500 }
1501
1502 fn open_local_buffer_internal(
1503 &mut self,
1504 path: &Arc<Path>,
1505 worktree: &ModelHandle<Worktree>,
1506 cx: &mut ModelContext<Self>,
1507 ) -> Task<Result<ModelHandle<Buffer>>> {
1508 let load_buffer = worktree.update(cx, |worktree, cx| {
1509 let worktree = worktree.as_local_mut().unwrap();
1510 worktree.load_buffer(path, cx)
1511 });
1512 cx.spawn(|this, mut cx| async move {
1513 let buffer = load_buffer.await?;
1514 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1515 Ok(buffer)
1516 })
1517 }
1518
1519 fn open_remote_buffer_internal(
1520 &mut self,
1521 path: &Arc<Path>,
1522 worktree: &ModelHandle<Worktree>,
1523 cx: &mut ModelContext<Self>,
1524 ) -> Task<Result<ModelHandle<Buffer>>> {
1525 let rpc = self.client.clone();
1526 let project_id = self.remote_id().unwrap();
1527 let remote_worktree_id = worktree.read(cx).id();
1528 let path = path.clone();
1529 let path_string = path.to_string_lossy().to_string();
1530 cx.spawn(|this, mut cx| async move {
1531 let response = rpc
1532 .request(proto::OpenBufferByPath {
1533 project_id,
1534 worktree_id: remote_worktree_id.to_proto(),
1535 path: path_string,
1536 })
1537 .await?;
1538 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1539 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1540 .await
1541 })
1542 }
1543
1544 fn open_local_buffer_via_lsp(
1545 &mut self,
1546 abs_path: lsp::Url,
1547 language_server_id: usize,
1548 language_server_name: LanguageServerName,
1549 cx: &mut ModelContext<Self>,
1550 ) -> Task<Result<ModelHandle<Buffer>>> {
1551 cx.spawn(|this, mut cx| async move {
1552 let abs_path = abs_path
1553 .to_file_path()
1554 .map_err(|_| anyhow!("can't convert URI to path"))?;
1555 let (worktree, relative_path) = if let Some(result) =
1556 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1557 {
1558 result
1559 } else {
1560 let worktree = this
1561 .update(&mut cx, |this, cx| {
1562 this.create_local_worktree(&abs_path, false, cx)
1563 })
1564 .await?;
1565 this.update(&mut cx, |this, cx| {
1566 this.language_server_ids.insert(
1567 (worktree.read(cx).id(), language_server_name),
1568 language_server_id,
1569 );
1570 });
1571 (worktree, PathBuf::new())
1572 };
1573
1574 let project_path = ProjectPath {
1575 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1576 path: relative_path.into(),
1577 };
1578 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1579 .await
1580 })
1581 }
1582
1583 pub fn open_buffer_by_id(
1584 &mut self,
1585 id: u64,
1586 cx: &mut ModelContext<Self>,
1587 ) -> Task<Result<ModelHandle<Buffer>>> {
1588 if let Some(buffer) = self.buffer_for_id(id, cx) {
1589 Task::ready(Ok(buffer))
1590 } else if self.is_local() {
1591 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1592 } else if let Some(project_id) = self.remote_id() {
1593 let request = self
1594 .client
1595 .request(proto::OpenBufferById { project_id, id });
1596 cx.spawn(|this, mut cx| async move {
1597 let buffer = request
1598 .await?
1599 .buffer
1600 .ok_or_else(|| anyhow!("invalid buffer"))?;
1601 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1602 .await
1603 })
1604 } else {
1605 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1606 }
1607 }
1608
1609 pub fn save_buffer_as(
1610 &mut self,
1611 buffer: ModelHandle<Buffer>,
1612 abs_path: PathBuf,
1613 cx: &mut ModelContext<Project>,
1614 ) -> Task<Result<()>> {
1615 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1616 let old_path =
1617 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1618 cx.spawn(|this, mut cx| async move {
1619 if let Some(old_path) = old_path {
1620 this.update(&mut cx, |this, cx| {
1621 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1622 });
1623 }
1624 let (worktree, path) = worktree_task.await?;
1625 worktree
1626 .update(&mut cx, |worktree, cx| {
1627 worktree
1628 .as_local_mut()
1629 .unwrap()
1630 .save_buffer_as(buffer.clone(), path, cx)
1631 })
1632 .await?;
1633 this.update(&mut cx, |this, cx| {
1634 this.assign_language_to_buffer(&buffer, cx);
1635 this.register_buffer_with_language_server(&buffer, cx);
1636 });
1637 Ok(())
1638 })
1639 }
1640
1641 pub fn get_open_buffer(
1642 &mut self,
1643 path: &ProjectPath,
1644 cx: &mut ModelContext<Self>,
1645 ) -> Option<ModelHandle<Buffer>> {
1646 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1647 self.opened_buffers.values().find_map(|buffer| {
1648 let buffer = buffer.upgrade(cx)?;
1649 let file = File::from_dyn(buffer.read(cx).file())?;
1650 if file.worktree == worktree && file.path() == &path.path {
1651 Some(buffer)
1652 } else {
1653 None
1654 }
1655 })
1656 }
1657
1658 fn register_buffer(
1659 &mut self,
1660 buffer: &ModelHandle<Buffer>,
1661 cx: &mut ModelContext<Self>,
1662 ) -> Result<()> {
1663 let remote_id = buffer.read(cx).remote_id();
1664 let open_buffer = if self.is_remote() || self.is_shared() {
1665 OpenBuffer::Strong(buffer.clone())
1666 } else {
1667 OpenBuffer::Weak(buffer.downgrade())
1668 };
1669
1670 match self.opened_buffers.insert(remote_id, open_buffer) {
1671 None => {}
1672 Some(OpenBuffer::Loading(operations)) => {
1673 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1674 }
1675 Some(OpenBuffer::Weak(existing_handle)) => {
1676 if existing_handle.upgrade(cx).is_some() {
1677 Err(anyhow!(
1678 "already registered buffer with remote id {}",
1679 remote_id
1680 ))?
1681 }
1682 }
1683 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1684 "already registered buffer with remote id {}",
1685 remote_id
1686 ))?,
1687 }
1688 cx.subscribe(buffer, |this, buffer, event, cx| {
1689 this.on_buffer_event(buffer, event, cx);
1690 })
1691 .detach();
1692
1693 self.assign_language_to_buffer(buffer, cx);
1694 self.register_buffer_with_language_server(buffer, cx);
1695 cx.observe_release(buffer, |this, buffer, cx| {
1696 if let Some(file) = File::from_dyn(buffer.file()) {
1697 if file.is_local() {
1698 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1699 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1700 server
1701 .notify::<lsp::notification::DidCloseTextDocument>(
1702 lsp::DidCloseTextDocumentParams {
1703 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1704 },
1705 )
1706 .log_err();
1707 }
1708 }
1709 }
1710 })
1711 .detach();
1712
1713 Ok(())
1714 }
1715
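    /// Notifies the buffer's language server that the buffer was opened
    /// (`textDocument/didOpen`), seeds the buffer with any diagnostics
    /// already known for its path, and records the initial text snapshot
    /// against which later incremental `didChange` events are computed.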
1716 fn register_buffer_with_language_server(
1717 &mut self,
1718 buffer_handle: &ModelHandle<Buffer>,
1719 cx: &mut ModelContext<Self>,
1720 ) {
1721 let buffer = buffer_handle.read(cx);
1722 let buffer_id = buffer.remote_id();
1723 if let Some(file) = File::from_dyn(buffer.file()) {
1724 if file.is_local() {
1725 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1726 let initial_snapshot = buffer.text_snapshot();
1727
1728 let mut language_server = None;
1729 let mut language_id = None;
1730 if let Some(language) = buffer.language() {
1731 let worktree_id = file.worktree_id(cx);
1732 if let Some(adapter) = language.lsp_adapter() {
1733 language_id = adapter.id_for_language(language.name().as_ref());
1734 language_server = self
1735 .language_server_ids
1736 .get(&(worktree_id, adapter.name()))
1737 .and_then(|id| self.language_servers.get(&id))
1738 .and_then(|server_state| {
1739 if let LanguageServerState::Running { server, .. } = server_state {
1740 Some(server.clone())
1741 } else {
1742 None
1743 }
1744 });
1745 }
1746 }
1747
1748 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1749 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1750 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1751 .log_err();
1752 }
1753 }
1754
1755 if let Some(server) = language_server {
1756 server
1757 .notify::<lsp::notification::DidOpenTextDocument>(
1758 lsp::DidOpenTextDocumentParams {
1759 text_document: lsp::TextDocumentItem::new(
1760 uri,
1761 language_id.unwrap_or_default(),
1762 0,
1763 initial_snapshot.text(),
1764 ),
1765 }
1766 .clone(),
1767 )
1768 .log_err();
1769 buffer_handle.update(cx, |buffer, cx| {
1770 buffer.set_completion_triggers(
1771 server
1772 .capabilities()
1773 .completion_provider
1774 .as_ref()
1775 .and_then(|provider| provider.trigger_characters.clone())
1776 .unwrap_or(Vec::new()),
1777 cx,
1778 )
1779 });
1780 self.buffer_snapshots
1781 .insert(buffer_id, vec![(0, initial_snapshot)]);
1782 }
1783 }
1784 }
1785 }
1786
1787 fn unregister_buffer_from_language_server(
1788 &mut self,
1789 buffer: &ModelHandle<Buffer>,
1790 old_path: PathBuf,
1791 cx: &mut ModelContext<Self>,
1792 ) {
1793 buffer.update(cx, |buffer, cx| {
1794 buffer.update_diagnostics(Default::default(), cx);
1795 self.buffer_snapshots.remove(&buffer.remote_id());
1796 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1797 language_server
1798 .notify::<lsp::notification::DidCloseTextDocument>(
1799 lsp::DidCloseTextDocumentParams {
1800 text_document: lsp::TextDocumentIdentifier::new(
1801 lsp::Url::from_file_path(old_path).unwrap(),
1802 ),
1803 },
1804 )
1805 .log_err();
1806 }
1807 });
1808 }
1809
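    /// Responds to buffer events: forwards operations to collaborators when
    /// the project is shared, converts edits into incremental
    /// `textDocument/didChange` notifications for the language server, and
    /// sends `didSave` (plus a simulated disk-based-diagnostics cycle for
    /// adapters without a progress token) when the buffer is saved.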
1810 fn on_buffer_event(
1811 &mut self,
1812 buffer: ModelHandle<Buffer>,
1813 event: &BufferEvent,
1814 cx: &mut ModelContext<Self>,
1815 ) -> Option<()> {
1816 match event {
1817 BufferEvent::Operation(operation) => {
1818 if let Some(project_id) = self.shared_remote_id() {
1819 let request = self.client.request(proto::UpdateBuffer {
1820 project_id,
1821 buffer_id: buffer.read(cx).remote_id(),
1822 operations: vec![language::proto::serialize_operation(&operation)],
1823 });
1824 cx.background().spawn(request).detach_and_log_err(cx);
1825 } else if let Some(project_id) = self.remote_id() {
1826 let _ = self
1827 .client
1828 .send(proto::RegisterProjectActivity { project_id });
1829 }
1830 }
1831 BufferEvent::Edited { .. } => {
1832 let language_server = self
1833 .language_server_for_buffer(buffer.read(cx), cx)
1834 .map(|(_, server)| server.clone())?;
1835 let buffer = buffer.read(cx);
1836 let file = File::from_dyn(buffer.file())?;
1837 let abs_path = file.as_local()?.abs_path(cx);
1838 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1839 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1840 let (version, prev_snapshot) = buffer_snapshots.last()?;
1841 let next_snapshot = buffer.text_snapshot();
1842 let next_version = version + 1;
1843
1844 let content_changes = buffer
1845 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1846 .map(|edit| {
1847 let edit_start = edit.new.start.0;
1848 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1849 let new_text = next_snapshot
1850 .text_for_range(edit.new.start.1..edit.new.end.1)
1851 .collect();
1852 lsp::TextDocumentContentChangeEvent {
1853 range: Some(lsp::Range::new(
1854 point_to_lsp(edit_start),
1855 point_to_lsp(edit_end),
1856 )),
1857 range_length: None,
1858 text: new_text,
1859 }
1860 })
1861 .collect();
1862
1863 buffer_snapshots.push((next_version, next_snapshot));
1864
1865 language_server
1866 .notify::<lsp::notification::DidChangeTextDocument>(
1867 lsp::DidChangeTextDocumentParams {
1868 text_document: lsp::VersionedTextDocumentIdentifier::new(
1869 uri,
1870 next_version,
1871 ),
1872 content_changes,
1873 },
1874 )
1875 .log_err();
1876 }
1877 BufferEvent::Saved => {
1878 let file = File::from_dyn(buffer.read(cx).file())?;
1879 let worktree_id = file.worktree_id(cx);
1880 let abs_path = file.as_local()?.abs_path(cx);
1881 let text_document = lsp::TextDocumentIdentifier {
1882 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1883 };
1884
1885 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1886 server
1887 .notify::<lsp::notification::DidSaveTextDocument>(
1888 lsp::DidSaveTextDocumentParams {
1889 text_document: text_document.clone(),
1890 text: None,
1891 },
1892 )
1893 .log_err();
1894 }
1895
1896 // After saving a buffer, simulate disk-based diagnostics being finished for languages
1897 // that don't support a disk-based progress token.
1898 let (lsp_adapter, language_server) =
1899 self.language_server_for_buffer(buffer.read(cx), cx)?;
1900 if lsp_adapter
1901 .disk_based_diagnostics_progress_token()
1902 .is_none()
1903 {
1904 let server_id = language_server.server_id();
1905 self.disk_based_diagnostics_finished(server_id, cx);
1906 self.broadcast_language_server_update(
1907 server_id,
1908 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1909 proto::LspDiskBasedDiagnosticsUpdated {},
1910 ),
1911 );
1912 }
1913 }
1914 _ => {}
1915 }
1916
1917 None
1918 }
1919
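    /// Returns an iterator over the language servers registered for the given worktree that
    /// are already running; servers that are still starting are skipped.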
1920 fn language_servers_for_worktree(
1921 &self,
1922 worktree_id: WorktreeId,
1923 ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
1924 self.language_server_ids
1925 .iter()
1926 .filter_map(move |((language_server_worktree_id, _), id)| {
1927 if *language_server_worktree_id == worktree_id {
1928 if let Some(LanguageServerState::Running { adapter, server }) =
                        self.language_servers.get(id)
1930 {
1931 return Some((adapter, server));
1932 }
1933 }
1934 None
1935 })
1936 }
1937
1938 fn assign_language_to_buffer(
1939 &mut self,
1940 buffer: &ModelHandle<Buffer>,
1941 cx: &mut ModelContext<Self>,
1942 ) -> Option<()> {
1943 // If the buffer has a language, set it and start the language server if we haven't already.
1944 let full_path = buffer.read(cx).file()?.full_path(cx);
1945 let language = self.languages.select_language(&full_path)?;
1946 buffer.update(cx, |buffer, cx| {
1947 buffer.set_language(Some(language.clone()), cx);
1948 });
1949
1950 let file = File::from_dyn(buffer.read(cx).file())?;
1951 let worktree = file.worktree.read(cx).as_local()?;
1952 let worktree_id = worktree.id();
1953 let worktree_abs_path = worktree.abs_path().clone();
1954 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1955
1956 None
1957 }
1958
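    /// Starts a language server for the given worktree and language unless one is already
    /// registered for that `(worktree, adapter name)` key or language servers are disabled
    /// in the settings. The server is started asynchronously: once initialized, its request
    /// and notification handlers are installed, it is promoted to the `Running` state, and
    /// every open buffer in the worktree that matches the language is sent to it via
    /// `didOpen`.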
1959 fn start_language_server(
1960 &mut self,
1961 worktree_id: WorktreeId,
1962 worktree_path: Arc<Path>,
1963 language: Arc<Language>,
1964 cx: &mut ModelContext<Self>,
1965 ) {
1966 if !cx
1967 .global::<Settings>()
1968 .enable_language_server(Some(&language.name()))
1969 {
1970 return;
1971 }
1972
1973 let adapter = if let Some(adapter) = language.lsp_adapter() {
1974 adapter
1975 } else {
1976 return;
1977 };
1978 let key = (worktree_id, adapter.name());
1979
1980 self.language_server_ids
1981 .entry(key.clone())
1982 .or_insert_with(|| {
1983 let server_id = post_inc(&mut self.next_language_server_id);
1984 let language_server = self.languages.start_language_server(
1985 server_id,
1986 language.clone(),
1987 worktree_path,
1988 self.client.http_client(),
1989 cx,
1990 );
1991 self.language_servers.insert(
1992 server_id,
1993 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
1994 let language_server = language_server?.await.log_err()?;
1995 let language_server = language_server
1996 .initialize(adapter.initialization_options())
1997 .await
1998 .log_err()?;
1999 let this = this.upgrade(&cx)?;
2000 let disk_based_diagnostics_progress_token =
2001 adapter.disk_based_diagnostics_progress_token();
2002
2003 language_server
2004 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2005 let this = this.downgrade();
2006 let adapter = adapter.clone();
2007 move |params, mut cx| {
2008 if let Some(this) = this.upgrade(&cx) {
2009 this.update(&mut cx, |this, cx| {
2010 this.on_lsp_diagnostics_published(
2011 server_id, params, &adapter, cx,
2012 );
2013 });
2014 }
2015 }
2016 })
2017 .detach();
2018
2019 language_server
2020 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2021 let settings = this.read_with(&cx, |this, _| {
2022 this.language_server_settings.clone()
2023 });
2024 move |params, _| {
2025 let settings = settings.lock().clone();
2026 async move {
2027 Ok(params
2028 .items
2029 .into_iter()
2030 .map(|item| {
2031 if let Some(section) = &item.section {
2032 settings
2033 .get(section)
2034 .cloned()
2035 .unwrap_or(serde_json::Value::Null)
2036 } else {
2037 settings.clone()
2038 }
2039 })
2040 .collect())
2041 }
2042 }
2043 })
2044 .detach();
2045
2046 // Even though we don't have handling for these requests, respond to them to
2047 // avoid stalling any language server like `gopls` which waits for a response
2048 // to these requests when initializing.
2049 language_server
2050 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2051 let this = this.downgrade();
2052 move |params, mut cx| async move {
2053 if let Some(this) = this.upgrade(&cx) {
2054 this.update(&mut cx, |this, _| {
2055 if let Some(status) =
2056 this.language_server_statuses.get_mut(&server_id)
2057 {
2058 if let lsp::NumberOrString::String(token) =
2059 params.token
2060 {
2061 status.progress_tokens.insert(token);
2062 }
2063 }
2064 });
2065 }
2066 Ok(())
2067 }
2068 })
2069 .detach();
2070 language_server
2071 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2072 Ok(())
2073 })
2074 .detach();
2075
2076 language_server
2077 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2078 let this = this.downgrade();
2079 let adapter = adapter.clone();
2080 let language_server = language_server.clone();
2081 move |params, cx| {
2082 Self::on_lsp_workspace_edit(
2083 this,
2084 params,
2085 server_id,
2086 adapter.clone(),
2087 language_server.clone(),
2088 cx,
2089 )
2090 }
2091 })
2092 .detach();
2093
2094 language_server
2095 .on_notification::<lsp::notification::Progress, _>({
2096 let this = this.downgrade();
2097 move |params, mut cx| {
2098 if let Some(this) = this.upgrade(&cx) {
2099 this.update(&mut cx, |this, cx| {
2100 this.on_lsp_progress(
2101 params,
2102 server_id,
2103 disk_based_diagnostics_progress_token,
2104 cx,
2105 );
2106 });
2107 }
2108 }
2109 })
2110 .detach();
2111
2112 this.update(&mut cx, |this, cx| {
                            // If a different server id was registered for this key while this
                            // server was starting, this server has been superseded, so don't
                            // store it.
2115 if this
2116 .language_server_ids
2117 .get(&key)
2118 .map(|id| id != &server_id)
2119 .unwrap_or(false)
2120 {
2121 return None;
2122 }
2123
2124 this.language_servers.insert(
2125 server_id,
2126 LanguageServerState::Running {
2127 adapter: adapter.clone(),
2128 server: language_server.clone(),
2129 },
2130 );
2131 this.language_server_statuses.insert(
2132 server_id,
2133 LanguageServerStatus {
2134 name: language_server.name().to_string(),
2135 pending_work: Default::default(),
2136 has_pending_diagnostic_updates: false,
2137 progress_tokens: Default::default(),
2138 },
2139 );
2140 language_server
2141 .notify::<lsp::notification::DidChangeConfiguration>(
2142 lsp::DidChangeConfigurationParams {
2143 settings: this.language_server_settings.lock().clone(),
2144 },
2145 )
2146 .ok();
2147
2148 if let Some(project_id) = this.shared_remote_id() {
2149 this.client
2150 .send(proto::StartLanguageServer {
2151 project_id,
2152 server: Some(proto::LanguageServer {
2153 id: server_id as u64,
2154 name: language_server.name().to_string(),
2155 }),
2156 })
2157 .log_err();
2158 }
2159
2160 // Tell the language server about every open buffer in the worktree that matches the language.
2161 for buffer in this.opened_buffers.values() {
2162 if let Some(buffer_handle) = buffer.upgrade(cx) {
2163 let buffer = buffer_handle.read(cx);
2164 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2165 file
2166 } else {
2167 continue;
2168 };
2169 let language = if let Some(language) = buffer.language() {
2170 language
2171 } else {
2172 continue;
2173 };
2174 if file.worktree.read(cx).id() != key.0
2175 || language.lsp_adapter().map(|a| a.name())
2176 != Some(key.1.clone())
2177 {
2178 continue;
2179 }
2180
2181 let file = file.as_local()?;
2182 let versions = this
2183 .buffer_snapshots
2184 .entry(buffer.remote_id())
2185 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2186 let (version, initial_snapshot) = versions.last().unwrap();
2187 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2188 let language_id =
2189 adapter.id_for_language(language.name().as_ref());
2190 language_server
2191 .notify::<lsp::notification::DidOpenTextDocument>(
2192 lsp::DidOpenTextDocumentParams {
2193 text_document: lsp::TextDocumentItem::new(
2194 uri,
2195 language_id.unwrap_or_default(),
2196 *version,
2197 initial_snapshot.text(),
2198 ),
2199 },
2200 )
2201 .log_err()?;
2202 buffer_handle.update(cx, |buffer, cx| {
2203 buffer.set_completion_triggers(
2204 language_server
2205 .capabilities()
2206 .completion_provider
2207 .as_ref()
2208 .and_then(|provider| {
2209 provider.trigger_characters.clone()
2210 })
2211 .unwrap_or(Vec::new()),
2212 cx,
2213 )
2214 });
2215 }
2216 }
2217
2218 cx.notify();
2219 Some(language_server)
2220 })
2221 })),
2222 );
2223
2224 server_id
2225 });
2226 }
2227
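    /// Removes the language server registered for the given worktree and adapter, waiting
    /// for it to finish starting if necessary, and shuts it down. The returned task resolves
    /// once the server's status has been cleared.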
2228 fn stop_language_server(
2229 &mut self,
2230 worktree_id: WorktreeId,
2231 adapter_name: LanguageServerName,
2232 cx: &mut ModelContext<Self>,
2233 ) -> Task<()> {
2234 let key = (worktree_id, adapter_name);
2235 if let Some(server_id) = self.language_server_ids.remove(&key) {
2236 let server_state = self.language_servers.remove(&server_id);
2237 cx.spawn_weak(|this, mut cx| async move {
2238 let server = match server_state {
2239 Some(LanguageServerState::Starting(started_language_server)) => {
2240 started_language_server.await
2241 }
2242 Some(LanguageServerState::Running { server, .. }) => Some(server),
2243 None => None,
2244 };
2245
2246 if let Some(server) = server {
2247 if let Some(shutdown) = server.shutdown() {
2248 shutdown.await;
2249 }
2250 }
2251
2252 if let Some(this) = this.upgrade(&cx) {
2253 this.update(&mut cx, |this, cx| {
2254 this.language_server_statuses.remove(&server_id);
2255 cx.notify();
2256 });
2257 }
2258 })
2259 } else {
2260 Task::ready(())
2261 }
2262 }
2263
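    /// Restarts the language servers associated with the given buffers, deduplicated by
    /// worktree and language so that each affected server is restarted only once.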
2264 pub fn restart_language_servers_for_buffers(
2265 &mut self,
2266 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2267 cx: &mut ModelContext<Self>,
2268 ) -> Option<()> {
2269 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2270 .into_iter()
2271 .filter_map(|buffer| {
2272 let file = File::from_dyn(buffer.read(cx).file())?;
2273 let worktree = file.worktree.read(cx).as_local()?;
2274 let worktree_id = worktree.id();
2275 let worktree_abs_path = worktree.abs_path().clone();
2276 let full_path = file.full_path(cx);
2277 Some((worktree_id, worktree_abs_path, full_path))
2278 })
2279 .collect();
2280 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2281 let language = self.languages.select_language(&full_path)?;
2282 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2283 }
2284
2285 None
2286 }
2287
2288 fn restart_language_server(
2289 &mut self,
2290 worktree_id: WorktreeId,
2291 worktree_path: Arc<Path>,
2292 language: Arc<Language>,
2293 cx: &mut ModelContext<Self>,
2294 ) {
2295 let adapter = if let Some(adapter) = language.lsp_adapter() {
2296 adapter
2297 } else {
2298 return;
2299 };
2300
2301 let stop = self.stop_language_server(worktree_id, adapter.name(), cx);
2302 cx.spawn_weak(|this, mut cx| async move {
2303 stop.await;
2304 if let Some(this) = this.upgrade(&cx) {
2305 this.update(&mut cx, |this, cx| {
2306 this.start_language_server(worktree_id, worktree_path, language, cx);
2307 });
2308 }
2309 })
2310 .detach();
2311 }
2312
2313 fn on_lsp_diagnostics_published(
2314 &mut self,
2315 server_id: usize,
2316 mut params: lsp::PublishDiagnosticsParams,
2317 adapter: &Arc<dyn LspAdapter>,
2318 cx: &mut ModelContext<Self>,
2319 ) {
2320 adapter.process_diagnostics(&mut params);
2321 self.update_diagnostics(
2322 server_id,
2323 params,
2324 adapter.disk_based_diagnostic_sources(),
2325 cx,
2326 )
2327 .log_err();
2328 }
2329
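    /// Handles `$/progress` notifications from a language server. Only string tokens that
    /// were registered via `window/workDoneProgress/create` are considered; the adapter's
    /// disk-based diagnostics token drives the disk-based diagnostics started/finished
    /// events, while all other tokens are surfaced as work progress and broadcast to
    /// collaborators.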
2330 fn on_lsp_progress(
2331 &mut self,
2332 progress: lsp::ProgressParams,
2333 server_id: usize,
2334 disk_based_diagnostics_progress_token: Option<&str>,
2335 cx: &mut ModelContext<Self>,
2336 ) {
2337 let token = match progress.token {
2338 lsp::NumberOrString::String(token) => token,
2339 lsp::NumberOrString::Number(token) => {
2340 log::info!("skipping numeric progress token {}", token);
2341 return;
2342 }
2343 };
2344 let progress = match progress.value {
2345 lsp::ProgressParamsValue::WorkDone(value) => value,
2346 };
2347 let language_server_status =
2348 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2349 status
2350 } else {
2351 return;
2352 };
2353
2354 if !language_server_status.progress_tokens.contains(&token) {
2355 return;
2356 }
2357
2358 match progress {
2359 lsp::WorkDoneProgress::Begin(report) => {
2360 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2361 language_server_status.has_pending_diagnostic_updates = true;
2362 self.disk_based_diagnostics_started(server_id, cx);
2363 self.broadcast_language_server_update(
2364 server_id,
2365 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2366 proto::LspDiskBasedDiagnosticsUpdating {},
2367 ),
2368 );
2369 } else {
2370 self.on_lsp_work_start(
2371 server_id,
2372 token.clone(),
2373 LanguageServerProgress {
2374 message: report.message.clone(),
2375 percentage: report.percentage.map(|p| p as usize),
2376 last_update_at: Instant::now(),
2377 },
2378 cx,
2379 );
2380 self.broadcast_language_server_update(
2381 server_id,
2382 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2383 token,
2384 message: report.message,
2385 percentage: report.percentage.map(|p| p as u32),
2386 }),
2387 );
2388 }
2389 }
2390 lsp::WorkDoneProgress::Report(report) => {
2391 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2392 self.on_lsp_work_progress(
2393 server_id,
2394 token.clone(),
2395 LanguageServerProgress {
2396 message: report.message.clone(),
2397 percentage: report.percentage.map(|p| p as usize),
2398 last_update_at: Instant::now(),
2399 },
2400 cx,
2401 );
2402 self.broadcast_language_server_update(
2403 server_id,
2404 proto::update_language_server::Variant::WorkProgress(
2405 proto::LspWorkProgress {
2406 token,
2407 message: report.message,
2408 percentage: report.percentage.map(|p| p as u32),
2409 },
2410 ),
2411 );
2412 }
2413 }
2414 lsp::WorkDoneProgress::End(_) => {
2415 language_server_status.progress_tokens.remove(&token);
2416
2417 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2418 language_server_status.has_pending_diagnostic_updates = false;
2419 self.disk_based_diagnostics_finished(server_id, cx);
2420 self.broadcast_language_server_update(
2421 server_id,
2422 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2423 proto::LspDiskBasedDiagnosticsUpdated {},
2424 ),
2425 );
2426 } else {
2427 self.on_lsp_work_end(server_id, token.clone(), cx);
2428 self.broadcast_language_server_update(
2429 server_id,
2430 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2431 token,
2432 }),
2433 );
2434 }
2435 }
2436 }
2437 }
2438
2439 fn on_lsp_work_start(
2440 &mut self,
2441 language_server_id: usize,
2442 token: String,
2443 progress: LanguageServerProgress,
2444 cx: &mut ModelContext<Self>,
2445 ) {
2446 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2447 status.pending_work.insert(token, progress);
2448 cx.notify();
2449 }
2450 }
2451
2452 fn on_lsp_work_progress(
2453 &mut self,
2454 language_server_id: usize,
2455 token: String,
2456 progress: LanguageServerProgress,
2457 cx: &mut ModelContext<Self>,
2458 ) {
2459 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2460 let entry = status
2461 .pending_work
2462 .entry(token)
2463 .or_insert(LanguageServerProgress {
2464 message: Default::default(),
2465 percentage: Default::default(),
2466 last_update_at: progress.last_update_at,
2467 });
2468 if progress.message.is_some() {
2469 entry.message = progress.message;
2470 }
2471 if progress.percentage.is_some() {
2472 entry.percentage = progress.percentage;
2473 }
2474 entry.last_update_at = progress.last_update_at;
2475 cx.notify();
2476 }
2477 }
2478
2479 fn on_lsp_work_end(
2480 &mut self,
2481 language_server_id: usize,
2482 token: String,
2483 cx: &mut ModelContext<Self>,
2484 ) {
2485 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2486 status.pending_work.remove(&token);
2487 cx.notify();
2488 }
2489 }
2490
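    /// Handles a `workspace/applyEdit` request from a language server by applying the edit
    /// to the project and recording the resulting transaction, so that it can be returned to
    /// whoever triggered the server-side command (see `apply_code_action`).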
2491 async fn on_lsp_workspace_edit(
2492 this: WeakModelHandle<Self>,
2493 params: lsp::ApplyWorkspaceEditParams,
2494 server_id: usize,
2495 adapter: Arc<dyn LspAdapter>,
2496 language_server: Arc<LanguageServer>,
2497 mut cx: AsyncAppContext,
2498 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2499 let this = this
2500 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2502 let transaction = Self::deserialize_workspace_edit(
2503 this.clone(),
2504 params.edit,
2505 true,
2506 adapter.clone(),
2507 language_server.clone(),
2508 &mut cx,
2509 )
2510 .await
2511 .log_err();
2512 this.update(&mut cx, |this, _| {
2513 if let Some(transaction) = transaction {
2514 this.last_workspace_edits_by_language_server
2515 .insert(server_id, transaction);
2516 }
2517 });
2518 Ok(lsp::ApplyWorkspaceEditResponse {
2519 applied: true,
2520 failed_change: None,
2521 failure_reason: None,
2522 })
2523 }
2524
2525 fn broadcast_language_server_update(
2526 &self,
2527 language_server_id: usize,
2528 event: proto::update_language_server::Variant,
2529 ) {
2530 if let Some(project_id) = self.shared_remote_id() {
2531 self.client
2532 .send(proto::UpdateLanguageServer {
2533 project_id,
2534 language_server_id: language_server_id as u64,
2535 variant: Some(event),
2536 })
2537 .log_err();
2538 }
2539 }
2540
2541 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2542 for server_state in self.language_servers.values() {
2543 if let LanguageServerState::Running { server, .. } = server_state {
2544 server
2545 .notify::<lsp::notification::DidChangeConfiguration>(
2546 lsp::DidChangeConfigurationParams {
2547 settings: settings.clone(),
2548 },
2549 )
2550 .ok();
2551 }
2552 }
2553 *self.language_server_settings.lock() = settings;
2554 }
2555
2556 pub fn language_server_statuses(
2557 &self,
2558 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2559 self.language_server_statuses.values()
2560 }
2561
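    /// Converts a `textDocument/publishDiagnostics` notification into diagnostic entries and
    /// forwards them to the relevant worktree and buffer. Diagnostics whose related
    /// information points at an already-seen diagnostic are treated as supporting
    /// diagnostics; every other diagnostic starts a new group, and its related information
    /// within the same file is recorded as non-primary entries of that group.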
2562 pub fn update_diagnostics(
2563 &mut self,
2564 language_server_id: usize,
2565 params: lsp::PublishDiagnosticsParams,
2566 disk_based_sources: &[&str],
2567 cx: &mut ModelContext<Self>,
2568 ) -> Result<()> {
2569 let abs_path = params
2570 .uri
2571 .to_file_path()
2572 .map_err(|_| anyhow!("URI is not a file"))?;
2573 let mut diagnostics = Vec::default();
2574 let mut primary_diagnostic_group_ids = HashMap::default();
2575 let mut sources_by_group_id = HashMap::default();
2576 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2578 let source = diagnostic.source.as_ref();
2579 let code = diagnostic.code.as_ref().map(|code| match code {
2580 lsp::NumberOrString::Number(code) => code.to_string(),
2581 lsp::NumberOrString::String(code) => code.clone(),
2582 });
2583 let range = range_from_lsp(diagnostic.range);
2584 let is_supporting = diagnostic
2585 .related_information
2586 .as_ref()
2587 .map_or(false, |infos| {
2588 infos.iter().any(|info| {
2589 primary_diagnostic_group_ids.contains_key(&(
2590 source,
2591 code.clone(),
2592 range_from_lsp(info.location.range),
2593 ))
2594 })
2595 });
2596
2597 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2598 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2599 });
2600
2601 if is_supporting {
2602 supporting_diagnostics.insert(
2603 (source, code.clone(), range),
2604 (diagnostic.severity, is_unnecessary),
2605 );
2606 } else {
2607 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2608 let is_disk_based = source.map_or(false, |source| {
2609 disk_based_sources.contains(&source.as_str())
2610 });
2611
2612 sources_by_group_id.insert(group_id, source);
2613 primary_diagnostic_group_ids
2614 .insert((source, code.clone(), range.clone()), group_id);
2615
2616 diagnostics.push(DiagnosticEntry {
2617 range,
2618 diagnostic: Diagnostic {
2619 code: code.clone(),
2620 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2621 message: diagnostic.message.clone(),
2622 group_id,
2623 is_primary: true,
2624 is_valid: true,
2625 is_disk_based,
2626 is_unnecessary,
2627 },
2628 });
2629 if let Some(infos) = &diagnostic.related_information {
2630 for info in infos {
2631 if info.location.uri == params.uri && !info.message.is_empty() {
2632 let range = range_from_lsp(info.location.range);
2633 diagnostics.push(DiagnosticEntry {
2634 range,
2635 diagnostic: Diagnostic {
2636 code: code.clone(),
2637 severity: DiagnosticSeverity::INFORMATION,
2638 message: info.message.clone(),
2639 group_id,
2640 is_primary: false,
2641 is_valid: true,
2642 is_disk_based,
2643 is_unnecessary: false,
2644 },
2645 });
2646 }
2647 }
2648 }
2649 }
2650 }
2651
2652 for entry in &mut diagnostics {
2653 let diagnostic = &mut entry.diagnostic;
2654 if !diagnostic.is_primary {
2655 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2656 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2657 source,
2658 diagnostic.code.clone(),
2659 entry.range.clone(),
2660 )) {
2661 if let Some(severity) = severity {
2662 diagnostic.severity = severity;
2663 }
2664 diagnostic.is_unnecessary = is_unnecessary;
2665 }
2666 }
2667 }
2668
2669 self.update_diagnostic_entries(
2670 language_server_id,
2671 abs_path,
2672 params.version,
2673 diagnostics,
2674 cx,
2675 )?;
2676 Ok(())
2677 }
2678
2679 pub fn update_diagnostic_entries(
2680 &mut self,
2681 language_server_id: usize,
2682 abs_path: PathBuf,
2683 version: Option<i32>,
2684 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2685 cx: &mut ModelContext<Project>,
2686 ) -> Result<(), anyhow::Error> {
2687 let (worktree, relative_path) = self
2688 .find_local_worktree(&abs_path, cx)
2689 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2690
2691 let project_path = ProjectPath {
2692 worktree_id: worktree.read(cx).id(),
2693 path: relative_path.into(),
2694 };
2695 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2696 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2697 }
2698
2699 let updated = worktree.update(cx, |worktree, cx| {
2700 worktree
2701 .as_local_mut()
2702 .ok_or_else(|| anyhow!("not a local worktree"))?
2703 .update_diagnostics(
2704 language_server_id,
2705 project_path.path.clone(),
2706 diagnostics,
2707 cx,
2708 )
2709 })?;
2710 if updated {
2711 cx.emit(Event::DiagnosticsUpdated {
2712 language_server_id,
2713 path: project_path,
2714 });
2715 }
2716 Ok(())
2717 }
2718
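    /// Applies diagnostics to an open buffer. Disk-based diagnostics are mapped through the
    /// edits made since the last save, all ranges are clipped to the snapshot corresponding
    /// to the reported LSP version, and empty ranges are expanded to cover at least one
    /// character where possible.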
2719 fn update_buffer_diagnostics(
2720 &mut self,
2721 buffer: &ModelHandle<Buffer>,
2722 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2723 version: Option<i32>,
2724 cx: &mut ModelContext<Self>,
2725 ) -> Result<()> {
2726 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2727 Ordering::Equal
2728 .then_with(|| b.is_primary.cmp(&a.is_primary))
2729 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2730 .then_with(|| a.severity.cmp(&b.severity))
2731 .then_with(|| a.message.cmp(&b.message))
2732 }
2733
2734 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2735
2736 diagnostics.sort_unstable_by(|a, b| {
2737 Ordering::Equal
2738 .then_with(|| a.range.start.cmp(&b.range.start))
2739 .then_with(|| b.range.end.cmp(&a.range.end))
2740 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2741 });
2742
2743 let mut sanitized_diagnostics = Vec::new();
2744 let edits_since_save = Patch::new(
2745 snapshot
2746 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2747 .collect(),
2748 );
2749 for entry in diagnostics {
2750 let start;
2751 let end;
2752 if entry.diagnostic.is_disk_based {
2753 // Some diagnostics are based on files on disk instead of buffers'
2754 // current contents. Adjust these diagnostics' ranges to reflect
2755 // any unsaved edits.
2756 start = edits_since_save.old_to_new(entry.range.start);
2757 end = edits_since_save.old_to_new(entry.range.end);
2758 } else {
2759 start = entry.range.start;
2760 end = entry.range.end;
2761 }
2762
2763 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2764 ..snapshot.clip_point_utf16(end, Bias::Right);
2765
2766 // Expand empty ranges by one character
2767 if range.start == range.end {
2768 range.end.column += 1;
2769 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2770 if range.start == range.end && range.end.column > 0 {
2771 range.start.column -= 1;
2772 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2773 }
2774 }
2775
2776 sanitized_diagnostics.push(DiagnosticEntry {
2777 range,
2778 diagnostic: entry.diagnostic,
2779 });
2780 }
2781 drop(edits_since_save);
2782
2783 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2784 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2785 Ok(())
2786 }
2787
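    /// Reloads the given buffers from disk if they are dirty. Remote buffers are reloaded by
    /// the host via RPC, local buffers directly; the resulting transactions are collected
    /// into a single `ProjectTransaction`, optionally without pushing them onto the buffers'
    /// undo histories.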
2788 pub fn reload_buffers(
2789 &self,
2790 buffers: HashSet<ModelHandle<Buffer>>,
2791 push_to_history: bool,
2792 cx: &mut ModelContext<Self>,
2793 ) -> Task<Result<ProjectTransaction>> {
2794 let mut local_buffers = Vec::new();
2795 let mut remote_buffers = None;
2796 for buffer_handle in buffers {
2797 let buffer = buffer_handle.read(cx);
2798 if buffer.is_dirty() {
2799 if let Some(file) = File::from_dyn(buffer.file()) {
2800 if file.is_local() {
2801 local_buffers.push(buffer_handle);
2802 } else {
2803 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2804 }
2805 }
2806 }
2807 }
2808
2809 let remote_buffers = self.remote_id().zip(remote_buffers);
2810 let client = self.client.clone();
2811
2812 cx.spawn(|this, mut cx| async move {
2813 let mut project_transaction = ProjectTransaction::default();
2814
2815 if let Some((project_id, remote_buffers)) = remote_buffers {
2816 let response = client
2817 .request(proto::ReloadBuffers {
2818 project_id,
2819 buffer_ids: remote_buffers
2820 .iter()
2821 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2822 .collect(),
2823 })
2824 .await?
2825 .transaction
2826 .ok_or_else(|| anyhow!("missing transaction"))?;
2827 project_transaction = this
2828 .update(&mut cx, |this, cx| {
2829 this.deserialize_project_transaction(response, push_to_history, cx)
2830 })
2831 .await?;
2832 }
2833
2834 for buffer in local_buffers {
2835 let transaction = buffer
2836 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2837 .await?;
2838 buffer.update(&mut cx, |buffer, cx| {
2839 if let Some(transaction) = transaction {
2840 if !push_to_history {
2841 buffer.forget_transaction(transaction.id);
2842 }
2843 project_transaction.0.insert(cx.handle(), transaction);
2844 }
2845 });
2846 }
2847
2848 Ok(project_transaction)
2849 })
2850 }
2851
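    /// Formats the given buffers. Remote buffers are formatted by the host via RPC. For
    /// local buffers, the buffer's language server is asked for whole-document or range
    /// formatting depending on its capabilities, and the returned LSP edits are applied as a
    /// single transaction per buffer.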
2852 pub fn format(
2853 &self,
2854 buffers: HashSet<ModelHandle<Buffer>>,
2855 push_to_history: bool,
2856 cx: &mut ModelContext<Project>,
2857 ) -> Task<Result<ProjectTransaction>> {
2858 let mut local_buffers = Vec::new();
2859 let mut remote_buffers = None;
2860 for buffer_handle in buffers {
2861 let buffer = buffer_handle.read(cx);
2862 if let Some(file) = File::from_dyn(buffer.file()) {
2863 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2864 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2865 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2866 }
2867 } else {
2868 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2869 }
2870 } else {
2871 return Task::ready(Ok(Default::default()));
2872 }
2873 }
2874
2875 let remote_buffers = self.remote_id().zip(remote_buffers);
2876 let client = self.client.clone();
2877
2878 cx.spawn(|this, mut cx| async move {
2879 let mut project_transaction = ProjectTransaction::default();
2880
2881 if let Some((project_id, remote_buffers)) = remote_buffers {
2882 let response = client
2883 .request(proto::FormatBuffers {
2884 project_id,
2885 buffer_ids: remote_buffers
2886 .iter()
2887 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2888 .collect(),
2889 })
2890 .await?
2891 .transaction
2892 .ok_or_else(|| anyhow!("missing transaction"))?;
2893 project_transaction = this
2894 .update(&mut cx, |this, cx| {
2895 this.deserialize_project_transaction(response, push_to_history, cx)
2896 })
2897 .await?;
2898 }
2899
2900 for (buffer, buffer_abs_path, language_server) in local_buffers {
2901 let text_document = lsp::TextDocumentIdentifier::new(
2902 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2903 );
2904 let capabilities = &language_server.capabilities();
2905 let tab_size = cx.update(|cx| {
2906 let language_name = buffer.read(cx).language().map(|language| language.name());
2907 cx.global::<Settings>().tab_size(language_name.as_deref())
2908 });
2909 let lsp_edits = if capabilities
2910 .document_formatting_provider
2911 .as_ref()
2912 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2913 {
2914 language_server
2915 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2916 text_document,
2917 options: lsp::FormattingOptions {
2918 tab_size: tab_size.into(),
2919 insert_spaces: true,
2920 insert_final_newline: Some(true),
2921 ..Default::default()
2922 },
2923 work_done_progress_params: Default::default(),
2924 })
2925 .await?
2926 } else if capabilities
2927 .document_range_formatting_provider
2928 .as_ref()
2929 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2930 {
2931 let buffer_start = lsp::Position::new(0, 0);
2932 let buffer_end =
2933 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2934 language_server
2935 .request::<lsp::request::RangeFormatting>(
2936 lsp::DocumentRangeFormattingParams {
2937 text_document,
2938 range: lsp::Range::new(buffer_start, buffer_end),
2939 options: lsp::FormattingOptions {
2940 tab_size: tab_size.into(),
2941 insert_spaces: true,
2942 insert_final_newline: Some(true),
2943 ..Default::default()
2944 },
2945 work_done_progress_params: Default::default(),
2946 },
2947 )
2948 .await?
2949 } else {
2950 continue;
2951 };
2952
2953 if let Some(lsp_edits) = lsp_edits {
2954 let edits = this
2955 .update(&mut cx, |this, cx| {
2956 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2957 })
2958 .await?;
2959 buffer.update(&mut cx, |buffer, cx| {
2960 buffer.finalize_last_transaction();
2961 buffer.start_transaction();
2962 for (range, text) in edits {
2963 buffer.edit([(range, text)], cx);
2964 }
2965 if buffer.end_transaction(cx).is_some() {
2966 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2967 if !push_to_history {
2968 buffer.forget_transaction(transaction.id);
2969 }
2970 project_transaction.0.insert(cx.handle(), transaction);
2971 }
2972 });
2973 }
2974 }
2975
2976 Ok(project_transaction)
2977 })
2978 }
2979
2980 pub fn definition<T: ToPointUtf16>(
2981 &self,
2982 buffer: &ModelHandle<Buffer>,
2983 position: T,
2984 cx: &mut ModelContext<Self>,
2985 ) -> Task<Result<Vec<LocationLink>>> {
2986 let position = position.to_point_utf16(buffer.read(cx));
2987 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2988 }
2989
2990 pub fn references<T: ToPointUtf16>(
2991 &self,
2992 buffer: &ModelHandle<Buffer>,
2993 position: T,
2994 cx: &mut ModelContext<Self>,
2995 ) -> Task<Result<Vec<Location>>> {
2996 let position = position.to_point_utf16(buffer.read(cx));
2997 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2998 }
2999
3000 pub fn document_highlights<T: ToPointUtf16>(
3001 &self,
3002 buffer: &ModelHandle<Buffer>,
3003 position: T,
3004 cx: &mut ModelContext<Self>,
3005 ) -> Task<Result<Vec<DocumentHighlight>>> {
3006 let position = position.to_point_utf16(buffer.read(cx));
3007
3008 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3009 }
3010
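    /// Searches for workspace symbols matching the query. Locally, a `workspace/symbol`
    /// request is sent to every running language server and the results are resolved to
    /// project paths; on remote projects the query is forwarded to the host.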
3011 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3012 if self.is_local() {
3013 let mut requests = Vec::new();
3014 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
3015 let worktree_id = *worktree_id;
3016 if let Some(worktree) = self
3017 .worktree_for_id(worktree_id, cx)
3018 .and_then(|worktree| worktree.read(cx).as_local())
3019 {
3020 if let Some(LanguageServerState::Running { adapter, server }) =
3021 self.language_servers.get(server_id)
3022 {
3023 let adapter = adapter.clone();
3024 let worktree_abs_path = worktree.abs_path().clone();
3025 requests.push(
3026 server
3027 .request::<lsp::request::WorkspaceSymbol>(
3028 lsp::WorkspaceSymbolParams {
3029 query: query.to_string(),
3030 ..Default::default()
3031 },
3032 )
3033 .log_err()
3034 .map(move |response| {
3035 (
3036 adapter,
3037 worktree_id,
3038 worktree_abs_path,
3039 response.unwrap_or_default(),
3040 )
3041 }),
3042 );
3043 }
3044 }
3045 }
3046
3047 cx.spawn_weak(|this, cx| async move {
3048 let responses = futures::future::join_all(requests).await;
3049 let this = if let Some(this) = this.upgrade(&cx) {
3050 this
3051 } else {
3052 return Ok(Default::default());
3053 };
3054 this.read_with(&cx, |this, cx| {
3055 let mut symbols = Vec::new();
3056 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3057 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3058 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3059 let mut worktree_id = source_worktree_id;
3060 let path;
3061 if let Some((worktree, rel_path)) =
3062 this.find_local_worktree(&abs_path, cx)
3063 {
3064 worktree_id = worktree.read(cx).id();
3065 path = rel_path;
3066 } else {
3067 path = relativize_path(&worktree_abs_path, &abs_path);
3068 }
3069
3070 let label = this
3071 .languages
3072 .select_language(&path)
3073 .and_then(|language| {
3074 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3075 })
3076 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3077 let signature = this.symbol_signature(worktree_id, &path);
3078
3079 Some(Symbol {
3080 source_worktree_id,
3081 worktree_id,
3082 language_server_name: adapter.name(),
3083 name: lsp_symbol.name,
3084 kind: lsp_symbol.kind,
3085 label,
3086 path,
3087 range: range_from_lsp(lsp_symbol.location.range),
3088 signature,
3089 })
3090 }));
3091 }
3092 Ok(symbols)
3093 })
3094 })
3095 } else if let Some(project_id) = self.remote_id() {
3096 let request = self.client.request(proto::GetProjectSymbols {
3097 project_id,
3098 query: query.to_string(),
3099 });
3100 cx.spawn_weak(|this, cx| async move {
3101 let response = request.await?;
3102 let mut symbols = Vec::new();
3103 if let Some(this) = this.upgrade(&cx) {
3104 this.read_with(&cx, |this, _| {
3105 symbols.extend(
3106 response
3107 .symbols
3108 .into_iter()
3109 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3110 );
3111 })
3112 }
3113 Ok(symbols)
3114 })
3115 } else {
3116 Task::ready(Ok(Default::default()))
3117 }
3118 }
3119
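    /// Opens the buffer containing the given symbol, either via the language server that
    /// produced the symbol (for local projects) or by asking the host (for remote projects).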
3120 pub fn open_buffer_for_symbol(
3121 &mut self,
3122 symbol: &Symbol,
3123 cx: &mut ModelContext<Self>,
3124 ) -> Task<Result<ModelHandle<Buffer>>> {
3125 if self.is_local() {
3126 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
3127 symbol.source_worktree_id,
3128 symbol.language_server_name.clone(),
3129 )) {
3130 *id
3131 } else {
3132 return Task::ready(Err(anyhow!(
3133 "language server for worktree and language not found"
3134 )));
3135 };
3136
3137 let worktree_abs_path = if let Some(worktree_abs_path) = self
3138 .worktree_for_id(symbol.worktree_id, cx)
3139 .and_then(|worktree| worktree.read(cx).as_local())
3140 .map(|local_worktree| local_worktree.abs_path())
3141 {
3142 worktree_abs_path
3143 } else {
3144 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3145 };
3146 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3147 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3148 uri
3149 } else {
3150 return Task::ready(Err(anyhow!("invalid symbol path")));
3151 };
3152
3153 self.open_local_buffer_via_lsp(
3154 symbol_uri,
3155 language_server_id,
3156 symbol.language_server_name.clone(),
3157 cx,
3158 )
3159 } else if let Some(project_id) = self.remote_id() {
3160 let request = self.client.request(proto::OpenBufferForSymbol {
3161 project_id,
3162 symbol: Some(serialize_symbol(symbol)),
3163 });
3164 cx.spawn(|this, mut cx| async move {
3165 let response = request.await?;
3166 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3167 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3168 .await
3169 })
3170 } else {
3171 Task::ready(Err(anyhow!("project does not have a remote id")))
3172 }
3173 }
3174
3175 pub fn hover<T: ToPointUtf16>(
3176 &self,
3177 buffer: &ModelHandle<Buffer>,
3178 position: T,
3179 cx: &mut ModelContext<Self>,
3180 ) -> Task<Result<Option<Hover>>> {
3181 let position = position.to_point_utf16(buffer.read(cx));
3182 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3183 }
3184
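    /// Requests completions at the given position. For local buffers, the language server is
    /// queried and each completion's replacement range is validated against the current
    /// snapshot, falling back to the word surrounding the position when the server provides
    /// no range; for remote buffers the request is forwarded to the host.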
3185 pub fn completions<T: ToPointUtf16>(
3186 &self,
3187 source_buffer_handle: &ModelHandle<Buffer>,
3188 position: T,
3189 cx: &mut ModelContext<Self>,
3190 ) -> Task<Result<Vec<Completion>>> {
3191 let source_buffer_handle = source_buffer_handle.clone();
3192 let source_buffer = source_buffer_handle.read(cx);
3193 let buffer_id = source_buffer.remote_id();
3194 let language = source_buffer.language().cloned();
3195 let worktree;
3196 let buffer_abs_path;
3197 if let Some(file) = File::from_dyn(source_buffer.file()) {
3198 worktree = file.worktree.clone();
3199 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3200 } else {
3201 return Task::ready(Ok(Default::default()));
3202 };
3203
3204 let position = position.to_point_utf16(source_buffer);
3205 let anchor = source_buffer.anchor_after(position);
3206
3207 if worktree.read(cx).as_local().is_some() {
3208 let buffer_abs_path = buffer_abs_path.unwrap();
3209 let lang_server =
3210 if let Some((_, server)) = self.language_server_for_buffer(source_buffer, cx) {
3211 server.clone()
3212 } else {
3213 return Task::ready(Ok(Default::default()));
3214 };
3215
3216 cx.spawn(|_, cx| async move {
3217 let completions = lang_server
3218 .request::<lsp::request::Completion>(lsp::CompletionParams {
3219 text_document_position: lsp::TextDocumentPositionParams::new(
3220 lsp::TextDocumentIdentifier::new(
3221 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3222 ),
3223 point_to_lsp(position),
3224 ),
3225 context: Default::default(),
3226 work_done_progress_params: Default::default(),
3227 partial_result_params: Default::default(),
3228 })
3229 .await
3230 .context("lsp completion request failed")?;
3231
3232 let completions = if let Some(completions) = completions {
3233 match completions {
3234 lsp::CompletionResponse::Array(completions) => completions,
3235 lsp::CompletionResponse::List(list) => list.items,
3236 }
3237 } else {
3238 Default::default()
3239 };
3240
3241 source_buffer_handle.read_with(&cx, |this, _| {
3242 let snapshot = this.snapshot();
3243 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3244 let mut range_for_token = None;
3245 Ok(completions
3246 .into_iter()
3247 .filter_map(|lsp_completion| {
3248 // For now, we can only handle additional edits if they are returned
3249 // when resolving the completion, not if they are present initially.
3250 if lsp_completion
3251 .additional_text_edits
3252 .as_ref()
3253 .map_or(false, |edits| !edits.is_empty())
3254 {
3255 return None;
3256 }
3257
3258 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3259 // If the language server provides a range to overwrite, then
3260 // check that the range is valid.
3261 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3262 let range = range_from_lsp(edit.range);
3263 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3264 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3265 if start != range.start || end != range.end {
3266 log::info!("completion out of expected range");
3267 return None;
3268 }
3269 (
3270 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3271 edit.new_text.clone(),
3272 )
3273 }
3274 // If the language server does not provide a range, then infer
3275 // the range based on the syntax tree.
3276 None => {
3277 if position != clipped_position {
3278 log::info!("completion out of expected range");
3279 return None;
3280 }
3281 let Range { start, end } = range_for_token
3282 .get_or_insert_with(|| {
3283 let offset = position.to_offset(&snapshot);
3284 let (range, kind) = snapshot.surrounding_word(offset);
3285 if kind == Some(CharKind::Word) {
3286 range
3287 } else {
3288 offset..offset
3289 }
3290 })
3291 .clone();
3292 let text = lsp_completion
3293 .insert_text
3294 .as_ref()
3295 .unwrap_or(&lsp_completion.label)
3296 .clone();
3297 (
3298 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3299 text.clone(),
3300 )
3301 }
3302 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3303 log::info!("unsupported insert/replace completion");
3304 return None;
3305 }
3306 };
3307
3308 Some(Completion {
3309 old_range,
3310 new_text,
3311 label: language
3312 .as_ref()
3313 .and_then(|l| l.label_for_completion(&lsp_completion))
3314 .unwrap_or_else(|| {
3315 CodeLabel::plain(
3316 lsp_completion.label.clone(),
3317 lsp_completion.filter_text.as_deref(),
3318 )
3319 }),
3320 lsp_completion,
3321 })
3322 })
3323 .collect())
3324 })
3325 })
3326 } else if let Some(project_id) = self.remote_id() {
3327 let rpc = self.client.clone();
3328 let message = proto::GetCompletions {
3329 project_id,
3330 buffer_id,
3331 position: Some(language::proto::serialize_anchor(&anchor)),
3332 version: serialize_version(&source_buffer.version()),
3333 };
3334 cx.spawn_weak(|_, mut cx| async move {
3335 let response = rpc.request(message).await?;
3336
3337 source_buffer_handle
3338 .update(&mut cx, |buffer, _| {
3339 buffer.wait_for_version(deserialize_version(response.version))
3340 })
3341 .await;
3342
3343 response
3344 .completions
3345 .into_iter()
3346 .map(|completion| {
3347 language::proto::deserialize_completion(completion, language.as_ref())
3348 })
3349 .collect()
3350 })
3351 } else {
3352 Task::ready(Ok(Default::default()))
3353 }
3354 }
3355
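    /// Resolves the given completion with its language server and applies any additional
    /// text edits it returns, producing an optional transaction. On remote projects the
    /// edits are applied by the host and the resulting transaction is awaited locally.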
3356 pub fn apply_additional_edits_for_completion(
3357 &self,
3358 buffer_handle: ModelHandle<Buffer>,
3359 completion: Completion,
3360 push_to_history: bool,
3361 cx: &mut ModelContext<Self>,
3362 ) -> Task<Result<Option<Transaction>>> {
3363 let buffer = buffer_handle.read(cx);
3364 let buffer_id = buffer.remote_id();
3365
3366 if self.is_local() {
3367 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3368 {
3369 server.clone()
3370 } else {
3371 return Task::ready(Ok(Default::default()));
3372 };
3373
3374 cx.spawn(|this, mut cx| async move {
3375 let resolved_completion = lang_server
3376 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3377 .await?;
3378 if let Some(edits) = resolved_completion.additional_text_edits {
3379 let edits = this
3380 .update(&mut cx, |this, cx| {
3381 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3382 })
3383 .await?;
3384 buffer_handle.update(&mut cx, |buffer, cx| {
3385 buffer.finalize_last_transaction();
3386 buffer.start_transaction();
3387 for (range, text) in edits {
3388 buffer.edit([(range, text)], cx);
3389 }
3390 let transaction = if buffer.end_transaction(cx).is_some() {
3391 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3392 if !push_to_history {
3393 buffer.forget_transaction(transaction.id);
3394 }
3395 Some(transaction)
3396 } else {
3397 None
3398 };
3399 Ok(transaction)
3400 })
3401 } else {
3402 Ok(None)
3403 }
3404 })
3405 } else if let Some(project_id) = self.remote_id() {
3406 let client = self.client.clone();
3407 cx.spawn(|_, mut cx| async move {
3408 let response = client
3409 .request(proto::ApplyCompletionAdditionalEdits {
3410 project_id,
3411 buffer_id,
3412 completion: Some(language::proto::serialize_completion(&completion)),
3413 })
3414 .await?;
3415
3416 if let Some(transaction) = response.transaction {
3417 let transaction = language::proto::deserialize_transaction(transaction)?;
3418 buffer_handle
3419 .update(&mut cx, |buffer, _| {
3420 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3421 })
3422 .await;
3423 if push_to_history {
3424 buffer_handle.update(&mut cx, |buffer, _| {
3425 buffer.push_transaction(transaction.clone(), Instant::now());
3426 });
3427 }
3428 Ok(Some(transaction))
3429 } else {
3430 Ok(None)
3431 }
3432 })
3433 } else {
3434 Task::ready(Err(anyhow!("project does not have a remote id")))
3435 }
3436 }
3437
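    /// Requests code actions for the given range, passing the diagnostics that overlap the
    /// range as context and restricting the results to quickfix, refactor, refactor-extract,
    /// and source actions. Remote projects forward the request to the host.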
3438 pub fn code_actions<T: Clone + ToOffset>(
3439 &self,
3440 buffer_handle: &ModelHandle<Buffer>,
3441 range: Range<T>,
3442 cx: &mut ModelContext<Self>,
3443 ) -> Task<Result<Vec<CodeAction>>> {
3444 let buffer_handle = buffer_handle.clone();
3445 let buffer = buffer_handle.read(cx);
3446 let snapshot = buffer.snapshot();
3447 let relevant_diagnostics = snapshot
3448 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3449 .map(|entry| entry.to_lsp_diagnostic_stub())
3450 .collect();
3451 let buffer_id = buffer.remote_id();
3452 let worktree;
3453 let buffer_abs_path;
3454 if let Some(file) = File::from_dyn(buffer.file()) {
3455 worktree = file.worktree.clone();
3456 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3457 } else {
3458 return Task::ready(Ok(Default::default()));
3459 };
3460 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3461
3462 if worktree.read(cx).as_local().is_some() {
3463 let buffer_abs_path = buffer_abs_path.unwrap();
3464 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3465 {
3466 server.clone()
3467 } else {
3468 return Task::ready(Ok(Default::default()));
3469 };
3470
3471 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3472 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3474 return Ok(Default::default());
3475 }
3476
3477 Ok(lang_server
3478 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3479 text_document: lsp::TextDocumentIdentifier::new(
3480 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3481 ),
3482 range: lsp_range,
3483 work_done_progress_params: Default::default(),
3484 partial_result_params: Default::default(),
3485 context: lsp::CodeActionContext {
3486 diagnostics: relevant_diagnostics,
3487 only: Some(vec![
3488 lsp::CodeActionKind::QUICKFIX,
3489 lsp::CodeActionKind::REFACTOR,
3490 lsp::CodeActionKind::REFACTOR_EXTRACT,
3491 lsp::CodeActionKind::SOURCE,
3492 ]),
3493 },
3494 })
3495 .await?
3496 .unwrap_or_default()
3497 .into_iter()
3498 .filter_map(|entry| {
3499 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3500 Some(CodeAction {
3501 range: range.clone(),
3502 lsp_action,
3503 })
3504 } else {
3505 None
3506 }
3507 })
3508 .collect())
3509 })
3510 } else if let Some(project_id) = self.remote_id() {
3511 let rpc = self.client.clone();
3512 let version = buffer.version();
3513 cx.spawn_weak(|_, mut cx| async move {
3514 let response = rpc
3515 .request(proto::GetCodeActions {
3516 project_id,
3517 buffer_id,
3518 start: Some(language::proto::serialize_anchor(&range.start)),
3519 end: Some(language::proto::serialize_anchor(&range.end)),
3520 version: serialize_version(&version),
3521 })
3522 .await?;
3523
3524 buffer_handle
3525 .update(&mut cx, |buffer, _| {
3526 buffer.wait_for_version(deserialize_version(response.version))
3527 })
3528 .await;
3529
3530 response
3531 .actions
3532 .into_iter()
3533 .map(language::proto::deserialize_code_action)
3534 .collect()
3535 })
3536 } else {
3537 Task::ready(Ok(Default::default()))
3538 }
3539 }
3540
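    /// Applies a code action. Actions carrying resolvable data are re-resolved with an
    /// up-to-date range; otherwise the actions for the range are re-requested and matched by
    /// title. The action's workspace edit is then applied, or its command is executed and
    /// the workspace edit recorded from the server's `workspace/applyEdit` request is
    /// returned.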
3541 pub fn apply_code_action(
3542 &self,
3543 buffer_handle: ModelHandle<Buffer>,
3544 mut action: CodeAction,
3545 push_to_history: bool,
3546 cx: &mut ModelContext<Self>,
3547 ) -> Task<Result<ProjectTransaction>> {
3548 if self.is_local() {
3549 let buffer = buffer_handle.read(cx);
3550 let (lsp_adapter, lang_server) =
3551 if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) {
3552 (adapter.clone(), server.clone())
3553 } else {
3554 return Task::ready(Ok(Default::default()));
3555 };
3556 let range = action.range.to_point_utf16(buffer);
3557
3558 cx.spawn(|this, mut cx| async move {
3559 if let Some(lsp_range) = action
3560 .lsp_action
3561 .data
3562 .as_mut()
3563 .and_then(|d| d.get_mut("codeActionParams"))
3564 .and_then(|d| d.get_mut("range"))
3565 {
3566 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3567 action.lsp_action = lang_server
3568 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3569 .await?;
3570 } else {
3571 let actions = this
3572 .update(&mut cx, |this, cx| {
3573 this.code_actions(&buffer_handle, action.range, cx)
3574 })
3575 .await?;
3576 action.lsp_action = actions
3577 .into_iter()
3578 .find(|a| a.lsp_action.title == action.lsp_action.title)
3579 .ok_or_else(|| anyhow!("code action is outdated"))?
3580 .lsp_action;
3581 }
3582
3583 if let Some(edit) = action.lsp_action.edit {
3584 Self::deserialize_workspace_edit(
3585 this,
3586 edit,
3587 push_to_history,
3588 lsp_adapter.clone(),
3589 lang_server.clone(),
3590 &mut cx,
3591 )
3592 .await
3593 } else if let Some(command) = action.lsp_action.command {
3594 this.update(&mut cx, |this, _| {
3595 this.last_workspace_edits_by_language_server
3596 .remove(&lang_server.server_id());
3597 });
3598 lang_server
3599 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3600 command: command.command,
3601 arguments: command.arguments.unwrap_or_default(),
3602 ..Default::default()
3603 })
3604 .await?;
3605 Ok(this.update(&mut cx, |this, _| {
3606 this.last_workspace_edits_by_language_server
3607 .remove(&lang_server.server_id())
3608 .unwrap_or_default()
3609 }))
3610 } else {
3611 Ok(ProjectTransaction::default())
3612 }
3613 })
3614 } else if let Some(project_id) = self.remote_id() {
3615 let client = self.client.clone();
3616 let request = proto::ApplyCodeAction {
3617 project_id,
3618 buffer_id: buffer_handle.read(cx).remote_id(),
3619 action: Some(language::proto::serialize_code_action(&action)),
3620 };
3621 cx.spawn(|this, mut cx| async move {
3622 let response = client
3623 .request(request)
3624 .await?
3625 .transaction
3626 .ok_or_else(|| anyhow!("missing transaction"))?;
3627 this.update(&mut cx, |this, cx| {
3628 this.deserialize_project_transaction(response, push_to_history, cx)
3629 })
3630 .await
3631 })
3632 } else {
3633 Task::ready(Err(anyhow!("project does not have a remote id")))
3634 }
3635 }
3636
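    /// Applies an LSP `WorkspaceEdit` to the project. Resource operations create, rename, or
    /// delete files and directories on disk; document edits are applied to buffers, opening
    /// them if necessary, and the per-buffer transactions are collected into a
    /// `ProjectTransaction`.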
3637 async fn deserialize_workspace_edit(
3638 this: ModelHandle<Self>,
3639 edit: lsp::WorkspaceEdit,
3640 push_to_history: bool,
3641 lsp_adapter: Arc<dyn LspAdapter>,
3642 language_server: Arc<LanguageServer>,
3643 cx: &mut AsyncAppContext,
3644 ) -> Result<ProjectTransaction> {
3645 let fs = this.read_with(cx, |this, _| this.fs.clone());
3646 let mut operations = Vec::new();
3647 if let Some(document_changes) = edit.document_changes {
3648 match document_changes {
3649 lsp::DocumentChanges::Edits(edits) => {
3650 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3651 }
3652 lsp::DocumentChanges::Operations(ops) => operations = ops,
3653 }
3654 } else if let Some(changes) = edit.changes {
3655 operations.extend(changes.into_iter().map(|(uri, edits)| {
3656 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3657 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3658 uri,
3659 version: None,
3660 },
3661 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3662 })
3663 }));
3664 }
3665
3666 let mut project_transaction = ProjectTransaction::default();
3667 for operation in operations {
3668 match operation {
3669 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3670 let abs_path = op
3671 .uri
3672 .to_file_path()
3673 .map_err(|_| anyhow!("can't convert URI to path"))?;
3674
3675 if let Some(parent_path) = abs_path.parent() {
3676 fs.create_dir(parent_path).await?;
3677 }
3678 if abs_path.ends_with("/") {
3679 fs.create_dir(&abs_path).await?;
3680 } else {
3681 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3682 .await?;
3683 }
3684 }
3685 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3686 let source_abs_path = op
3687 .old_uri
3688 .to_file_path()
3689 .map_err(|_| anyhow!("can't convert URI to path"))?;
3690 let target_abs_path = op
3691 .new_uri
3692 .to_file_path()
3693 .map_err(|_| anyhow!("can't convert URI to path"))?;
3694 fs.rename(
3695 &source_abs_path,
3696 &target_abs_path,
3697 op.options.map(Into::into).unwrap_or_default(),
3698 )
3699 .await?;
3700 }
3701 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3702 let abs_path = op
3703 .uri
3704 .to_file_path()
3705 .map_err(|_| anyhow!("can't convert URI to path"))?;
3706 let options = op.options.map(Into::into).unwrap_or_default();
3707 if abs_path.ends_with("/") {
3708 fs.remove_dir(&abs_path, options).await?;
3709 } else {
3710 fs.remove_file(&abs_path, options).await?;
3711 }
3712 }
3713 lsp::DocumentChangeOperation::Edit(op) => {
3714 let buffer_to_edit = this
3715 .update(cx, |this, cx| {
3716 this.open_local_buffer_via_lsp(
3717 op.text_document.uri,
3718 language_server.server_id(),
3719 lsp_adapter.name(),
3720 cx,
3721 )
3722 })
3723 .await?;
3724
3725 let edits = this
3726 .update(cx, |this, cx| {
3727 let edits = op.edits.into_iter().map(|edit| match edit {
3728 lsp::OneOf::Left(edit) => edit,
3729 lsp::OneOf::Right(edit) => edit.text_edit,
3730 });
3731 this.edits_from_lsp(
3732 &buffer_to_edit,
3733 edits,
3734 op.text_document.version,
3735 cx,
3736 )
3737 })
3738 .await?;
3739
3740 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3741 buffer.finalize_last_transaction();
3742 buffer.start_transaction();
3743 for (range, text) in edits {
3744 buffer.edit([(range, text)], cx);
3745 }
3746 let transaction = if buffer.end_transaction(cx).is_some() {
3747 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3748 if !push_to_history {
3749 buffer.forget_transaction(transaction.id);
3750 }
3751 Some(transaction)
3752 } else {
3753 None
3754 };
3755
3756 transaction
3757 });
3758 if let Some(transaction) = transaction {
3759 project_transaction.0.insert(buffer_to_edit, transaction);
3760 }
3761 }
3762 }
3763 }
3764
3765 Ok(project_transaction)
3766 }
3767
3768 pub fn prepare_rename<T: ToPointUtf16>(
3769 &self,
3770 buffer: ModelHandle<Buffer>,
3771 position: T,
3772 cx: &mut ModelContext<Self>,
3773 ) -> Task<Result<Option<Range<Anchor>>>> {
3774 let position = position.to_point_utf16(buffer.read(cx));
3775 self.request_lsp(buffer, PrepareRename { position }, cx)
3776 }
3777
3778 pub fn perform_rename<T: ToPointUtf16>(
3779 &self,
3780 buffer: ModelHandle<Buffer>,
3781 position: T,
3782 new_name: String,
3783 push_to_history: bool,
3784 cx: &mut ModelContext<Self>,
3785 ) -> Task<Result<ProjectTransaction>> {
3786 let position = position.to_point_utf16(buffer.read(cx));
3787 self.request_lsp(
3788 buffer,
3789 PerformRename {
3790 position,
3791 new_name,
3792 push_to_history,
3793 },
3794 cx,
3795 )
3796 }
3797
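    /// Searches the project for the given query. Visible worktrees are scanned in parallel
    /// on background threads to find candidate paths, matching files are opened as buffers,
    /// and those buffers (along with all already-open buffers) are then searched to produce
    /// the matching ranges per buffer.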
3798 pub fn search(
3799 &self,
3800 query: SearchQuery,
3801 cx: &mut ModelContext<Self>,
3802 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3803 if self.is_local() {
3804 let snapshots = self
3805 .visible_worktrees(cx)
3806 .filter_map(|tree| {
3807 let tree = tree.read(cx).as_local()?;
3808 Some(tree.snapshot())
3809 })
3810 .collect::<Vec<_>>();
3811
3812 let background = cx.background().clone();
3813 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3814 if path_count == 0 {
3815 return Task::ready(Ok(Default::default()));
3816 }
3817 let workers = background.num_cpus().min(path_count);
3818 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3819 cx.background()
3820 .spawn({
3821 let fs = self.fs.clone();
3822 let background = cx.background().clone();
3823 let query = query.clone();
3824 async move {
3825 let fs = &fs;
3826 let query = &query;
3827 let matching_paths_tx = &matching_paths_tx;
3828 let paths_per_worker = (path_count + workers - 1) / workers;
3829 let snapshots = &snapshots;
3830 background
3831 .scoped(|scope| {
3832 for worker_ix in 0..workers {
3833 let worker_start_ix = worker_ix * paths_per_worker;
3834 let worker_end_ix = worker_start_ix + paths_per_worker;
3835 scope.spawn(async move {
3836 let mut snapshot_start_ix = 0;
3837 let mut abs_path = PathBuf::new();
3838 for snapshot in snapshots {
3839 let snapshot_end_ix =
3840 snapshot_start_ix + snapshot.visible_file_count();
3841 if worker_end_ix <= snapshot_start_ix {
3842 break;
3843 } else if worker_start_ix > snapshot_end_ix {
3844 snapshot_start_ix = snapshot_end_ix;
3845 continue;
3846 } else {
3847 let start_in_snapshot = worker_start_ix
3848 .saturating_sub(snapshot_start_ix);
3849 let end_in_snapshot =
3850 cmp::min(worker_end_ix, snapshot_end_ix)
3851 - snapshot_start_ix;
3852
3853 for entry in snapshot
3854 .files(false, start_in_snapshot)
3855 .take(end_in_snapshot - start_in_snapshot)
3856 {
3857 if matching_paths_tx.is_closed() {
3858 break;
3859 }
3860
3861 abs_path.clear();
3862 abs_path.push(&snapshot.abs_path());
3863 abs_path.push(&entry.path);
3864 let matches = if let Some(file) =
3865 fs.open_sync(&abs_path).await.log_err()
3866 {
3867 query.detect(file).unwrap_or(false)
3868 } else {
3869 false
3870 };
3871
3872 if matches {
3873 let project_path =
3874 (snapshot.id(), entry.path.clone());
3875 if matching_paths_tx
3876 .send(project_path)
3877 .await
3878 .is_err()
3879 {
3880 break;
3881 }
3882 }
3883 }
3884
3885 snapshot_start_ix = snapshot_end_ix;
3886 }
3887 }
3888 });
3889 }
3890 })
3891 .await;
3892 }
3893 })
3894 .detach();
3895
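            // Stage 2: turn matching paths into buffers, starting with buffers that are
            // already open, and forward their snapshots for searching.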
3896 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3897 let open_buffers = self
3898 .opened_buffers
3899 .values()
3900 .filter_map(|b| b.upgrade(cx))
3901 .collect::<HashSet<_>>();
3902 cx.spawn(|this, cx| async move {
3903 for buffer in &open_buffers {
3904 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3905 buffers_tx.send((buffer.clone(), snapshot)).await?;
3906 }
3907
3908 let open_buffers = Rc::new(RefCell::new(open_buffers));
3909 while let Some(project_path) = matching_paths_rx.next().await {
3910 if buffers_tx.is_closed() {
3911 break;
3912 }
3913
3914 let this = this.clone();
3915 let open_buffers = open_buffers.clone();
3916 let buffers_tx = buffers_tx.clone();
3917 cx.spawn(|mut cx| async move {
3918 if let Some(buffer) = this
3919 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3920 .await
3921 .log_err()
3922 {
3923 if open_buffers.borrow_mut().insert(buffer.clone()) {
3924 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3925 buffers_tx.send((buffer, snapshot)).await?;
3926 }
3927 }
3928
3929 Ok::<_, anyhow::Error>(())
3930 })
3931 .detach();
3932 }
3933
3934 Ok::<_, anyhow::Error>(())
3935 })
3936 .detach_and_log_err(cx);
3937
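            // Stage 3: search each buffer snapshot on background threads, collecting the
            // matching anchor ranges per buffer.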
3938 let background = cx.background().clone();
3939 cx.background().spawn(async move {
3940 let query = &query;
3941 let mut matched_buffers = Vec::new();
3942 for _ in 0..workers {
3943 matched_buffers.push(HashMap::default());
3944 }
3945 background
3946 .scoped(|scope| {
3947 for worker_matched_buffers in matched_buffers.iter_mut() {
3948 let mut buffers_rx = buffers_rx.clone();
3949 scope.spawn(async move {
3950 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3951 let buffer_matches = query
3952 .search(snapshot.as_rope())
3953 .await
3954 .iter()
3955 .map(|range| {
3956 snapshot.anchor_before(range.start)
3957 ..snapshot.anchor_after(range.end)
3958 })
3959 .collect::<Vec<_>>();
3960 if !buffer_matches.is_empty() {
3961 worker_matched_buffers
3962 .insert(buffer.clone(), buffer_matches);
3963 }
3964 }
3965 });
3966 }
3967 })
3968 .await;
3969 Ok(matched_buffers.into_iter().flatten().collect())
3970 })
3971 } else if let Some(project_id) = self.remote_id() {
3972 let request = self.client.request(query.to_proto(project_id));
3973 cx.spawn(|this, mut cx| async move {
3974 let response = request.await?;
3975 let mut result = HashMap::default();
3976 for location in response.locations {
3977 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3978 let target_buffer = this
3979 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3980 .await?;
3981 let start = location
3982 .start
3983 .and_then(deserialize_anchor)
3984 .ok_or_else(|| anyhow!("missing target start"))?;
3985 let end = location
3986 .end
3987 .and_then(deserialize_anchor)
3988 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
3993 }
3994 Ok(result)
3995 })
3996 } else {
3997 Task::ready(Ok(Default::default()))
3998 }
3999 }
4000
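    /// Dispatches `request` to the buffer's local language server, or to the host over RPC
    /// when this project is remote. Yields a default response if no server is available or
    /// the server lacks the required capability.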
4001 fn request_lsp<R: LspCommand>(
4002 &self,
4003 buffer_handle: ModelHandle<Buffer>,
4004 request: R,
4005 cx: &mut ModelContext<Self>,
4006 ) -> Task<Result<R::Response>>
4007 where
4008 <R::LspRequest as lsp::request::Request>::Result: Send,
4009 {
4010 let buffer = buffer_handle.read(cx);
4011 if self.is_local() {
4012 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4013 if let Some((file, language_server)) = file.zip(
4014 self.language_server_for_buffer(buffer, cx)
4015 .map(|(_, server)| server.clone()),
4016 ) {
4017 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4018 return cx.spawn(|this, cx| async move {
4019 if !request.check_capabilities(&language_server.capabilities()) {
4020 return Ok(Default::default());
4021 }
4022
4023 let response = language_server
4024 .request::<R::LspRequest>(lsp_params)
4025 .await
4026 .context("lsp request failed")?;
4027 request
4028 .response_from_lsp(response, this, buffer_handle, cx)
4029 .await
4030 });
4031 }
4032 } else if let Some(project_id) = self.remote_id() {
4033 let rpc = self.client.clone();
4034 let message = request.to_proto(project_id, buffer);
4035 return cx.spawn(|this, cx| async move {
4036 let response = rpc.request(message).await?;
4037 request
4038 .response_from_proto(response, this, buffer_handle, cx)
4039 .await
4040 });
4041 }
4042 Task::ready(Ok(Default::default()))
4043 }
4044
4045 pub fn find_or_create_local_worktree(
4046 &mut self,
4047 abs_path: impl AsRef<Path>,
4048 visible: bool,
4049 cx: &mut ModelContext<Self>,
4050 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4051 let abs_path = abs_path.as_ref();
4052 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4053 Task::ready(Ok((tree.clone(), relative_path.into())))
4054 } else {
4055 let worktree = self.create_local_worktree(abs_path, visible, cx);
4056 cx.foreground()
4057 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4058 }
4059 }
4060
4061 pub fn find_local_worktree(
4062 &self,
4063 abs_path: &Path,
4064 cx: &AppContext,
4065 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4066 for tree in &self.worktrees {
4067 if let Some(tree) = tree.upgrade(cx) {
4068 if let Some(relative_path) = tree
4069 .read(cx)
4070 .as_local()
4071 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4072 {
4073 return Some((tree.clone(), relative_path.into()));
4074 }
4075 }
4076 }
4077 None
4078 }
4079
4080 pub fn is_shared(&self) -> bool {
4081 match &self.client_state {
4082 ProjectClientState::Local { is_shared, .. } => *is_shared,
4083 ProjectClientState::Remote { .. } => false,
4084 }
4085 }
4086
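    /// Starts loading a local worktree for `abs_path`, deduplicating concurrent requests for
    /// the same path via `loading_local_worktrees` and sharing the new worktree when the
    /// project is already shared.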
4087 fn create_local_worktree(
4088 &mut self,
4089 abs_path: impl AsRef<Path>,
4090 visible: bool,
4091 cx: &mut ModelContext<Self>,
4092 ) -> Task<Result<ModelHandle<Worktree>>> {
4093 let fs = self.fs.clone();
4094 let client = self.client.clone();
4095 let next_entry_id = self.next_entry_id.clone();
4096 let path: Arc<Path> = abs_path.as_ref().into();
4097 let task = self
4098 .loading_local_worktrees
4099 .entry(path.clone())
4100 .or_insert_with(|| {
4101 cx.spawn(|project, mut cx| {
4102 async move {
4103 let worktree = Worktree::local(
4104 client.clone(),
4105 path.clone(),
4106 visible,
4107 fs,
4108 next_entry_id,
4109 &mut cx,
4110 )
4111 .await;
4112 project.update(&mut cx, |project, _| {
4113 project.loading_local_worktrees.remove(&path);
4114 });
4115 let worktree = worktree?;
4116
4117 let project_id = project.update(&mut cx, |project, cx| {
4118 project.add_worktree(&worktree, cx);
4119 project.shared_remote_id()
4120 });
4121
4122 if let Some(project_id) = project_id {
4123 worktree
4124 .update(&mut cx, |worktree, cx| {
4125 worktree.as_local_mut().unwrap().share(project_id, cx)
4126 })
4127 .await
4128 .log_err();
4129 }
4130
4131 Ok(worktree)
4132 }
                    .map_err(Arc::new)
4134 })
4135 .shared()
4136 })
4137 .clone();
4138 cx.foreground().spawn(async move {
4139 match task.await {
4140 Ok(worktree) => Ok(worktree),
4141 Err(err) => Err(anyhow!("{}", err)),
4142 }
4143 })
4144 }
4145
4146 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4147 self.worktrees.retain(|worktree| {
4148 if let Some(worktree) = worktree.upgrade(cx) {
4149 let id = worktree.read(cx).id();
4150 if id == id_to_remove {
4151 cx.emit(Event::WorktreeRemoved(id));
4152 false
4153 } else {
4154 true
4155 }
4156 } else {
4157 false
4158 }
4159 });
4160 self.metadata_changed(true, cx);
4161 cx.notify();
4162 }
4163
4164 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4165 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4166 if worktree.read(cx).is_local() {
4167 cx.subscribe(&worktree, |this, worktree, _, cx| {
4168 this.update_local_worktree_buffers(worktree, cx);
4169 })
4170 .detach();
4171 }
4172
4173 let push_strong_handle = {
4174 let worktree = worktree.read(cx);
4175 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4176 };
4177 if push_strong_handle {
4178 self.worktrees
4179 .push(WorktreeHandle::Strong(worktree.clone()));
4180 } else {
4181 self.worktrees
4182 .push(WorktreeHandle::Weak(worktree.downgrade()));
4183 }
4184
4185 self.metadata_changed(true, cx);
4186 cx.observe_release(&worktree, |this, worktree, cx| {
4187 this.remove_worktree(worktree.id(), cx);
4188 cx.notify();
4189 })
4190 .detach();
4191
4192 cx.emit(Event::WorktreeAdded);
4193 cx.notify();
4194 }
4195
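    /// Reconciles open buffers after a local worktree changes: each buffer's `File` is pointed
    /// at its current entry (falling back to a path lookup, or to no entry if the file is
    /// gone), and buffers whose paths changed are re-registered with their language servers.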
4196 fn update_local_worktree_buffers(
4197 &mut self,
4198 worktree_handle: ModelHandle<Worktree>,
4199 cx: &mut ModelContext<Self>,
4200 ) {
4201 let snapshot = worktree_handle.read(cx).snapshot();
4202 let mut buffers_to_delete = Vec::new();
4203 let mut renamed_buffers = Vec::new();
4204 for (buffer_id, buffer) in &self.opened_buffers {
4205 if let Some(buffer) = buffer.upgrade(cx) {
4206 buffer.update(cx, |buffer, cx| {
4207 if let Some(old_file) = File::from_dyn(buffer.file()) {
4208 if old_file.worktree != worktree_handle {
4209 return;
4210 }
4211
4212 let new_file = if let Some(entry) = old_file
4213 .entry_id
4214 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4215 {
4216 File {
4217 is_local: true,
4218 entry_id: Some(entry.id),
4219 mtime: entry.mtime,
4220 path: entry.path.clone(),
4221 worktree: worktree_handle.clone(),
4222 }
4223 } else if let Some(entry) =
4224 snapshot.entry_for_path(old_file.path().as_ref())
4225 {
4226 File {
4227 is_local: true,
4228 entry_id: Some(entry.id),
4229 mtime: entry.mtime,
4230 path: entry.path.clone(),
4231 worktree: worktree_handle.clone(),
4232 }
4233 } else {
4234 File {
4235 is_local: true,
4236 entry_id: None,
4237 path: old_file.path().clone(),
4238 mtime: old_file.mtime(),
4239 worktree: worktree_handle.clone(),
4240 }
4241 };
4242
4243 let old_path = old_file.abs_path(cx);
4244 if new_file.abs_path(cx) != old_path {
4245 renamed_buffers.push((cx.handle(), old_path));
4246 }
4247
4248 if let Some(project_id) = self.shared_remote_id() {
4249 self.client
4250 .send(proto::UpdateBufferFile {
4251 project_id,
4252 buffer_id: *buffer_id as u64,
4253 file: Some(new_file.to_proto()),
4254 })
4255 .log_err();
4256 }
4257 buffer.file_updated(Arc::new(new_file), cx).detach();
4258 }
4259 });
4260 } else {
4261 buffers_to_delete.push(*buffer_id);
4262 }
4263 }
4264
4265 for buffer_id in buffers_to_delete {
4266 self.opened_buffers.remove(&buffer_id);
4267 }
4268
4269 for (buffer, old_path) in renamed_buffers {
4270 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4271 self.assign_language_to_buffer(&buffer, cx);
4272 self.register_buffer_with_language_server(&buffer, cx);
4273 }
4274 }
4275
4276 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4277 let new_active_entry = entry.and_then(|project_path| {
4278 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4279 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4280 Some(entry.id)
4281 });
4282 if new_active_entry != self.active_entry {
4283 self.active_entry = new_active_entry;
4284 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4285 }
4286 }
4287
4288 pub fn language_servers_running_disk_based_diagnostics<'a>(
4289 &'a self,
4290 ) -> impl 'a + Iterator<Item = usize> {
4291 self.language_server_statuses
4292 .iter()
4293 .filter_map(|(id, status)| {
4294 if status.has_pending_diagnostic_updates {
4295 Some(*id)
4296 } else {
4297 None
4298 }
4299 })
4300 }
4301
4302 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4303 let mut summary = DiagnosticSummary::default();
4304 for (_, path_summary) in self.diagnostic_summaries(cx) {
4305 summary.error_count += path_summary.error_count;
4306 summary.warning_count += path_summary.warning_count;
4307 }
4308 summary
4309 }
4310
4311 pub fn diagnostic_summaries<'a>(
4312 &'a self,
4313 cx: &'a AppContext,
4314 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4315 self.visible_worktrees(cx).flat_map(move |worktree| {
4316 let worktree = worktree.read(cx);
4317 let worktree_id = worktree.id();
4318 worktree
4319 .diagnostic_summaries()
4320 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4321 })
4322 }
4323
4324 pub fn disk_based_diagnostics_started(
4325 &mut self,
4326 language_server_id: usize,
4327 cx: &mut ModelContext<Self>,
4328 ) {
4329 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4330 }
4331
4332 pub fn disk_based_diagnostics_finished(
4333 &mut self,
4334 language_server_id: usize,
4335 cx: &mut ModelContext<Self>,
4336 ) {
4337 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4338 }
4339
4340 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4341 self.active_entry
4342 }
4343
4344 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4345 self.worktree_for_id(path.worktree_id, cx)?
4346 .read(cx)
4347 .entry_for_path(&path.path)
4348 .map(|entry| entry.id)
4349 }
4350
4351 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4352 let worktree = self.worktree_for_entry(entry_id, cx)?;
4353 let worktree = worktree.read(cx);
4354 let worktree_id = worktree.id();
4355 let path = worktree.entry_for_id(entry_id)?.path.clone();
4356 Some(ProjectPath { worktree_id, path })
4357 }
4358
4359 // RPC message handlers
4360
4361 async fn handle_request_join_project(
4362 this: ModelHandle<Self>,
4363 message: TypedEnvelope<proto::RequestJoinProject>,
4364 _: Arc<Client>,
4365 mut cx: AsyncAppContext,
4366 ) -> Result<()> {
4367 let user_id = message.payload.requester_id;
4368 if this.read_with(&cx, |project, _| {
4369 project.collaborators.values().any(|c| c.user.id == user_id)
4370 }) {
4371 this.update(&mut cx, |this, cx| {
4372 this.respond_to_join_request(user_id, true, cx)
4373 });
4374 } else {
4375 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4376 let user = user_store
4377 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4378 .await?;
4379 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4380 }
4381 Ok(())
4382 }
4383
4384 async fn handle_unregister_project(
4385 this: ModelHandle<Self>,
4386 _: TypedEnvelope<proto::UnregisterProject>,
4387 _: Arc<Client>,
4388 mut cx: AsyncAppContext,
4389 ) -> Result<()> {
4390 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4391 Ok(())
4392 }
4393
4394 async fn handle_project_unshared(
4395 this: ModelHandle<Self>,
4396 _: TypedEnvelope<proto::ProjectUnshared>,
4397 _: Arc<Client>,
4398 mut cx: AsyncAppContext,
4399 ) -> Result<()> {
4400 this.update(&mut cx, |this, cx| this.unshared(cx));
4401 Ok(())
4402 }
4403
4404 async fn handle_add_collaborator(
4405 this: ModelHandle<Self>,
4406 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4407 _: Arc<Client>,
4408 mut cx: AsyncAppContext,
4409 ) -> Result<()> {
4410 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4411 let collaborator = envelope
4412 .payload
4413 .collaborator
4414 .take()
4415 .ok_or_else(|| anyhow!("empty collaborator"))?;
4416
4417 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4418 this.update(&mut cx, |this, cx| {
4419 this.collaborators
4420 .insert(collaborator.peer_id, collaborator);
4421 cx.notify();
4422 });
4423
4424 Ok(())
4425 }
4426
4427 async fn handle_remove_collaborator(
4428 this: ModelHandle<Self>,
4429 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4430 _: Arc<Client>,
4431 mut cx: AsyncAppContext,
4432 ) -> Result<()> {
4433 this.update(&mut cx, |this, cx| {
4434 let peer_id = PeerId(envelope.payload.peer_id);
4435 let replica_id = this
4436 .collaborators
4437 .remove(&peer_id)
4438 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4439 .replica_id;
4440 for (_, buffer) in &this.opened_buffers {
4441 if let Some(buffer) = buffer.upgrade(cx) {
4442 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4443 }
4444 }
4445
4446 cx.emit(Event::CollaboratorLeft(peer_id));
4447 cx.notify();
4448 Ok(())
4449 })
4450 }
4451
4452 async fn handle_join_project_request_cancelled(
4453 this: ModelHandle<Self>,
4454 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4455 _: Arc<Client>,
4456 mut cx: AsyncAppContext,
4457 ) -> Result<()> {
4458 let user = this
4459 .update(&mut cx, |this, cx| {
4460 this.user_store.update(cx, |user_store, cx| {
4461 user_store.fetch_user(envelope.payload.requester_id, cx)
4462 })
4463 })
4464 .await?;
4465
4466 this.update(&mut cx, |_, cx| {
4467 cx.emit(Event::ContactCancelledJoinRequest(user));
4468 });
4469
4470 Ok(())
4471 }
4472
4473 async fn handle_update_project(
4474 this: ModelHandle<Self>,
4475 envelope: TypedEnvelope<proto::UpdateProject>,
4476 client: Arc<Client>,
4477 mut cx: AsyncAppContext,
4478 ) -> Result<()> {
4479 this.update(&mut cx, |this, cx| {
4480 let replica_id = this.replica_id();
4481 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4482
4483 let mut old_worktrees_by_id = this
4484 .worktrees
4485 .drain(..)
4486 .filter_map(|worktree| {
4487 let worktree = worktree.upgrade(cx)?;
4488 Some((worktree.read(cx).id(), worktree))
4489 })
4490 .collect::<HashMap<_, _>>();
4491
4492 for worktree in envelope.payload.worktrees {
4493 if let Some(old_worktree) =
4494 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4495 {
4496 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4497 } else {
4498 let worktree = proto::Worktree {
4499 id: worktree.id,
4500 root_name: worktree.root_name,
4501 entries: Default::default(),
4502 diagnostic_summaries: Default::default(),
4503 visible: worktree.visible,
4504 scan_id: 0,
4505 };
4506 let (worktree, load_task) =
4507 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4508 this.add_worktree(&worktree, cx);
4509 load_task.detach();
4510 }
4511 }
4512
4513 this.metadata_changed(true, cx);
4514 for (id, _) in old_worktrees_by_id {
4515 cx.emit(Event::WorktreeRemoved(id));
4516 }
4517
4518 Ok(())
4519 })
4520 }
4521
4522 async fn handle_update_worktree(
4523 this: ModelHandle<Self>,
4524 envelope: TypedEnvelope<proto::UpdateWorktree>,
4525 _: Arc<Client>,
4526 mut cx: AsyncAppContext,
4527 ) -> Result<()> {
4528 this.update(&mut cx, |this, cx| {
4529 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4530 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4531 worktree.update(cx, |worktree, _| {
4532 let worktree = worktree.as_remote_mut().unwrap();
4533 worktree.update_from_remote(envelope)
4534 })?;
4535 }
4536 Ok(())
4537 })
4538 }
4539
4540 async fn handle_create_project_entry(
4541 this: ModelHandle<Self>,
4542 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4543 _: Arc<Client>,
4544 mut cx: AsyncAppContext,
4545 ) -> Result<proto::ProjectEntryResponse> {
4546 let worktree = this.update(&mut cx, |this, cx| {
4547 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4548 this.worktree_for_id(worktree_id, cx)
4549 .ok_or_else(|| anyhow!("worktree not found"))
4550 })?;
4551 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4552 let entry = worktree
4553 .update(&mut cx, |worktree, cx| {
4554 let worktree = worktree.as_local_mut().unwrap();
4555 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4556 worktree.create_entry(path, envelope.payload.is_directory, cx)
4557 })
4558 .await?;
4559 Ok(proto::ProjectEntryResponse {
4560 entry: Some((&entry).into()),
4561 worktree_scan_id: worktree_scan_id as u64,
4562 })
4563 }
4564
4565 async fn handle_rename_project_entry(
4566 this: ModelHandle<Self>,
4567 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4568 _: Arc<Client>,
4569 mut cx: AsyncAppContext,
4570 ) -> Result<proto::ProjectEntryResponse> {
4571 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4572 let worktree = this.read_with(&cx, |this, cx| {
4573 this.worktree_for_entry(entry_id, cx)
4574 .ok_or_else(|| anyhow!("worktree not found"))
4575 })?;
4576 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4577 let entry = worktree
4578 .update(&mut cx, |worktree, cx| {
4579 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4580 worktree
4581 .as_local_mut()
4582 .unwrap()
4583 .rename_entry(entry_id, new_path, cx)
4584 .ok_or_else(|| anyhow!("invalid entry"))
4585 })?
4586 .await?;
4587 Ok(proto::ProjectEntryResponse {
4588 entry: Some((&entry).into()),
4589 worktree_scan_id: worktree_scan_id as u64,
4590 })
4591 }
4592
4593 async fn handle_copy_project_entry(
4594 this: ModelHandle<Self>,
4595 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4596 _: Arc<Client>,
4597 mut cx: AsyncAppContext,
4598 ) -> Result<proto::ProjectEntryResponse> {
4599 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4600 let worktree = this.read_with(&cx, |this, cx| {
4601 this.worktree_for_entry(entry_id, cx)
4602 .ok_or_else(|| anyhow!("worktree not found"))
4603 })?;
4604 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4605 let entry = worktree
4606 .update(&mut cx, |worktree, cx| {
4607 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4608 worktree
4609 .as_local_mut()
4610 .unwrap()
4611 .copy_entry(entry_id, new_path, cx)
4612 .ok_or_else(|| anyhow!("invalid entry"))
4613 })?
4614 .await?;
4615 Ok(proto::ProjectEntryResponse {
4616 entry: Some((&entry).into()),
4617 worktree_scan_id: worktree_scan_id as u64,
4618 })
4619 }
4620
4621 async fn handle_delete_project_entry(
4622 this: ModelHandle<Self>,
4623 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4624 _: Arc<Client>,
4625 mut cx: AsyncAppContext,
4626 ) -> Result<proto::ProjectEntryResponse> {
4627 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4628 let worktree = this.read_with(&cx, |this, cx| {
4629 this.worktree_for_entry(entry_id, cx)
4630 .ok_or_else(|| anyhow!("worktree not found"))
4631 })?;
4632 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4633 worktree
4634 .update(&mut cx, |worktree, cx| {
4635 worktree
4636 .as_local_mut()
4637 .unwrap()
4638 .delete_entry(entry_id, cx)
4639 .ok_or_else(|| anyhow!("invalid entry"))
4640 })?
4641 .await?;
4642 Ok(proto::ProjectEntryResponse {
4643 entry: None,
4644 worktree_scan_id: worktree_scan_id as u64,
4645 })
4646 }
4647
4648 async fn handle_update_diagnostic_summary(
4649 this: ModelHandle<Self>,
4650 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4651 _: Arc<Client>,
4652 mut cx: AsyncAppContext,
4653 ) -> Result<()> {
4654 this.update(&mut cx, |this, cx| {
4655 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4656 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4657 if let Some(summary) = envelope.payload.summary {
4658 let project_path = ProjectPath {
4659 worktree_id,
4660 path: Path::new(&summary.path).into(),
4661 };
4662 worktree.update(cx, |worktree, _| {
4663 worktree
4664 .as_remote_mut()
4665 .unwrap()
4666 .update_diagnostic_summary(project_path.path.clone(), &summary);
4667 });
4668 cx.emit(Event::DiagnosticsUpdated {
4669 language_server_id: summary.language_server_id as usize,
4670 path: project_path,
4671 });
4672 }
4673 }
4674 Ok(())
4675 })
4676 }
4677
4678 async fn handle_start_language_server(
4679 this: ModelHandle<Self>,
4680 envelope: TypedEnvelope<proto::StartLanguageServer>,
4681 _: Arc<Client>,
4682 mut cx: AsyncAppContext,
4683 ) -> Result<()> {
4684 let server = envelope
4685 .payload
4686 .server
4687 .ok_or_else(|| anyhow!("invalid server"))?;
4688 this.update(&mut cx, |this, cx| {
4689 this.language_server_statuses.insert(
4690 server.id as usize,
4691 LanguageServerStatus {
4692 name: server.name,
4693 pending_work: Default::default(),
4694 has_pending_diagnostic_updates: false,
4695 progress_tokens: Default::default(),
4696 },
4697 );
4698 cx.notify();
4699 });
4700 Ok(())
4701 }
4702
4703 async fn handle_update_language_server(
4704 this: ModelHandle<Self>,
4705 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4706 _: Arc<Client>,
4707 mut cx: AsyncAppContext,
4708 ) -> Result<()> {
4709 let language_server_id = envelope.payload.language_server_id as usize;
4710 match envelope
4711 .payload
4712 .variant
4713 .ok_or_else(|| anyhow!("invalid variant"))?
4714 {
4715 proto::update_language_server::Variant::WorkStart(payload) => {
4716 this.update(&mut cx, |this, cx| {
4717 this.on_lsp_work_start(
4718 language_server_id,
4719 payload.token,
4720 LanguageServerProgress {
4721 message: payload.message,
4722 percentage: payload.percentage.map(|p| p as usize),
4723 last_update_at: Instant::now(),
4724 },
4725 cx,
4726 );
4727 })
4728 }
4729 proto::update_language_server::Variant::WorkProgress(payload) => {
4730 this.update(&mut cx, |this, cx| {
4731 this.on_lsp_work_progress(
4732 language_server_id,
4733 payload.token,
4734 LanguageServerProgress {
4735 message: payload.message,
4736 percentage: payload.percentage.map(|p| p as usize),
4737 last_update_at: Instant::now(),
4738 },
4739 cx,
4740 );
4741 })
4742 }
4743 proto::update_language_server::Variant::WorkEnd(payload) => {
4744 this.update(&mut cx, |this, cx| {
4745 this.on_lsp_work_end(language_server_id, payload.token, cx);
4746 })
4747 }
4748 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4749 this.update(&mut cx, |this, cx| {
4750 this.disk_based_diagnostics_started(language_server_id, cx);
4751 })
4752 }
4753 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4754 this.update(&mut cx, |this, cx| {
4755 this.disk_based_diagnostics_finished(language_server_id, cx)
4756 });
4757 }
4758 }
4759
4760 Ok(())
4761 }
4762
4763 async fn handle_update_buffer(
4764 this: ModelHandle<Self>,
4765 envelope: TypedEnvelope<proto::UpdateBuffer>,
4766 _: Arc<Client>,
4767 mut cx: AsyncAppContext,
4768 ) -> Result<()> {
4769 this.update(&mut cx, |this, cx| {
4770 let payload = envelope.payload.clone();
4771 let buffer_id = payload.buffer_id;
4772 let ops = payload
4773 .operations
4774 .into_iter()
4775 .map(|op| language::proto::deserialize_operation(op))
4776 .collect::<Result<Vec<_>, _>>()?;
4777 let is_remote = this.is_remote();
4778 match this.opened_buffers.entry(buffer_id) {
4779 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4780 OpenBuffer::Strong(buffer) => {
4781 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4782 }
4783 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4784 OpenBuffer::Weak(_) => {}
4785 },
4786 hash_map::Entry::Vacant(e) => {
4787 assert!(
4788 is_remote,
4789 "received buffer update from {:?}",
4790 envelope.original_sender_id
4791 );
4792 e.insert(OpenBuffer::Loading(ops));
4793 }
4794 }
4795 Ok(())
4796 })
4797 }
4798
4799 async fn handle_update_buffer_file(
4800 this: ModelHandle<Self>,
4801 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4802 _: Arc<Client>,
4803 mut cx: AsyncAppContext,
4804 ) -> Result<()> {
4805 this.update(&mut cx, |this, cx| {
4806 let payload = envelope.payload.clone();
4807 let buffer_id = payload.buffer_id;
4808 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4809 let worktree = this
4810 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4811 .ok_or_else(|| anyhow!("no such worktree"))?;
4812 let file = File::from_proto(file, worktree.clone(), cx)?;
4813 let buffer = this
4814 .opened_buffers
4815 .get_mut(&buffer_id)
4816 .and_then(|b| b.upgrade(cx))
4817 .ok_or_else(|| anyhow!("no such buffer"))?;
4818 buffer.update(cx, |buffer, cx| {
4819 buffer.file_updated(Arc::new(file), cx).detach();
4820 });
4821 Ok(())
4822 })
4823 }
4824
4825 async fn handle_save_buffer(
4826 this: ModelHandle<Self>,
4827 envelope: TypedEnvelope<proto::SaveBuffer>,
4828 _: Arc<Client>,
4829 mut cx: AsyncAppContext,
4830 ) -> Result<proto::BufferSaved> {
4831 let buffer_id = envelope.payload.buffer_id;
4832 let requested_version = deserialize_version(envelope.payload.version);
4833
4834 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4835 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4836 let buffer = this
4837 .opened_buffers
4838 .get(&buffer_id)
4839 .and_then(|buffer| buffer.upgrade(cx))
4840 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4841 Ok::<_, anyhow::Error>((project_id, buffer))
4842 })?;
4843 buffer
4844 .update(&mut cx, |buffer, _| {
4845 buffer.wait_for_version(requested_version)
4846 })
4847 .await;
4848
4849 let (saved_version, fingerprint, mtime) =
4850 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4851 Ok(proto::BufferSaved {
4852 project_id,
4853 buffer_id,
4854 version: serialize_version(&saved_version),
4855 mtime: Some(mtime.into()),
4856 fingerprint,
4857 })
4858 }
4859
4860 async fn handle_reload_buffers(
4861 this: ModelHandle<Self>,
4862 envelope: TypedEnvelope<proto::ReloadBuffers>,
4863 _: Arc<Client>,
4864 mut cx: AsyncAppContext,
4865 ) -> Result<proto::ReloadBuffersResponse> {
4866 let sender_id = envelope.original_sender_id()?;
4867 let reload = this.update(&mut cx, |this, cx| {
4868 let mut buffers = HashSet::default();
4869 for buffer_id in &envelope.payload.buffer_ids {
4870 buffers.insert(
4871 this.opened_buffers
4872 .get(buffer_id)
4873 .and_then(|buffer| buffer.upgrade(cx))
4874 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4875 );
4876 }
4877 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4878 })?;
4879
4880 let project_transaction = reload.await?;
4881 let project_transaction = this.update(&mut cx, |this, cx| {
4882 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4883 });
4884 Ok(proto::ReloadBuffersResponse {
4885 transaction: Some(project_transaction),
4886 })
4887 }
4888
4889 async fn handle_format_buffers(
4890 this: ModelHandle<Self>,
4891 envelope: TypedEnvelope<proto::FormatBuffers>,
4892 _: Arc<Client>,
4893 mut cx: AsyncAppContext,
4894 ) -> Result<proto::FormatBuffersResponse> {
4895 let sender_id = envelope.original_sender_id()?;
4896 let format = this.update(&mut cx, |this, cx| {
4897 let mut buffers = HashSet::default();
4898 for buffer_id in &envelope.payload.buffer_ids {
4899 buffers.insert(
4900 this.opened_buffers
4901 .get(buffer_id)
4902 .and_then(|buffer| buffer.upgrade(cx))
4903 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4904 );
4905 }
4906 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4907 })?;
4908
4909 let project_transaction = format.await?;
4910 let project_transaction = this.update(&mut cx, |this, cx| {
4911 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4912 });
4913 Ok(proto::FormatBuffersResponse {
4914 transaction: Some(project_transaction),
4915 })
4916 }
4917
4918 async fn handle_get_completions(
4919 this: ModelHandle<Self>,
4920 envelope: TypedEnvelope<proto::GetCompletions>,
4921 _: Arc<Client>,
4922 mut cx: AsyncAppContext,
4923 ) -> Result<proto::GetCompletionsResponse> {
4924 let position = envelope
4925 .payload
4926 .position
4927 .and_then(language::proto::deserialize_anchor)
4928 .ok_or_else(|| anyhow!("invalid position"))?;
4929 let version = deserialize_version(envelope.payload.version);
4930 let buffer = this.read_with(&cx, |this, cx| {
4931 this.opened_buffers
4932 .get(&envelope.payload.buffer_id)
4933 .and_then(|buffer| buffer.upgrade(cx))
4934 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4935 })?;
4936 buffer
4937 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4938 .await;
4939 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4940 let completions = this
4941 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4942 .await?;
4943
4944 Ok(proto::GetCompletionsResponse {
4945 completions: completions
4946 .iter()
4947 .map(language::proto::serialize_completion)
4948 .collect(),
4949 version: serialize_version(&version),
4950 })
4951 }
4952
4953 async fn handle_apply_additional_edits_for_completion(
4954 this: ModelHandle<Self>,
4955 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4956 _: Arc<Client>,
4957 mut cx: AsyncAppContext,
4958 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4959 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4960 let buffer = this
4961 .opened_buffers
4962 .get(&envelope.payload.buffer_id)
4963 .and_then(|buffer| buffer.upgrade(cx))
4964 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4965 let language = buffer.read(cx).language();
4966 let completion = language::proto::deserialize_completion(
4967 envelope
4968 .payload
4969 .completion
4970 .ok_or_else(|| anyhow!("invalid completion"))?,
4971 language,
4972 )?;
4973 Ok::<_, anyhow::Error>(
4974 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4975 )
4976 })?;
4977
4978 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4979 transaction: apply_additional_edits
4980 .await?
4981 .as_ref()
4982 .map(language::proto::serialize_transaction),
4983 })
4984 }
4985
4986 async fn handle_get_code_actions(
4987 this: ModelHandle<Self>,
4988 envelope: TypedEnvelope<proto::GetCodeActions>,
4989 _: Arc<Client>,
4990 mut cx: AsyncAppContext,
4991 ) -> Result<proto::GetCodeActionsResponse> {
4992 let start = envelope
4993 .payload
4994 .start
4995 .and_then(language::proto::deserialize_anchor)
4996 .ok_or_else(|| anyhow!("invalid start"))?;
4997 let end = envelope
4998 .payload
4999 .end
5000 .and_then(language::proto::deserialize_anchor)
5001 .ok_or_else(|| anyhow!("invalid end"))?;
5002 let buffer = this.update(&mut cx, |this, cx| {
5003 this.opened_buffers
5004 .get(&envelope.payload.buffer_id)
5005 .and_then(|buffer| buffer.upgrade(cx))
5006 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5007 })?;
5008 buffer
5009 .update(&mut cx, |buffer, _| {
5010 buffer.wait_for_version(deserialize_version(envelope.payload.version))
5011 })
5012 .await;
5013
5014 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5015 let code_actions = this.update(&mut cx, |this, cx| {
5016 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5017 })?;
5018
5019 Ok(proto::GetCodeActionsResponse {
5020 actions: code_actions
5021 .await?
5022 .iter()
5023 .map(language::proto::serialize_code_action)
5024 .collect(),
5025 version: serialize_version(&version),
5026 })
5027 }
5028
5029 async fn handle_apply_code_action(
5030 this: ModelHandle<Self>,
5031 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5032 _: Arc<Client>,
5033 mut cx: AsyncAppContext,
5034 ) -> Result<proto::ApplyCodeActionResponse> {
5035 let sender_id = envelope.original_sender_id()?;
5036 let action = language::proto::deserialize_code_action(
5037 envelope
5038 .payload
5039 .action
5040 .ok_or_else(|| anyhow!("invalid action"))?,
5041 )?;
5042 let apply_code_action = this.update(&mut cx, |this, cx| {
5043 let buffer = this
5044 .opened_buffers
5045 .get(&envelope.payload.buffer_id)
5046 .and_then(|buffer| buffer.upgrade(cx))
5047 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5048 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5049 })?;
5050
5051 let project_transaction = apply_code_action.await?;
5052 let project_transaction = this.update(&mut cx, |this, cx| {
5053 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5054 });
5055 Ok(proto::ApplyCodeActionResponse {
5056 transaction: Some(project_transaction),
5057 })
5058 }
5059
5060 async fn handle_lsp_command<T: LspCommand>(
5061 this: ModelHandle<Self>,
5062 envelope: TypedEnvelope<T::ProtoRequest>,
5063 _: Arc<Client>,
5064 mut cx: AsyncAppContext,
5065 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5066 where
5067 <T::LspRequest as lsp::request::Request>::Result: Send,
5068 {
5069 let sender_id = envelope.original_sender_id()?;
5070 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5071 let buffer_handle = this.read_with(&cx, |this, _| {
5072 this.opened_buffers
5073 .get(&buffer_id)
5074 .and_then(|buffer| buffer.upgrade(&cx))
5075 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5076 })?;
5077 let request = T::from_proto(
5078 envelope.payload,
5079 this.clone(),
5080 buffer_handle.clone(),
5081 cx.clone(),
5082 )
5083 .await?;
5084 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5085 let response = this
5086 .update(&mut cx, |this, cx| {
5087 this.request_lsp(buffer_handle, request, cx)
5088 })
5089 .await?;
5090 this.update(&mut cx, |this, cx| {
5091 Ok(T::response_to_proto(
5092 response,
5093 this,
5094 sender_id,
5095 &buffer_version,
5096 cx,
5097 ))
5098 })
5099 }
5100
5101 async fn handle_get_project_symbols(
5102 this: ModelHandle<Self>,
5103 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5104 _: Arc<Client>,
5105 mut cx: AsyncAppContext,
5106 ) -> Result<proto::GetProjectSymbolsResponse> {
5107 let symbols = this
5108 .update(&mut cx, |this, cx| {
5109 this.symbols(&envelope.payload.query, cx)
5110 })
5111 .await?;
5112
5113 Ok(proto::GetProjectSymbolsResponse {
5114 symbols: symbols.iter().map(serialize_symbol).collect(),
5115 })
5116 }
5117
5118 async fn handle_search_project(
5119 this: ModelHandle<Self>,
5120 envelope: TypedEnvelope<proto::SearchProject>,
5121 _: Arc<Client>,
5122 mut cx: AsyncAppContext,
5123 ) -> Result<proto::SearchProjectResponse> {
5124 let peer_id = envelope.original_sender_id()?;
5125 let query = SearchQuery::from_proto(envelope.payload)?;
5126 let result = this
5127 .update(&mut cx, |this, cx| this.search(query, cx))
5128 .await?;
5129
5130 this.update(&mut cx, |this, cx| {
5131 let mut locations = Vec::new();
5132 for (buffer, ranges) in result {
5133 for range in ranges {
5134 let start = serialize_anchor(&range.start);
5135 let end = serialize_anchor(&range.end);
5136 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5137 locations.push(proto::Location {
5138 buffer: Some(buffer),
5139 start: Some(start),
5140 end: Some(end),
5141 });
5142 }
5143 }
5144 Ok(proto::SearchProjectResponse { locations })
5145 })
5146 }
5147
5148 async fn handle_open_buffer_for_symbol(
5149 this: ModelHandle<Self>,
5150 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5151 _: Arc<Client>,
5152 mut cx: AsyncAppContext,
5153 ) -> Result<proto::OpenBufferForSymbolResponse> {
5154 let peer_id = envelope.original_sender_id()?;
5155 let symbol = envelope
5156 .payload
5157 .symbol
5158 .ok_or_else(|| anyhow!("invalid symbol"))?;
5159 let symbol = this.read_with(&cx, |this, _| {
5160 let symbol = this.deserialize_symbol(symbol)?;
5161 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5162 if signature == symbol.signature {
5163 Ok(symbol)
5164 } else {
5165 Err(anyhow!("invalid symbol signature"))
5166 }
5167 })?;
5168 let buffer = this
5169 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5170 .await?;
5171
5172 Ok(proto::OpenBufferForSymbolResponse {
5173 buffer: Some(this.update(&mut cx, |this, cx| {
5174 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5175 })),
5176 })
5177 }
5178
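    /// Hashes a worktree id and path together with this project's private nonce, so that
    /// symbols handed out to guests can be verified when they are sent back.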
5179 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5180 let mut hasher = Sha256::new();
5181 hasher.update(worktree_id.to_proto().to_be_bytes());
5182 hasher.update(path.to_string_lossy().as_bytes());
5183 hasher.update(self.nonce.to_be_bytes());
5184 hasher.finalize().as_slice().try_into().unwrap()
5185 }
5186
5187 async fn handle_open_buffer_by_id(
5188 this: ModelHandle<Self>,
5189 envelope: TypedEnvelope<proto::OpenBufferById>,
5190 _: Arc<Client>,
5191 mut cx: AsyncAppContext,
5192 ) -> Result<proto::OpenBufferResponse> {
5193 let peer_id = envelope.original_sender_id()?;
5194 let buffer = this
5195 .update(&mut cx, |this, cx| {
5196 this.open_buffer_by_id(envelope.payload.id, cx)
5197 })
5198 .await?;
5199 this.update(&mut cx, |this, cx| {
5200 Ok(proto::OpenBufferResponse {
5201 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5202 })
5203 })
5204 }
5205
5206 async fn handle_open_buffer_by_path(
5207 this: ModelHandle<Self>,
5208 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5209 _: Arc<Client>,
5210 mut cx: AsyncAppContext,
5211 ) -> Result<proto::OpenBufferResponse> {
5212 let peer_id = envelope.original_sender_id()?;
5213 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5214 let open_buffer = this.update(&mut cx, |this, cx| {
5215 this.open_buffer(
5216 ProjectPath {
5217 worktree_id,
5218 path: PathBuf::from(envelope.payload.path).into(),
5219 },
5220 cx,
5221 )
5222 });
5223
5224 let buffer = open_buffer.await?;
5225 this.update(&mut cx, |this, cx| {
5226 Ok(proto::OpenBufferResponse {
5227 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5228 })
5229 })
5230 }
5231
5232 fn serialize_project_transaction_for_peer(
5233 &mut self,
5234 project_transaction: ProjectTransaction,
5235 peer_id: PeerId,
5236 cx: &AppContext,
5237 ) -> proto::ProjectTransaction {
5238 let mut serialized_transaction = proto::ProjectTransaction {
5239 buffers: Default::default(),
5240 transactions: Default::default(),
5241 };
5242 for (buffer, transaction) in project_transaction.0 {
5243 serialized_transaction
5244 .buffers
5245 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5246 serialized_transaction
5247 .transactions
5248 .push(language::proto::serialize_transaction(&transaction));
5249 }
5250 serialized_transaction
5251 }
5252
5253 fn deserialize_project_transaction(
5254 &mut self,
5255 message: proto::ProjectTransaction,
5256 push_to_history: bool,
5257 cx: &mut ModelContext<Self>,
5258 ) -> Task<Result<ProjectTransaction>> {
5259 cx.spawn(|this, mut cx| async move {
5260 let mut project_transaction = ProjectTransaction::default();
5261 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5262 let buffer = this
5263 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5264 .await?;
5265 let transaction = language::proto::deserialize_transaction(transaction)?;
5266 project_transaction.0.insert(buffer, transaction);
5267 }
5268
5269 for (buffer, transaction) in &project_transaction.0 {
5270 buffer
5271 .update(&mut cx, |buffer, _| {
5272 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5273 })
5274 .await;
5275
5276 if push_to_history {
5277 buffer.update(&mut cx, |buffer, _| {
5278 buffer.push_transaction(transaction.clone(), Instant::now());
5279 });
5280 }
5281 }
5282
5283 Ok(project_transaction)
5284 })
5285 }
5286
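    /// Serializes a buffer for `peer_id`, sending the full buffer state the first time the
    /// buffer is shared with that peer and only its id on subsequent references.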
5287 fn serialize_buffer_for_peer(
5288 &mut self,
5289 buffer: &ModelHandle<Buffer>,
5290 peer_id: PeerId,
5291 cx: &AppContext,
5292 ) -> proto::Buffer {
5293 let buffer_id = buffer.read(cx).remote_id();
5294 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5295 if shared_buffers.insert(buffer_id) {
5296 proto::Buffer {
5297 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5298 }
5299 } else {
5300 proto::Buffer {
5301 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5302 }
5303 }
5304 }
5305
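    /// Resolves a `proto::Buffer` into a buffer model: either waits for an already-announced
    /// buffer id to finish opening, or reconstructs the buffer from its serialized state and
    /// registers it with the project.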
5306 fn deserialize_buffer(
5307 &mut self,
5308 buffer: proto::Buffer,
5309 cx: &mut ModelContext<Self>,
5310 ) -> Task<Result<ModelHandle<Buffer>>> {
5311 let replica_id = self.replica_id();
5312
5313 let opened_buffer_tx = self.opened_buffer.0.clone();
5314 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5315 cx.spawn(|this, mut cx| async move {
5316 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5317 proto::buffer::Variant::Id(id) => {
5318 let buffer = loop {
5319 let buffer = this.read_with(&cx, |this, cx| {
5320 this.opened_buffers
5321 .get(&id)
5322 .and_then(|buffer| buffer.upgrade(cx))
5323 });
5324 if let Some(buffer) = buffer {
5325 break buffer;
5326 }
5327 opened_buffer_rx
5328 .next()
5329 .await
5330 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5331 };
5332 Ok(buffer)
5333 }
5334 proto::buffer::Variant::State(mut buffer) => {
5335 let mut buffer_worktree = None;
5336 let mut buffer_file = None;
5337 if let Some(file) = buffer.file.take() {
5338 this.read_with(&cx, |this, cx| {
5339 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5340 let worktree =
5341 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5342 anyhow!("no worktree found for id {}", file.worktree_id)
5343 })?;
5344 buffer_file =
5345 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5346 as Arc<dyn language::File>);
5347 buffer_worktree = Some(worktree);
5348 Ok::<_, anyhow::Error>(())
5349 })?;
5350 }
5351
5352 let buffer = cx.add_model(|cx| {
5353 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5354 });
5355
5356 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5357
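                    // Nudge the `opened_buffer` watch so tasks waiting in the `Id` branch
                    // above re-check `opened_buffers` for this newly registered buffer.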
5358 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5359 Ok(buffer)
5360 }
5361 }
5362 })
5363 }
5364
5365 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5366 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5367 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5368 let start = serialized_symbol
5369 .start
5370 .ok_or_else(|| anyhow!("invalid start"))?;
5371 let end = serialized_symbol
5372 .end
5373 .ok_or_else(|| anyhow!("invalid end"))?;
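        // SAFETY: this assumes the peer sent a valid `SymbolKind` discriminant; an
        // out-of-range value here would be undefined behavior.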
5374 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5375 let path = PathBuf::from(serialized_symbol.path);
5376 let language = self.languages.select_language(&path);
5377 Ok(Symbol {
5378 source_worktree_id,
5379 worktree_id,
5380 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5381 label: language
5382 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5383 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5384 name: serialized_symbol.name,
5385 path,
5386 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5387 kind,
5388 signature: serialized_symbol
5389 .signature
5390 .try_into()
5391 .map_err(|_| anyhow!("invalid signature"))?,
5392 })
5393 }
5394
5395 async fn handle_buffer_saved(
5396 this: ModelHandle<Self>,
5397 envelope: TypedEnvelope<proto::BufferSaved>,
5398 _: Arc<Client>,
5399 mut cx: AsyncAppContext,
5400 ) -> Result<()> {
5401 let version = deserialize_version(envelope.payload.version);
5402 let mtime = envelope
5403 .payload
5404 .mtime
5405 .ok_or_else(|| anyhow!("missing mtime"))?
5406 .into();
5407
5408 this.update(&mut cx, |this, cx| {
5409 let buffer = this
5410 .opened_buffers
5411 .get(&envelope.payload.buffer_id)
5412 .and_then(|buffer| buffer.upgrade(cx));
5413 if let Some(buffer) = buffer {
5414 buffer.update(cx, |buffer, cx| {
5415 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5416 });
5417 }
5418 Ok(())
5419 })
5420 }
5421
5422 async fn handle_buffer_reloaded(
5423 this: ModelHandle<Self>,
5424 envelope: TypedEnvelope<proto::BufferReloaded>,
5425 _: Arc<Client>,
5426 mut cx: AsyncAppContext,
5427 ) -> Result<()> {
5428 let payload = envelope.payload.clone();
5429 let version = deserialize_version(payload.version);
5430 let mtime = payload
5431 .mtime
5432 .ok_or_else(|| anyhow!("missing mtime"))?
5433 .into();
5434 this.update(&mut cx, |this, cx| {
5435 let buffer = this
5436 .opened_buffers
5437 .get(&payload.buffer_id)
5438 .and_then(|buffer| buffer.upgrade(cx));
5439 if let Some(buffer) = buffer {
5440 buffer.update(cx, |buffer, cx| {
5441 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5442 });
5443 }
5444 Ok(())
5445 })
5446 }
5447
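    /// Fuzzy-matches `query` against the paths of all visible worktrees on background threads,
    /// returning up to `max_results` matches; matching can be interrupted via `cancel_flag`.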
5448 pub fn match_paths<'a>(
5449 &self,
5450 query: &'a str,
5451 include_ignored: bool,
5452 smart_case: bool,
5453 max_results: usize,
5454 cancel_flag: &'a AtomicBool,
5455 cx: &AppContext,
5456 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5457 let worktrees = self
5458 .worktrees(cx)
5459 .filter(|worktree| worktree.read(cx).is_visible())
5460 .collect::<Vec<_>>();
5461 let include_root_name = worktrees.len() > 1;
5462 let candidate_sets = worktrees
5463 .into_iter()
5464 .map(|worktree| CandidateSet {
5465 snapshot: worktree.read(cx).snapshot(),
5466 include_ignored,
5467 include_root_name,
5468 })
5469 .collect::<Vec<_>>();
5470
5471 let background = cx.background().clone();
5472 async move {
5473 fuzzy::match_paths(
5474 candidate_sets.as_slice(),
5475 query,
5476 smart_case,
5477 max_results,
5478 cancel_flag,
5479 background,
5480 )
5481 .await
5482 }
5483 }
5484
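    /// Converts LSP text edits into anchor-based buffer edits, merging adjacent or
    /// newline-separated edits and diffing multi-line replacements so that anchors in
    /// unchanged regions keep their positions.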
5485 fn edits_from_lsp(
5486 &mut self,
5487 buffer: &ModelHandle<Buffer>,
5488 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5489 version: Option<i32>,
5490 cx: &mut ModelContext<Self>,
5491 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5492 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5493 cx.background().spawn(async move {
5494 let snapshot = snapshot?;
5495 let mut lsp_edits = lsp_edits
5496 .into_iter()
5497 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5498 .collect::<Vec<_>>();
5499 lsp_edits.sort_by_key(|(range, _)| range.start);
5500
5501 let mut lsp_edits = lsp_edits.into_iter().peekable();
5502 let mut edits = Vec::new();
5503 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5504 // Combine any LSP edits that are adjacent.
5505 //
5506 // Also, combine LSP edits that are separated from each other by only
5507 // a newline. This is important because for some code actions,
5508 // Rust-analyzer rewrites the entire buffer via a series of edits that
5509 // are separated by unchanged newline characters.
5510 //
5511 // In order for the diffing logic below to work properly, any edits that
5512 // cancel each other out must be combined into one.
5513 while let Some((next_range, next_text)) = lsp_edits.peek() {
5514 if next_range.start > range.end {
5515 if next_range.start.row > range.end.row + 1
5516 || next_range.start.column > 0
5517 || snapshot.clip_point_utf16(
5518 PointUtf16::new(range.end.row, u32::MAX),
5519 Bias::Left,
5520 ) > range.end
5521 {
5522 break;
5523 }
5524 new_text.push('\n');
5525 }
5526 range.end = next_range.end;
5527 new_text.push_str(&next_text);
5528 lsp_edits.next();
5529 }
5530
5531 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5532 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5533 {
5534 return Err(anyhow!("invalid edits received from language server"));
5535 }
5536
5537 // For multiline edits, perform a diff of the old and new text so that
5538 // we can identify the changes more precisely, preserving the locations
5539 // of any anchors positioned in the unchanged regions.
5540 if range.end.row > range.start.row {
5541 let mut offset = range.start.to_offset(&snapshot);
5542 let old_text = snapshot.text_for_range(range).collect::<String>();
5543
5544 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5545 let mut moved_since_edit = true;
5546 for change in diff.iter_all_changes() {
5547 let tag = change.tag();
5548 let value = change.value();
5549 match tag {
5550 ChangeTag::Equal => {
5551 offset += value.len();
5552 moved_since_edit = true;
5553 }
5554 ChangeTag::Delete => {
5555 let start = snapshot.anchor_after(offset);
5556 let end = snapshot.anchor_before(offset + value.len());
5557 if moved_since_edit {
5558 edits.push((start..end, String::new()));
5559 } else {
5560 edits.last_mut().unwrap().0.end = end;
5561 }
5562 offset += value.len();
5563 moved_since_edit = false;
5564 }
5565 ChangeTag::Insert => {
5566 if moved_since_edit {
5567 let anchor = snapshot.anchor_after(offset);
5568 edits.push((anchor.clone()..anchor, value.to_string()));
5569 } else {
5570 edits.last_mut().unwrap().1.push_str(value);
5571 }
5572 moved_since_edit = false;
5573 }
5574 }
5575 }
5576 } else if range.end == range.start {
5577 let anchor = snapshot.anchor_after(range.start);
5578 edits.push((anchor.clone()..anchor, new_text));
5579 } else {
5580 let edit_start = snapshot.anchor_after(range.start);
5581 let edit_end = snapshot.anchor_before(range.end);
5582 edits.push((edit_start..edit_end, new_text));
5583 }
5584 }
5585
5586 Ok(edits)
5587 })
5588 }
5589
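    /// Returns the buffer snapshot corresponding to the given LSP document version, pruning
    /// snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old; with no version, the
    /// buffer's current text snapshot is used.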
5590 fn buffer_snapshot_for_lsp_version(
5591 &mut self,
5592 buffer: &ModelHandle<Buffer>,
5593 version: Option<i32>,
5594 cx: &AppContext,
5595 ) -> Result<TextBufferSnapshot> {
5596 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5597
5598 if let Some(version) = version {
5599 let buffer_id = buffer.read(cx).remote_id();
5600 let snapshots = self
5601 .buffer_snapshots
5602 .get_mut(&buffer_id)
5603 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5604 let mut found_snapshot = None;
5605 snapshots.retain(|(snapshot_version, snapshot)| {
5606 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5607 false
5608 } else {
5609 if *snapshot_version == version {
5610 found_snapshot = Some(snapshot.clone());
5611 }
5612 true
5613 }
5614 });
5615
5616 found_snapshot.ok_or_else(|| {
5617 anyhow!(
5618 "snapshot not found for buffer {} at version {}",
5619 buffer_id,
5620 version
5621 )
5622 })
5623 } else {
5624 Ok((buffer.read(cx)).text_snapshot())
5625 }
5626 }
5627
5628 fn language_server_for_buffer(
5629 &self,
5630 buffer: &Buffer,
5631 cx: &AppContext,
5632 ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
5633 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5634 let worktree_id = file.worktree_id(cx);
5635 let key = (worktree_id, language.lsp_adapter()?.name());
5636
5637 if let Some(server_id) = self.language_server_ids.get(&key) {
5638 if let Some(LanguageServerState::Running { adapter, server }) =
5639 self.language_servers.get(&server_id)
5640 {
5641 return Some((adapter, server));
5642 }
5643 }
5644 }
5645
5646 None
5647 }
5648}
5649
5650impl ProjectStore {
5651 pub fn new(db: Arc<Db>) -> Self {
5652 Self {
5653 db,
5654 projects: Default::default(),
5655 }
5656 }
5657
5658 pub fn projects<'a>(
5659 &'a self,
5660 cx: &'a AppContext,
5661 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5662 self.projects
5663 .iter()
5664 .filter_map(|project| project.upgrade(cx))
5665 }
5666
5667 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5668 if let Err(ix) = self
5669 .projects
5670 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5671 {
5672 self.projects.insert(ix, project);
5673 }
5674 cx.notify();
5675 }
5676
5677 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5678 let mut did_change = false;
5679 self.projects.retain(|project| {
5680 if project.is_upgradable(cx) {
5681 true
5682 } else {
5683 did_change = true;
5684 false
5685 }
5686 });
5687 if did_change {
5688 cx.notify();
5689 }
5690 }
5691}
5692
5693impl WorktreeHandle {
5694 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5695 match self {
5696 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5697 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5698 }
5699 }
5700}
5701
5702impl OpenBuffer {
5703 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5704 match self {
5705 OpenBuffer::Strong(handle) => Some(handle.clone()),
5706 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5707 OpenBuffer::Loading(_) => None,
5708 }
5709 }
5710}
5711
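/// The set of file paths in a single worktree snapshot, exposed to the fuzzy
/// matcher for project-wide path searches.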
5712struct CandidateSet {
5713 snapshot: Snapshot,
5714 include_ignored: bool,
5715 include_root_name: bool,
5716}
5717
5718impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5719 type Candidates = CandidateSetIter<'a>;
5720
5721 fn id(&self) -> usize {
5722 self.snapshot.id().to_usize()
5723 }
5724
5725 fn len(&self) -> usize {
5726 if self.include_ignored {
5727 self.snapshot.file_count()
5728 } else {
5729 self.snapshot.visible_file_count()
5730 }
5731 }
5732
5733 fn prefix(&self) -> Arc<str> {
5734 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5735 self.snapshot.root_name().into()
5736 } else if self.include_root_name {
5737 format!("{}/", self.snapshot.root_name()).into()
5738 } else {
5739 "".into()
5740 }
5741 }
5742
5743 fn candidates(&'a self, start: usize) -> Self::Candidates {
5744 CandidateSetIter {
5745 traversal: self.snapshot.files(self.include_ignored, start),
5746 }
5747 }
5748}
5749
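/// Iterates over a worktree traversal, yielding a `PathMatchCandidate` for
/// each file entry.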
5750struct CandidateSetIter<'a> {
5751 traversal: Traversal<'a>,
5752}
5753
5754impl<'a> Iterator for CandidateSetIter<'a> {
5755 type Item = PathMatchCandidate<'a>;
5756
5757 fn next(&mut self) -> Option<Self::Item> {
5758 self.traversal.next().map(|entry| {
5759 if let EntryKind::File(char_bag) = entry.kind {
5760 PathMatchCandidate {
5761 path: &entry.path,
5762 char_bag,
5763 }
5764 } else {
5765 unreachable!()
5766 }
5767 })
5768 }
5769}
5770
5771impl Entity for ProjectStore {
5772 type Event = ();
5773}
5774
5775impl Entity for Project {
5776 type Event = Event;
5777
5778 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5779 self.project_store.update(cx, ProjectStore::prune_projects);
5780
5781 match &self.client_state {
5782 ProjectClientState::Local { remote_id_rx, .. } => {
5783 if let Some(project_id) = *remote_id_rx.borrow() {
5784 self.client
5785 .send(proto::UnregisterProject { project_id })
5786 .log_err();
5787 }
5788 }
5789 ProjectClientState::Remote { remote_id, .. } => {
5790 self.client
5791 .send(proto::LeaveProject {
5792 project_id: *remote_id,
5793 })
5794 .log_err();
5795 }
5796 }
5797 }
5798
5799 fn app_will_quit(
5800 &mut self,
5801 _: &mut MutableAppContext,
5802 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5803 let shutdown_futures = self
5804 .language_servers
5805 .drain()
5806 .filter_map(|(_, server_state)| {
5807 // TODO: Handle starting servers?
5808 if let LanguageServerState::Running { server, .. } = server_state {
5809 server.shutdown()
5810 } else {
5811 None
5812 }
5813 })
5814 .collect::<Vec<_>>();
5815
5816 Some(
5817 async move {
5818 futures::future::join_all(shutdown_futures).await;
5819 }
5820 .boxed(),
5821 )
5822 }
5823}
5824
5825impl Collaborator {
5826 fn from_proto(
5827 message: proto::Collaborator,
5828 user_store: &ModelHandle<UserStore>,
5829 cx: &mut AsyncAppContext,
5830 ) -> impl Future<Output = Result<Self>> {
5831 let user = user_store.update(cx, |user_store, cx| {
5832 user_store.fetch_user(message.user_id, cx)
5833 });
5834
5835 async move {
5836 Ok(Self {
5837 peer_id: PeerId(message.peer_id),
5838 user: user.await?,
5839 replica_id: message.replica_id as ReplicaId,
5840 })
5841 }
5842 }
5843}
5844
5845impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5846 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5847 Self {
5848 worktree_id,
5849 path: path.as_ref().into(),
5850 }
5851 }
5852}
5853
5854impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5855 fn from(options: lsp::CreateFileOptions) -> Self {
5856 Self {
5857 overwrite: options.overwrite.unwrap_or(false),
5858 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5859 }
5860 }
5861}
5862
5863impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5864 fn from(options: lsp::RenameFileOptions) -> Self {
5865 Self {
5866 overwrite: options.overwrite.unwrap_or(false),
5867 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5868 }
5869 }
5870}
5871
5872impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5873 fn from(options: lsp::DeleteFileOptions) -> Self {
5874 Self {
5875 recursive: options.recursive.unwrap_or(false),
5876 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5877 }
5878 }
5879}
5880
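/// Converts a `Symbol` into its protobuf representation for transmission over
/// the wire.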
5881fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5882 proto::Symbol {
5883 source_worktree_id: symbol.source_worktree_id.to_proto(),
5884 worktree_id: symbol.worktree_id.to_proto(),
5885 language_server_name: symbol.language_server_name.0.to_string(),
5886 name: symbol.name.clone(),
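        // This transmute assumes `lsp::SymbolKind` shares the in-memory
        // representation of the protobuf field's integer type.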
5887 kind: unsafe { mem::transmute(symbol.kind) },
5888 path: symbol.path.to_string_lossy().to_string(),
5889 start: Some(proto::Point {
5890 row: symbol.range.start.row,
5891 column: symbol.range.start.column,
5892 }),
5893 end: Some(proto::Point {
5894 row: symbol.range.end.row,
5895 column: symbol.range.end.column,
5896 }),
5897 signature: symbol.signature.to_vec(),
5898 }
5899}
5900
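/// Computes `path` relative to `base`, inserting `..` components where the two
/// paths diverge. For example, relativizing `/a/c/d` against a base of `/a/b`
/// yields `../c/d`.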
5901fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5902 let mut path_components = path.components();
5903 let mut base_components = base.components();
5904 let mut components: Vec<Component> = Vec::new();
5905 loop {
5906 match (path_components.next(), base_components.next()) {
5907 (None, None) => break,
5908 (Some(a), None) => {
5909 components.push(a);
5910 components.extend(path_components.by_ref());
5911 break;
5912 }
5913 (None, _) => components.push(Component::ParentDir),
5914 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5915 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5916 (Some(a), Some(_)) => {
5917 components.push(Component::ParentDir);
5918 for _ in base_components {
5919 components.push(Component::ParentDir);
5920 }
5921 components.push(a);
5922 components.extend(path_components.by_ref());
5923 break;
5924 }
5925 }
5926 }
5927 components.iter().map(|c| c.as_os_str()).collect()
5928}
5929
5930impl Item for Buffer {
5931 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5932 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5933 }
5934}
5935
5936#[cfg(test)]
5937mod tests {
5938 use crate::worktree::WorktreeHandle;
5939
5940 use super::{Event, *};
5941 use fs::RealFs;
5942 use futures::{future, StreamExt};
5943 use gpui::{executor::Deterministic, test::subscribe};
5944 use language::{
5945 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5946 OffsetRangeExt, Point, ToPoint,
5947 };
5948 use lsp::Url;
5949 use serde_json::json;
5950 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5951 use unindent::Unindent as _;
5952 use util::{assert_set_eq, test::temp_tree};
5953
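    // Verifies that a worktree rooted at a symlink is populated correctly and
    // that fuzzy path matching returns the expected entries.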
5954 #[gpui::test]
5955 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5956 let dir = temp_tree(json!({
5957 "root": {
5958 "apple": "",
5959 "banana": {
5960 "carrot": {
5961 "date": "",
5962 "endive": "",
5963 }
5964 },
5965 "fennel": {
5966 "grape": "",
5967 }
5968 }
5969 }));
5970
5971 let root_link_path = dir.path().join("root_link");
5972 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5973 unix::fs::symlink(
5974 &dir.path().join("root/fennel"),
5975 &dir.path().join("root/finnochio"),
5976 )
5977 .unwrap();
5978
5979 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5980
5981 project.read_with(cx, |project, cx| {
5982 let tree = project.worktrees(cx).next().unwrap().read(cx);
5983 assert_eq!(tree.file_count(), 5);
5984 assert_eq!(
5985 tree.inode_for_path("fennel/grape"),
5986 tree.inode_for_path("finnochio/grape")
5987 );
5988 });
5989
5990 let cancel_flag = Default::default();
5991 let results = project
5992 .read_with(cx, |project, cx| {
5993 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5994 })
5995 .await;
5996 assert_eq!(
5997 results
5998 .into_iter()
5999 .map(|result| result.path)
6000 .collect::<Vec<Arc<Path>>>(),
6001 vec![
6002 PathBuf::from("banana/carrot/date").into(),
6003 PathBuf::from("banana/carrot/endive").into(),
6004 ]
6005 );
6006 }
6007
6008 #[gpui::test]
6009 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
6010 cx.foreground().forbid_parking();
6011
6012 let mut rust_language = Language::new(
6013 LanguageConfig {
6014 name: "Rust".into(),
6015 path_suffixes: vec!["rs".to_string()],
6016 ..Default::default()
6017 },
6018 Some(tree_sitter_rust::language()),
6019 );
6020 let mut json_language = Language::new(
6021 LanguageConfig {
6022 name: "JSON".into(),
6023 path_suffixes: vec!["json".to_string()],
6024 ..Default::default()
6025 },
6026 None,
6027 );
6028 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
6029 name: "the-rust-language-server",
6030 capabilities: lsp::ServerCapabilities {
6031 completion_provider: Some(lsp::CompletionOptions {
6032 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6033 ..Default::default()
6034 }),
6035 ..Default::default()
6036 },
6037 ..Default::default()
6038 });
6039 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6040 name: "the-json-language-server",
6041 capabilities: lsp::ServerCapabilities {
6042 completion_provider: Some(lsp::CompletionOptions {
6043 trigger_characters: Some(vec![":".to_string()]),
6044 ..Default::default()
6045 }),
6046 ..Default::default()
6047 },
6048 ..Default::default()
6049 });
6050
6051 let fs = FakeFs::new(cx.background());
6052 fs.insert_tree(
6053 "/the-root",
6054 json!({
6055 "test.rs": "const A: i32 = 1;",
6056 "test2.rs": "",
6057 "Cargo.toml": "a = 1",
6058 "package.json": "{\"a\": 1}",
6059 }),
6060 )
6061 .await;
6062
6063 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6064 project.update(cx, |project, _| {
6065 project.languages.add(Arc::new(rust_language));
6066 project.languages.add(Arc::new(json_language));
6067 });
6068
6069 // Open a buffer without an associated language server.
6070 let toml_buffer = project
6071 .update(cx, |project, cx| {
6072 project.open_local_buffer("/the-root/Cargo.toml", cx)
6073 })
6074 .await
6075 .unwrap();
6076
6077 // Open a buffer with an associated language server.
6078 let rust_buffer = project
6079 .update(cx, |project, cx| {
6080 project.open_local_buffer("/the-root/test.rs", cx)
6081 })
6082 .await
6083 .unwrap();
6084
6085 // A server is started up, and it is notified about Rust files.
6086 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6087 assert_eq!(
6088 fake_rust_server
6089 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6090 .await
6091 .text_document,
6092 lsp::TextDocumentItem {
6093 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6094 version: 0,
6095 text: "const A: i32 = 1;".to_string(),
6096 language_id: Default::default()
6097 }
6098 );
6099
6100 // The buffer is configured based on the language server's capabilities.
6101 rust_buffer.read_with(cx, |buffer, _| {
6102 assert_eq!(
6103 buffer.completion_triggers(),
6104 &[".".to_string(), "::".to_string()]
6105 );
6106 });
6107 toml_buffer.read_with(cx, |buffer, _| {
6108 assert!(buffer.completion_triggers().is_empty());
6109 });
6110
6111 // Edit a buffer. The changes are reported to the language server.
6112 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6113 assert_eq!(
6114 fake_rust_server
6115 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6116 .await
6117 .text_document,
6118 lsp::VersionedTextDocumentIdentifier::new(
6119 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6120 1
6121 )
6122 );
6123
6124 // Open a third buffer with a different associated language server.
6125 let json_buffer = project
6126 .update(cx, |project, cx| {
6127 project.open_local_buffer("/the-root/package.json", cx)
6128 })
6129 .await
6130 .unwrap();
6131
6132        // A JSON language server is started up and is only notified about the JSON buffer.
6133 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6134 assert_eq!(
6135 fake_json_server
6136 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6137 .await
6138 .text_document,
6139 lsp::TextDocumentItem {
6140 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6141 version: 0,
6142 text: "{\"a\": 1}".to_string(),
6143 language_id: Default::default()
6144 }
6145 );
6146
6147 // This buffer is configured based on the second language server's
6148 // capabilities.
6149 json_buffer.read_with(cx, |buffer, _| {
6150 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6151 });
6152
6153 // When opening another buffer whose language server is already running,
6154 // it is also configured based on the existing language server's capabilities.
6155 let rust_buffer2 = project
6156 .update(cx, |project, cx| {
6157 project.open_local_buffer("/the-root/test2.rs", cx)
6158 })
6159 .await
6160 .unwrap();
6161 rust_buffer2.read_with(cx, |buffer, _| {
6162 assert_eq!(
6163 buffer.completion_triggers(),
6164 &[".".to_string(), "::".to_string()]
6165 );
6166 });
6167
6168 // Changes are reported only to servers matching the buffer's language.
6169 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6170 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6171 assert_eq!(
6172 fake_rust_server
6173 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6174 .await
6175 .text_document,
6176 lsp::VersionedTextDocumentIdentifier::new(
6177 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6178 1
6179 )
6180 );
6181
6182 // Save notifications are reported to all servers.
6183 toml_buffer
6184 .update(cx, |buffer, cx| buffer.save(cx))
6185 .await
6186 .unwrap();
6187 assert_eq!(
6188 fake_rust_server
6189 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6190 .await
6191 .text_document,
6192 lsp::TextDocumentIdentifier::new(
6193 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6194 )
6195 );
6196 assert_eq!(
6197 fake_json_server
6198 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6199 .await
6200 .text_document,
6201 lsp::TextDocumentIdentifier::new(
6202 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6203 )
6204 );
6205
6206 // Renames are reported only to servers matching the buffer's language.
6207 fs.rename(
6208 Path::new("/the-root/test2.rs"),
6209 Path::new("/the-root/test3.rs"),
6210 Default::default(),
6211 )
6212 .await
6213 .unwrap();
6214 assert_eq!(
6215 fake_rust_server
6216 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6217 .await
6218 .text_document,
6219 lsp::TextDocumentIdentifier::new(
6220 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6221 ),
6222 );
6223 assert_eq!(
6224 fake_rust_server
6225 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6226 .await
6227 .text_document,
6228 lsp::TextDocumentItem {
6229 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6230 version: 0,
6231 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6232 language_id: Default::default()
6233 },
6234 );
6235
6236 rust_buffer2.update(cx, |buffer, cx| {
6237 buffer.update_diagnostics(
6238 DiagnosticSet::from_sorted_entries(
6239 vec![DiagnosticEntry {
6240 diagnostic: Default::default(),
6241 range: Anchor::MIN..Anchor::MAX,
6242 }],
6243 &buffer.snapshot(),
6244 ),
6245 cx,
6246 );
6247 assert_eq!(
6248 buffer
6249 .snapshot()
6250 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6251 .count(),
6252 1
6253 );
6254 });
6255
6256 // When the rename changes the extension of the file, the buffer gets closed on the old
6257 // language server and gets opened on the new one.
6258 fs.rename(
6259 Path::new("/the-root/test3.rs"),
6260 Path::new("/the-root/test3.json"),
6261 Default::default(),
6262 )
6263 .await
6264 .unwrap();
6265 assert_eq!(
6266 fake_rust_server
6267 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6268 .await
6269 .text_document,
6270 lsp::TextDocumentIdentifier::new(
6271 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6272 ),
6273 );
6274 assert_eq!(
6275 fake_json_server
6276 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6277 .await
6278 .text_document,
6279 lsp::TextDocumentItem {
6280 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6281 version: 0,
6282 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6283 language_id: Default::default()
6284 },
6285 );
6286
6287 // We clear the diagnostics, since the language has changed.
6288 rust_buffer2.read_with(cx, |buffer, _| {
6289 assert_eq!(
6290 buffer
6291 .snapshot()
6292 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6293 .count(),
6294 0
6295 );
6296 });
6297
6298 // The renamed file's version resets after changing language server.
6299 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6300 assert_eq!(
6301 fake_json_server
6302 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6303 .await
6304 .text_document,
6305 lsp::VersionedTextDocumentIdentifier::new(
6306 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6307 1
6308 )
6309 );
6310
6311 // Restart language servers
6312 project.update(cx, |project, cx| {
6313 project.restart_language_servers_for_buffers(
6314 vec![rust_buffer.clone(), json_buffer.clone()],
6315 cx,
6316 );
6317 });
6318
6319 let mut rust_shutdown_requests = fake_rust_server
6320 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6321 let mut json_shutdown_requests = fake_json_server
6322 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6323 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6324
6325 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6326 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6327
6328        // Ensure the Rust document is reopened in the new Rust language server
6329 assert_eq!(
6330 fake_rust_server
6331 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6332 .await
6333 .text_document,
6334 lsp::TextDocumentItem {
6335 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6336 version: 1,
6337 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6338 language_id: Default::default()
6339 }
6340 );
6341
6342        // Ensure the JSON documents are reopened in the new JSON language server
6343 assert_set_eq!(
6344 [
6345 fake_json_server
6346 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6347 .await
6348 .text_document,
6349 fake_json_server
6350 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6351 .await
6352 .text_document,
6353 ],
6354 [
6355 lsp::TextDocumentItem {
6356 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6357 version: 0,
6358 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6359 language_id: Default::default()
6360 },
6361 lsp::TextDocumentItem {
6362 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6363 version: 1,
6364 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6365 language_id: Default::default()
6366 }
6367 ]
6368 );
6369
6370 // Close notifications are reported only to servers matching the buffer's language.
6371 cx.update(|_| drop(json_buffer));
6372 let close_message = lsp::DidCloseTextDocumentParams {
6373 text_document: lsp::TextDocumentIdentifier::new(
6374 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6375 ),
6376 };
6377 assert_eq!(
6378 fake_json_server
6379 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6380 .await,
6381 close_message,
6382 );
6383 }
6384
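    // Verifies that diagnostics are routed to the correct buffer when each
    // file is opened as its own single-file worktree.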
6385 #[gpui::test]
6386 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6387 cx.foreground().forbid_parking();
6388
6389 let fs = FakeFs::new(cx.background());
6390 fs.insert_tree(
6391 "/dir",
6392 json!({
6393 "a.rs": "let a = 1;",
6394 "b.rs": "let b = 2;"
6395 }),
6396 )
6397 .await;
6398
6399 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6400
6401 let buffer_a = project
6402 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6403 .await
6404 .unwrap();
6405 let buffer_b = project
6406 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6407 .await
6408 .unwrap();
6409
6410 project.update(cx, |project, cx| {
6411 project
6412 .update_diagnostics(
6413 0,
6414 lsp::PublishDiagnosticsParams {
6415 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6416 version: None,
6417 diagnostics: vec![lsp::Diagnostic {
6418 range: lsp::Range::new(
6419 lsp::Position::new(0, 4),
6420 lsp::Position::new(0, 5),
6421 ),
6422 severity: Some(lsp::DiagnosticSeverity::ERROR),
6423 message: "error 1".to_string(),
6424 ..Default::default()
6425 }],
6426 },
6427 &[],
6428 cx,
6429 )
6430 .unwrap();
6431 project
6432 .update_diagnostics(
6433 0,
6434 lsp::PublishDiagnosticsParams {
6435 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6436 version: None,
6437 diagnostics: vec![lsp::Diagnostic {
6438 range: lsp::Range::new(
6439 lsp::Position::new(0, 4),
6440 lsp::Position::new(0, 5),
6441 ),
6442 severity: Some(lsp::DiagnosticSeverity::WARNING),
6443 message: "error 2".to_string(),
6444 ..Default::default()
6445 }],
6446 },
6447 &[],
6448 cx,
6449 )
6450 .unwrap();
6451 });
6452
6453 buffer_a.read_with(cx, |buffer, _| {
6454 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6455 assert_eq!(
6456 chunks
6457 .iter()
6458 .map(|(s, d)| (s.as_str(), *d))
6459 .collect::<Vec<_>>(),
6460 &[
6461 ("let ", None),
6462 ("a", Some(DiagnosticSeverity::ERROR)),
6463 (" = 1;", None),
6464 ]
6465 );
6466 });
6467 buffer_b.read_with(cx, |buffer, _| {
6468 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6469 assert_eq!(
6470 chunks
6471 .iter()
6472 .map(|(s, d)| (s.as_str(), *d))
6473 .collect::<Vec<_>>(),
6474 &[
6475 ("let ", None),
6476 ("b", Some(DiagnosticSeverity::WARNING)),
6477 (" = 2;", None),
6478 ]
6479 );
6480 });
6481 }
6482
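    // Verifies that diagnostics reported for a non-visible worktree still show
    // up in its buffer but are excluded from the project-wide summaries.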
6483 #[gpui::test]
6484 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6485 cx.foreground().forbid_parking();
6486
6487 let fs = FakeFs::new(cx.background());
6488 fs.insert_tree(
6489 "/root",
6490 json!({
6491 "dir": {
6492 "a.rs": "let a = 1;",
6493 },
6494 "other.rs": "let b = c;"
6495 }),
6496 )
6497 .await;
6498
6499 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6500
6501 let (worktree, _) = project
6502 .update(cx, |project, cx| {
6503 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6504 })
6505 .await
6506 .unwrap();
6507 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6508
6509 project.update(cx, |project, cx| {
6510 project
6511 .update_diagnostics(
6512 0,
6513 lsp::PublishDiagnosticsParams {
6514 uri: Url::from_file_path("/root/other.rs").unwrap(),
6515 version: None,
6516 diagnostics: vec![lsp::Diagnostic {
6517 range: lsp::Range::new(
6518 lsp::Position::new(0, 8),
6519 lsp::Position::new(0, 9),
6520 ),
6521 severity: Some(lsp::DiagnosticSeverity::ERROR),
6522 message: "unknown variable 'c'".to_string(),
6523 ..Default::default()
6524 }],
6525 },
6526 &[],
6527 cx,
6528 )
6529 .unwrap();
6530 });
6531
6532 let buffer = project
6533 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6534 .await
6535 .unwrap();
6536 buffer.read_with(cx, |buffer, _| {
6537 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6538 assert_eq!(
6539 chunks
6540 .iter()
6541 .map(|(s, d)| (s.as_str(), *d))
6542 .collect::<Vec<_>>(),
6543 &[
6544 ("let b = ", None),
6545 ("c", Some(DiagnosticSeverity::ERROR)),
6546 (";", None),
6547 ]
6548 );
6549 });
6550
6551 project.read_with(cx, |project, cx| {
6552 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6553 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6554 });
6555 }
6556
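    // Verifies the sequence of events emitted while a language server reports
    // disk-based diagnostics via its progress token.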
6557 #[gpui::test]
6558 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6559 cx.foreground().forbid_parking();
6560
6561 let progress_token = "the-progress-token";
6562 let mut language = Language::new(
6563 LanguageConfig {
6564 name: "Rust".into(),
6565 path_suffixes: vec!["rs".to_string()],
6566 ..Default::default()
6567 },
6568 Some(tree_sitter_rust::language()),
6569 );
6570 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6571 disk_based_diagnostics_progress_token: Some(progress_token),
6572 disk_based_diagnostics_sources: &["disk"],
6573 ..Default::default()
6574 });
6575
6576 let fs = FakeFs::new(cx.background());
6577 fs.insert_tree(
6578 "/dir",
6579 json!({
6580 "a.rs": "fn a() { A }",
6581 "b.rs": "const y: i32 = 1",
6582 }),
6583 )
6584 .await;
6585
6586 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6587 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6588 let worktree_id =
6589 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6590
6591        // Cause the worktree to start the fake language server
6592 let _buffer = project
6593 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6594 .await
6595 .unwrap();
6596
6597 let mut events = subscribe(&project, cx);
6598
6599 let fake_server = fake_servers.next().await.unwrap();
6600 fake_server.start_progress(progress_token).await;
6601 assert_eq!(
6602 events.next().await.unwrap(),
6603 Event::DiskBasedDiagnosticsStarted {
6604 language_server_id: 0,
6605 }
6606 );
6607
6608 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6609 lsp::PublishDiagnosticsParams {
6610 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6611 version: None,
6612 diagnostics: vec![lsp::Diagnostic {
6613 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6614 severity: Some(lsp::DiagnosticSeverity::ERROR),
6615 message: "undefined variable 'A'".to_string(),
6616 ..Default::default()
6617 }],
6618 },
6619 );
6620 assert_eq!(
6621 events.next().await.unwrap(),
6622 Event::DiagnosticsUpdated {
6623 language_server_id: 0,
6624 path: (worktree_id, Path::new("a.rs")).into()
6625 }
6626 );
6627
6628 fake_server.end_progress(progress_token);
6629 assert_eq!(
6630 events.next().await.unwrap(),
6631 Event::DiskBasedDiagnosticsFinished {
6632 language_server_id: 0
6633 }
6634 );
6635
6636 let buffer = project
6637 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6638 .await
6639 .unwrap();
6640
6641 buffer.read_with(cx, |buffer, _| {
6642 let snapshot = buffer.snapshot();
6643 let diagnostics = snapshot
6644 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6645 .collect::<Vec<_>>();
6646 assert_eq!(
6647 diagnostics,
6648 &[DiagnosticEntry {
6649 range: Point::new(0, 9)..Point::new(0, 10),
6650 diagnostic: Diagnostic {
6651 severity: lsp::DiagnosticSeverity::ERROR,
6652 message: "undefined variable 'A'".to_string(),
6653 group_id: 0,
6654 is_primary: true,
6655 ..Default::default()
6656 }
6657 }]
6658 )
6659 });
6660
6661 // Ensure publishing empty diagnostics twice only results in one update event.
6662 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6663 lsp::PublishDiagnosticsParams {
6664 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6665 version: None,
6666 diagnostics: Default::default(),
6667 },
6668 );
6669 assert_eq!(
6670 events.next().await.unwrap(),
6671 Event::DiagnosticsUpdated {
6672 language_server_id: 0,
6673 path: (worktree_id, Path::new("a.rs")).into()
6674 }
6675 );
6676
6677 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6678 lsp::PublishDiagnosticsParams {
6679 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6680 version: None,
6681 diagnostics: Default::default(),
6682 },
6683 );
6684 cx.foreground().run_until_parked();
6685 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6686 }
6687
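    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress attributes subsequent events to the
    // newly started server.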
6688 #[gpui::test]
6689 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6690 cx.foreground().forbid_parking();
6691
6692 let progress_token = "the-progress-token";
6693 let mut language = Language::new(
6694 LanguageConfig {
6695 path_suffixes: vec!["rs".to_string()],
6696 ..Default::default()
6697 },
6698 None,
6699 );
6700 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6701 disk_based_diagnostics_sources: &["disk"],
6702 disk_based_diagnostics_progress_token: Some(progress_token),
6703 ..Default::default()
6704 });
6705
6706 let fs = FakeFs::new(cx.background());
6707 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6708
6709 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6710 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6711
6712 let buffer = project
6713 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6714 .await
6715 .unwrap();
6716
6717 // Simulate diagnostics starting to update.
6718 let fake_server = fake_servers.next().await.unwrap();
6719 fake_server.start_progress(progress_token).await;
6720
6721 // Restart the server before the diagnostics finish updating.
6722 project.update(cx, |project, cx| {
6723 project.restart_language_servers_for_buffers([buffer], cx);
6724 });
6725 let mut events = subscribe(&project, cx);
6726
6727 // Simulate the newly started server sending more diagnostics.
6728 let fake_server = fake_servers.next().await.unwrap();
6729 fake_server.start_progress(progress_token).await;
6730 assert_eq!(
6731 events.next().await.unwrap(),
6732 Event::DiskBasedDiagnosticsStarted {
6733 language_server_id: 1
6734 }
6735 );
6736 project.read_with(cx, |project, _| {
6737 assert_eq!(
6738 project
6739 .language_servers_running_disk_based_diagnostics()
6740 .collect::<Vec<_>>(),
6741 [1]
6742 );
6743 });
6744
6745 // All diagnostics are considered done, despite the old server's diagnostic
6746 // task never completing.
6747 fake_server.end_progress(progress_token);
6748 assert_eq!(
6749 events.next().await.unwrap(),
6750 Event::DiskBasedDiagnosticsFinished {
6751 language_server_id: 1
6752 }
6753 );
6754 project.read_with(cx, |project, _| {
6755 assert_eq!(
6756 project
6757 .language_servers_running_disk_based_diagnostics()
6758 .collect::<Vec<_>>(),
6759 [0; 0]
6760 );
6761 });
6762 }
6763
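    // Verifies that toggling the `enable_language_server` setting stops and
    // restarts only the language servers for the affected language.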
6764 #[gpui::test]
6765 async fn test_toggling_enable_language_server(
6766 deterministic: Arc<Deterministic>,
6767 cx: &mut gpui::TestAppContext,
6768 ) {
6769 deterministic.forbid_parking();
6770
6771 let mut rust = Language::new(
6772 LanguageConfig {
6773 name: Arc::from("Rust"),
6774 path_suffixes: vec!["rs".to_string()],
6775 ..Default::default()
6776 },
6777 None,
6778 );
6779 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6780 name: "rust-lsp",
6781 ..Default::default()
6782 });
6783 let mut js = Language::new(
6784 LanguageConfig {
6785 name: Arc::from("JavaScript"),
6786 path_suffixes: vec!["js".to_string()],
6787 ..Default::default()
6788 },
6789 None,
6790 );
6791 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6792 name: "js-lsp",
6793 ..Default::default()
6794 });
6795
6796 let fs = FakeFs::new(cx.background());
6797 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6798 .await;
6799
6800 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6801 project.update(cx, |project, _| {
6802 project.languages.add(Arc::new(rust));
6803 project.languages.add(Arc::new(js));
6804 });
6805
6806 let _rs_buffer = project
6807 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6808 .await
6809 .unwrap();
6810 let _js_buffer = project
6811 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6812 .await
6813 .unwrap();
6814
6815 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6816 assert_eq!(
6817 fake_rust_server_1
6818 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6819 .await
6820 .text_document
6821 .uri
6822 .as_str(),
6823 "file:///dir/a.rs"
6824 );
6825
6826 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6827 assert_eq!(
6828 fake_js_server
6829 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6830 .await
6831 .text_document
6832 .uri
6833 .as_str(),
6834 "file:///dir/b.js"
6835 );
6836
6837        // Disable the Rust language server, ensuring that only that server gets stopped.
6838 cx.update(|cx| {
6839 cx.update_global(|settings: &mut Settings, _| {
6840 settings.language_overrides.insert(
6841 Arc::from("Rust"),
6842 settings::LanguageSettings {
6843 enable_language_server: Some(false),
6844 ..Default::default()
6845 },
6846 );
6847 })
6848 });
6849 fake_rust_server_1
6850 .receive_notification::<lsp::notification::Exit>()
6851 .await;
6852
6853 // Enable Rust and disable JavaScript language servers, ensuring that the
6854 // former gets started again and that the latter stops.
6855 cx.update(|cx| {
6856 cx.update_global(|settings: &mut Settings, _| {
6857 settings.language_overrides.insert(
6858 Arc::from("Rust"),
6859 settings::LanguageSettings {
6860 enable_language_server: Some(true),
6861 ..Default::default()
6862 },
6863 );
6864 settings.language_overrides.insert(
6865 Arc::from("JavaScript"),
6866 settings::LanguageSettings {
6867 enable_language_server: Some(false),
6868 ..Default::default()
6869 },
6870 );
6871 })
6872 });
6873 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6874 assert_eq!(
6875 fake_rust_server_2
6876 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6877 .await
6878 .text_document
6879 .uri
6880 .as_str(),
6881 "file:///dir/a.rs"
6882 );
6883 fake_js_server
6884 .receive_notification::<lsp::notification::Exit>()
6885 .await;
6886 }
6887
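    // Verifies that diagnostics published against an older version of the
    // buffer are translated through the edits made since that version.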
6888 #[gpui::test]
6889 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6890 cx.foreground().forbid_parking();
6891
6892 let mut language = Language::new(
6893 LanguageConfig {
6894 name: "Rust".into(),
6895 path_suffixes: vec!["rs".to_string()],
6896 ..Default::default()
6897 },
6898 Some(tree_sitter_rust::language()),
6899 );
6900 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6901 disk_based_diagnostics_sources: &["disk"],
6902 ..Default::default()
6903 });
6904
6905 let text = "
6906 fn a() { A }
6907 fn b() { BB }
6908 fn c() { CCC }
6909 "
6910 .unindent();
6911
6912 let fs = FakeFs::new(cx.background());
6913 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6914
6915 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6916 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6917
6918 let buffer = project
6919 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6920 .await
6921 .unwrap();
6922
6923 let mut fake_server = fake_servers.next().await.unwrap();
6924 let open_notification = fake_server
6925 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6926 .await;
6927
6928 // Edit the buffer, moving the content down
6929 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6930 let change_notification_1 = fake_server
6931 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6932 .await;
6933 assert!(
6934 change_notification_1.text_document.version > open_notification.text_document.version
6935 );
6936
6937 // Report some diagnostics for the initial version of the buffer
6938 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6939 lsp::PublishDiagnosticsParams {
6940 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6941 version: Some(open_notification.text_document.version),
6942 diagnostics: vec![
6943 lsp::Diagnostic {
6944 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6945 severity: Some(DiagnosticSeverity::ERROR),
6946 message: "undefined variable 'A'".to_string(),
6947 source: Some("disk".to_string()),
6948 ..Default::default()
6949 },
6950 lsp::Diagnostic {
6951 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6952 severity: Some(DiagnosticSeverity::ERROR),
6953 message: "undefined variable 'BB'".to_string(),
6954 source: Some("disk".to_string()),
6955 ..Default::default()
6956 },
6957 lsp::Diagnostic {
6958 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6959 severity: Some(DiagnosticSeverity::ERROR),
6960 source: Some("disk".to_string()),
6961 message: "undefined variable 'CCC'".to_string(),
6962 ..Default::default()
6963 },
6964 ],
6965 },
6966 );
6967
6968 // The diagnostics have moved down since they were created.
6969 buffer.next_notification(cx).await;
6970 buffer.read_with(cx, |buffer, _| {
6971 assert_eq!(
6972 buffer
6973 .snapshot()
6974 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6975 .collect::<Vec<_>>(),
6976 &[
6977 DiagnosticEntry {
6978 range: Point::new(3, 9)..Point::new(3, 11),
6979 diagnostic: Diagnostic {
6980 severity: DiagnosticSeverity::ERROR,
6981 message: "undefined variable 'BB'".to_string(),
6982 is_disk_based: true,
6983 group_id: 1,
6984 is_primary: true,
6985 ..Default::default()
6986 },
6987 },
6988 DiagnosticEntry {
6989 range: Point::new(4, 9)..Point::new(4, 12),
6990 diagnostic: Diagnostic {
6991 severity: DiagnosticSeverity::ERROR,
6992 message: "undefined variable 'CCC'".to_string(),
6993 is_disk_based: true,
6994 group_id: 2,
6995 is_primary: true,
6996 ..Default::default()
6997 }
6998 }
6999 ]
7000 );
7001 assert_eq!(
7002 chunks_with_diagnostics(buffer, 0..buffer.len()),
7003 [
7004 ("\n\nfn a() { ".to_string(), None),
7005 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7006 (" }\nfn b() { ".to_string(), None),
7007 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
7008 (" }\nfn c() { ".to_string(), None),
7009 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
7010 (" }\n".to_string(), None),
7011 ]
7012 );
7013 assert_eq!(
7014 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
7015 [
7016 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
7017 (" }\nfn c() { ".to_string(), None),
7018 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
7019 ]
7020 );
7021 });
7022
7023 // Ensure overlapping diagnostics are highlighted correctly.
7024 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7025 lsp::PublishDiagnosticsParams {
7026 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7027 version: Some(open_notification.text_document.version),
7028 diagnostics: vec![
7029 lsp::Diagnostic {
7030 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7031 severity: Some(DiagnosticSeverity::ERROR),
7032 message: "undefined variable 'A'".to_string(),
7033 source: Some("disk".to_string()),
7034 ..Default::default()
7035 },
7036 lsp::Diagnostic {
7037 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7038 severity: Some(DiagnosticSeverity::WARNING),
7039 message: "unreachable statement".to_string(),
7040 source: Some("disk".to_string()),
7041 ..Default::default()
7042 },
7043 ],
7044 },
7045 );
7046
7047 buffer.next_notification(cx).await;
7048 buffer.read_with(cx, |buffer, _| {
7049 assert_eq!(
7050 buffer
7051 .snapshot()
7052 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7053 .collect::<Vec<_>>(),
7054 &[
7055 DiagnosticEntry {
7056 range: Point::new(2, 9)..Point::new(2, 12),
7057 diagnostic: Diagnostic {
7058 severity: DiagnosticSeverity::WARNING,
7059 message: "unreachable statement".to_string(),
7060 is_disk_based: true,
7061 group_id: 4,
7062 is_primary: true,
7063 ..Default::default()
7064 }
7065 },
7066 DiagnosticEntry {
7067 range: Point::new(2, 9)..Point::new(2, 10),
7068 diagnostic: Diagnostic {
7069 severity: DiagnosticSeverity::ERROR,
7070 message: "undefined variable 'A'".to_string(),
7071 is_disk_based: true,
7072 group_id: 3,
7073 is_primary: true,
7074 ..Default::default()
7075 },
7076 }
7077 ]
7078 );
7079 assert_eq!(
7080 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7081 [
7082 ("fn a() { ".to_string(), None),
7083 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7084 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7085 ("\n".to_string(), None),
7086 ]
7087 );
7088 assert_eq!(
7089 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7090 [
7091 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7092 ("\n".to_string(), None),
7093 ]
7094 );
7095 });
7096
7097 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7098 // changes since the last save.
7099 buffer.update(cx, |buffer, cx| {
7100 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7101 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7102 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7103 });
7104 let change_notification_2 = fake_server
7105 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7106 .await;
7107 assert!(
7108 change_notification_2.text_document.version
7109 > change_notification_1.text_document.version
7110 );
7111
7112 // Handle out-of-order diagnostics
7113 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7114 lsp::PublishDiagnosticsParams {
7115 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7116 version: Some(change_notification_2.text_document.version),
7117 diagnostics: vec![
7118 lsp::Diagnostic {
7119 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7120 severity: Some(DiagnosticSeverity::ERROR),
7121 message: "undefined variable 'BB'".to_string(),
7122 source: Some("disk".to_string()),
7123 ..Default::default()
7124 },
7125 lsp::Diagnostic {
7126 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7127 severity: Some(DiagnosticSeverity::WARNING),
7128 message: "undefined variable 'A'".to_string(),
7129 source: Some("disk".to_string()),
7130 ..Default::default()
7131 },
7132 ],
7133 },
7134 );
7135
7136 buffer.next_notification(cx).await;
7137 buffer.read_with(cx, |buffer, _| {
7138 assert_eq!(
7139 buffer
7140 .snapshot()
7141 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7142 .collect::<Vec<_>>(),
7143 &[
7144 DiagnosticEntry {
7145 range: Point::new(2, 21)..Point::new(2, 22),
7146 diagnostic: Diagnostic {
7147 severity: DiagnosticSeverity::WARNING,
7148 message: "undefined variable 'A'".to_string(),
7149 is_disk_based: true,
7150 group_id: 6,
7151 is_primary: true,
7152 ..Default::default()
7153 }
7154 },
7155 DiagnosticEntry {
7156 range: Point::new(3, 9)..Point::new(3, 14),
7157 diagnostic: Diagnostic {
7158 severity: DiagnosticSeverity::ERROR,
7159 message: "undefined variable 'BB'".to_string(),
7160 is_disk_based: true,
7161 group_id: 5,
7162 is_primary: true,
7163 ..Default::default()
7164 },
7165 }
7166 ]
7167 );
7168 });
7169 }
7170
7171 #[gpui::test]
7172 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7173 cx.foreground().forbid_parking();
7174
7175 let text = concat!(
7176 "let one = ;\n", //
7177 "let two = \n",
7178 "let three = 3;\n",
7179 );
7180
7181 let fs = FakeFs::new(cx.background());
7182 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7183
7184 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7185 let buffer = project
7186 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7187 .await
7188 .unwrap();
7189
7190 project.update(cx, |project, cx| {
7191 project
7192 .update_buffer_diagnostics(
7193 &buffer,
7194 vec![
7195 DiagnosticEntry {
7196 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7197 diagnostic: Diagnostic {
7198 severity: DiagnosticSeverity::ERROR,
7199 message: "syntax error 1".to_string(),
7200 ..Default::default()
7201 },
7202 },
7203 DiagnosticEntry {
7204 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7205 diagnostic: Diagnostic {
7206 severity: DiagnosticSeverity::ERROR,
7207 message: "syntax error 2".to_string(),
7208 ..Default::default()
7209 },
7210 },
7211 ],
7212 None,
7213 cx,
7214 )
7215 .unwrap();
7216 });
7217
7218 // An empty range is extended forward to include the following character.
7219 // At the end of a line, an empty range is extended backward to include
7220 // the preceding character.
7221 buffer.read_with(cx, |buffer, _| {
7222 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7223 assert_eq!(
7224 chunks
7225 .iter()
7226 .map(|(s, d)| (s.as_str(), *d))
7227 .collect::<Vec<_>>(),
7228 &[
7229 ("let one = ", None),
7230 (";", Some(DiagnosticSeverity::ERROR)),
7231 ("\nlet two =", None),
7232 (" ", Some(DiagnosticSeverity::ERROR)),
7233 ("\nlet three = 3;\n", None)
7234 ]
7235 );
7236 });
7237 }
7238
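    // Verifies that edits computed by a language server against an older
    // document version are adjusted for the buffer edits made since then.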
7239 #[gpui::test]
7240 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7241 cx.foreground().forbid_parking();
7242
7243 let mut language = Language::new(
7244 LanguageConfig {
7245 name: "Rust".into(),
7246 path_suffixes: vec!["rs".to_string()],
7247 ..Default::default()
7248 },
7249 Some(tree_sitter_rust::language()),
7250 );
7251 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7252
7253 let text = "
7254 fn a() {
7255 f1();
7256 }
7257 fn b() {
7258 f2();
7259 }
7260 fn c() {
7261 f3();
7262 }
7263 "
7264 .unindent();
7265
7266 let fs = FakeFs::new(cx.background());
7267 fs.insert_tree(
7268 "/dir",
7269 json!({
7270 "a.rs": text.clone(),
7271 }),
7272 )
7273 .await;
7274
7275 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7276 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7277 let buffer = project
7278 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7279 .await
7280 .unwrap();
7281
7282 let mut fake_server = fake_servers.next().await.unwrap();
7283 let lsp_document_version = fake_server
7284 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7285 .await
7286 .text_document
7287 .version;
7288
7289 // Simulate editing the buffer after the language server computes some edits.
7290 buffer.update(cx, |buffer, cx| {
7291 buffer.edit(
7292 [(
7293 Point::new(0, 0)..Point::new(0, 0),
7294 "// above first function\n",
7295 )],
7296 cx,
7297 );
7298 buffer.edit(
7299 [(
7300 Point::new(2, 0)..Point::new(2, 0),
7301 " // inside first function\n",
7302 )],
7303 cx,
7304 );
7305 buffer.edit(
7306 [(
7307 Point::new(6, 4)..Point::new(6, 4),
7308 "// inside second function ",
7309 )],
7310 cx,
7311 );
7312
7313 assert_eq!(
7314 buffer.text(),
7315 "
7316 // above first function
7317 fn a() {
7318 // inside first function
7319 f1();
7320 }
7321 fn b() {
7322 // inside second function f2();
7323 }
7324 fn c() {
7325 f3();
7326 }
7327 "
7328 .unindent()
7329 );
7330 });
7331
7332 let edits = project
7333 .update(cx, |project, cx| {
7334 project.edits_from_lsp(
7335 &buffer,
7336 vec![
7337 // replace body of first function
7338 lsp::TextEdit {
7339 range: lsp::Range::new(
7340 lsp::Position::new(0, 0),
7341 lsp::Position::new(3, 0),
7342 ),
7343 new_text: "
7344 fn a() {
7345 f10();
7346 }
7347 "
7348 .unindent(),
7349 },
7350 // edit inside second function
7351 lsp::TextEdit {
7352 range: lsp::Range::new(
7353 lsp::Position::new(4, 6),
7354 lsp::Position::new(4, 6),
7355 ),
7356 new_text: "00".into(),
7357 },
7358 // edit inside third function via two distinct edits
7359 lsp::TextEdit {
7360 range: lsp::Range::new(
7361 lsp::Position::new(7, 5),
7362 lsp::Position::new(7, 5),
7363 ),
7364 new_text: "4000".into(),
7365 },
7366 lsp::TextEdit {
7367 range: lsp::Range::new(
7368 lsp::Position::new(7, 5),
7369 lsp::Position::new(7, 6),
7370 ),
7371 new_text: "".into(),
7372 },
7373 ],
7374 Some(lsp_document_version),
7375 cx,
7376 )
7377 })
7378 .await
7379 .unwrap();
7380
7381 buffer.update(cx, |buffer, cx| {
7382 for (range, new_text) in edits {
7383 buffer.edit([(range, new_text)], cx);
7384 }
7385 assert_eq!(
7386 buffer.text(),
7387 "
7388 // above first function
7389 fn a() {
7390 // inside first function
7391 f10();
7392 }
7393 fn b() {
7394 // inside second function f200();
7395 }
7396 fn c() {
7397 f4000();
7398 }
7399 "
7400 .unindent()
7401 );
7402 });
7403 }
7404
7405 #[gpui::test]
7406 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7407 cx.foreground().forbid_parking();
7408
7409 let text = "
7410 use a::b;
7411 use a::c;
7412
7413 fn f() {
7414 b();
7415 c();
7416 }
7417 "
7418 .unindent();
7419
7420 let fs = FakeFs::new(cx.background());
7421 fs.insert_tree(
7422 "/dir",
7423 json!({
7424 "a.rs": text.clone(),
7425 }),
7426 )
7427 .await;
7428
7429 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7430 let buffer = project
7431 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7432 .await
7433 .unwrap();
7434
7435 // Simulate the language server sending us a small edit in the form of a very large diff.
7436 // Rust-analyzer does this when performing a merge-imports code action.
7437 let edits = project
7438 .update(cx, |project, cx| {
7439 project.edits_from_lsp(
7440 &buffer,
7441 [
7442 // Replace the first use statement without editing the semicolon.
7443 lsp::TextEdit {
7444 range: lsp::Range::new(
7445 lsp::Position::new(0, 4),
7446 lsp::Position::new(0, 8),
7447 ),
7448 new_text: "a::{b, c}".into(),
7449 },
7450 // Reinsert the remainder of the file between the semicolon and the final
7451 // newline of the file.
7452 lsp::TextEdit {
7453 range: lsp::Range::new(
7454 lsp::Position::new(0, 9),
7455 lsp::Position::new(0, 9),
7456 ),
7457 new_text: "\n\n".into(),
7458 },
7459 lsp::TextEdit {
7460 range: lsp::Range::new(
7461 lsp::Position::new(0, 9),
7462 lsp::Position::new(0, 9),
7463 ),
7464 new_text: "
7465 fn f() {
7466 b();
7467 c();
7468 }"
7469 .unindent(),
7470 },
7471 // Delete everything after the first newline of the file.
7472 lsp::TextEdit {
7473 range: lsp::Range::new(
7474 lsp::Position::new(1, 0),
7475 lsp::Position::new(7, 0),
7476 ),
7477 new_text: "".into(),
7478 },
7479 ],
7480 None,
7481 cx,
7482 )
7483 })
7484 .await
7485 .unwrap();
7486
7487 buffer.update(cx, |buffer, cx| {
7488 let edits = edits
7489 .into_iter()
7490 .map(|(range, text)| {
7491 (
7492 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7493 text,
7494 )
7495 })
7496 .collect::<Vec<_>>();
7497
7498 assert_eq!(
7499 edits,
7500 [
7501 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7502 (Point::new(1, 0)..Point::new(2, 0), "".into())
7503 ]
7504 );
7505
7506 for (range, new_text) in edits {
7507 buffer.edit([(range, new_text)], cx);
7508 }
7509 assert_eq!(
7510 buffer.text(),
7511 "
7512 use a::{b, c};
7513
7514 fn f() {
7515 b();
7516 c();
7517 }
7518 "
7519 .unindent()
7520 );
7521 });
7522 }
7523
7524 #[gpui::test]
7525 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7526 cx.foreground().forbid_parking();
7527
7528 let text = "
7529 use a::b;
7530 use a::c;
7531
7532 fn f() {
7533 b();
7534 c();
7535 }
7536 "
7537 .unindent();
7538
7539 let fs = FakeFs::new(cx.background());
7540 fs.insert_tree(
7541 "/dir",
7542 json!({
7543 "a.rs": text.clone(),
7544 }),
7545 )
7546 .await;
7547
7548 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7549 let buffer = project
7550 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7551 .await
7552 .unwrap();
7553
7554 // Simulate the language server sending us edits in a non-ordered fashion,
7555 // with ranges sometimes being inverted.
7556 let edits = project
7557 .update(cx, |project, cx| {
7558 project.edits_from_lsp(
7559 &buffer,
7560 [
7561 lsp::TextEdit {
7562 range: lsp::Range::new(
7563 lsp::Position::new(0, 9),
7564 lsp::Position::new(0, 9),
7565 ),
7566 new_text: "\n\n".into(),
7567 },
7568 lsp::TextEdit {
7569 range: lsp::Range::new(
7570 lsp::Position::new(0, 8),
7571 lsp::Position::new(0, 4),
7572 ),
7573 new_text: "a::{b, c}".into(),
7574 },
7575 lsp::TextEdit {
7576 range: lsp::Range::new(
7577 lsp::Position::new(1, 0),
7578 lsp::Position::new(7, 0),
7579 ),
7580 new_text: "".into(),
7581 },
7582 lsp::TextEdit {
7583 range: lsp::Range::new(
7584 lsp::Position::new(0, 9),
7585 lsp::Position::new(0, 9),
7586 ),
7587 new_text: "
7588 fn f() {
7589 b();
7590 c();
7591 }"
7592 .unindent(),
7593 },
7594 ],
7595 None,
7596 cx,
7597 )
7598 })
7599 .await
7600 .unwrap();
7601
7602 buffer.update(cx, |buffer, cx| {
7603 let edits = edits
7604 .into_iter()
7605 .map(|(range, text)| {
7606 (
7607 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7608 text,
7609 )
7610 })
7611 .collect::<Vec<_>>();
7612
7613 assert_eq!(
7614 edits,
7615 [
7616 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7617 (Point::new(1, 0)..Point::new(2, 0), "".into())
7618 ]
7619 );
7620
7621 for (range, new_text) in edits {
7622 buffer.edit([(range, new_text)], cx);
7623 }
7624 assert_eq!(
7625 buffer.text(),
7626 "
7627 use a::{b, c};
7628
7629 fn f() {
7630 b();
7631 c();
7632 }
7633 "
7634 .unindent()
7635 );
7636 });
7637 }
7638
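    // Collapses the buffer's highlighted chunks into (text, severity) runs to
    // make the assertions in these tests easier to read.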
7639 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7640 buffer: &Buffer,
7641 range: Range<T>,
7642 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7643 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7644 for chunk in buffer.snapshot().chunks(range, true) {
7645 if chunks.last().map_or(false, |prev_chunk| {
7646 prev_chunk.1 == chunk.diagnostic_severity
7647 }) {
7648 chunks.last_mut().unwrap().0.push_str(chunk.text);
7649 } else {
7650 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7651 }
7652 }
7653 chunks
7654 }
7655
7656 #[gpui::test]
7657 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7658 let dir = temp_tree(json!({
7659 "root": {
7660 "dir1": {},
7661 "dir2": {
7662 "dir3": {}
7663 }
7664 }
7665 }));
7666
7667 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7668 let cancel_flag = Default::default();
7669 let results = project
7670 .read_with(cx, |project, cx| {
7671 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7672 })
7673 .await;
7674
7675 assert!(results.is_empty());
7676 }
7677
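    // Verifies that go-to-definition loads the target file into a new,
    // non-visible worktree, which is released once the definition is dropped.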
7678 #[gpui::test(iterations = 10)]
7679 async fn test_definition(cx: &mut gpui::TestAppContext) {
7680 let mut language = Language::new(
7681 LanguageConfig {
7682 name: "Rust".into(),
7683 path_suffixes: vec!["rs".to_string()],
7684 ..Default::default()
7685 },
7686 Some(tree_sitter_rust::language()),
7687 );
7688 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7689
7690 let fs = FakeFs::new(cx.background());
7691 fs.insert_tree(
7692 "/dir",
7693 json!({
7694 "a.rs": "const fn a() { A }",
7695 "b.rs": "const y: i32 = crate::a()",
7696 }),
7697 )
7698 .await;
7699
7700 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7701 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7702
7703 let buffer = project
7704 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7705 .await
7706 .unwrap();
7707
7708 let fake_server = fake_servers.next().await.unwrap();
7709 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7710 let params = params.text_document_position_params;
7711 assert_eq!(
7712 params.text_document.uri.to_file_path().unwrap(),
7713 Path::new("/dir/b.rs"),
7714 );
7715 assert_eq!(params.position, lsp::Position::new(0, 22));
7716
7717 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7718 lsp::Location::new(
7719 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7720 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7721 ),
7722 )))
7723 });
7724
7725 let mut definitions = project
7726 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7727 .await
7728 .unwrap();
7729
7730 assert_eq!(definitions.len(), 1);
7731 let definition = definitions.pop().unwrap();
7732 cx.update(|cx| {
7733 let target_buffer = definition.target.buffer.read(cx);
7734 assert_eq!(
7735 target_buffer
7736 .file()
7737 .unwrap()
7738 .as_local()
7739 .unwrap()
7740 .abs_path(cx),
7741 Path::new("/dir/a.rs"),
7742 );
7743 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
7744 assert_eq!(
7745 list_worktrees(&project, cx),
7746 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7747 );
7748
7749 drop(definition);
7750 });
7751 cx.read(|cx| {
7752 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7753 });
7754
7755 fn list_worktrees<'a>(
7756 project: &'a ModelHandle<Project>,
7757 cx: &'a AppContext,
7758 ) -> Vec<(&'a Path, bool)> {
7759 project
7760 .read(cx)
7761 .worktrees(cx)
7762 .map(|worktree| {
7763 let worktree = worktree.read(cx);
7764 (
7765 worktree.as_local().unwrap().abs_path().as_ref(),
7766 worktree.is_visible(),
7767 )
7768 })
7769 .collect::<Vec<_>>()
7770 }
7771 }
7772
7773 #[gpui::test]
7774 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7775 let mut language = Language::new(
7776 LanguageConfig {
7777 name: "TypeScript".into(),
7778 path_suffixes: vec!["ts".to_string()],
7779 ..Default::default()
7780 },
7781 Some(tree_sitter_typescript::language_typescript()),
7782 );
7783 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7784
7785 let fs = FakeFs::new(cx.background());
7786 fs.insert_tree(
7787 "/dir",
7788 json!({
7789 "a.ts": "",
7790 }),
7791 )
7792 .await;
7793
7794 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7795 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7796 let buffer = project
7797 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7798 .await
7799 .unwrap();
7800
7801 let fake_server = fake_language_servers.next().await.unwrap();
7802
7803 let text = "let a = b.fqn";
7804 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7805 let completions = project.update(cx, |project, cx| {
7806 project.completions(&buffer, text.len(), cx)
7807 });
7808
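        // Respond with a completion that provides an `insert_text` but no edit range.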
7809 fake_server
7810 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7811 Ok(Some(lsp::CompletionResponse::Array(vec![
7812 lsp::CompletionItem {
7813 label: "fullyQualifiedName?".into(),
7814 insert_text: Some("fullyQualifiedName".into()),
7815 ..Default::default()
7816 },
7817 ])))
7818 })
7819 .next()
7820 .await;
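        // With no edit range from the server, the completion's old range falls back to the partial word "fqn" before the cursor.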
7821 let completions = completions.await.unwrap();
7822 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7823 assert_eq!(completions.len(), 1);
7824 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7825 assert_eq!(
7826 completions[0].old_range.to_offset(&snapshot),
7827 text.len() - 3..text.len()
7828 );
7829
7830 let text = "let a = \"atoms/cmp\"";
7831 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7832 let completions = project.update(cx, |project, cx| {
7833 project.completions(&buffer, text.len() - 1, cx)
7834 });
7835
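        // Respond with a completion that provides neither an edit range nor an `insert_text`.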
7836 fake_server
7837 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7838 Ok(Some(lsp::CompletionResponse::Array(vec![
7839 lsp::CompletionItem {
7840 label: "component".into(),
7841 ..Default::default()
7842 },
7843 ])))
7844 })
7845 .next()
7846 .await;
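        // The old range covers only the partial word "cmp" inside the string literal, not the entire string.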
7847 let completions = completions.await.unwrap();
7848 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7849 assert_eq!(completions.len(), 1);
7850 assert_eq!(completions[0].new_text, "component");
7851 assert_eq!(
7852 completions[0].old_range.to_offset(&snapshot),
7853 text.len() - 4..text.len() - 1
7854 );
7855 }
7856
7857 #[gpui::test(iterations = 10)]
7858 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7859 let mut language = Language::new(
7860 LanguageConfig {
7861 name: "TypeScript".into(),
7862 path_suffixes: vec!["ts".to_string()],
7863 ..Default::default()
7864 },
7865 None,
7866 );
7867 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7868
7869 let fs = FakeFs::new(cx.background());
7870 fs.insert_tree(
7871 "/dir",
7872 json!({
7873 "a.ts": "a",
7874 }),
7875 )
7876 .await;
7877
7878 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7879 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7880 let buffer = project
7881 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7882 .await
7883 .unwrap();
7884
7885 let fake_server = fake_language_servers.next().await.unwrap();
7886
7887        // The language server returns code actions that contain commands but no edits.
7888 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7889 fake_server
7890 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7891 Ok(Some(vec![
7892 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7893 title: "The code action".into(),
7894 command: Some(lsp::Command {
7895 title: "The command".into(),
7896 command: "_the/command".into(),
7897 arguments: Some(vec![json!("the-argument")]),
7898 }),
7899 ..Default::default()
7900 }),
7901 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7902 title: "two".into(),
7903 ..Default::default()
7904 }),
7905 ]))
7906 })
7907 .next()
7908 .await;
7909
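        // Apply the first code action, which carries a command rather than edits.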
7910 let action = actions.await.unwrap()[0].clone();
7911 let apply = project.update(cx, |project, cx| {
7912 project.apply_code_action(buffer.clone(), action, true, cx)
7913 });
7914
7915        // Resolving the code action does not populate its edits. In the absence of
7916        // edits, we must execute the action's command instead.
7917 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7918 |action, _| async move { Ok(action) },
7919 );
7920
7921        // While executing the command, the language server sends the editor
7922        // a `workspace/applyEdit` request.
7923 fake_server
7924 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7925 let fake = fake_server.clone();
7926 move |params, _| {
7927 assert_eq!(params.command, "_the/command");
7928 let fake = fake.clone();
7929 async move {
7930 fake.server
7931 .request::<lsp::request::ApplyWorkspaceEdit>(
7932 lsp::ApplyWorkspaceEditParams {
7933 label: None,
7934 edit: lsp::WorkspaceEdit {
7935 changes: Some(
7936 [(
7937 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7938 vec![lsp::TextEdit {
7939 range: lsp::Range::new(
7940 lsp::Position::new(0, 0),
7941 lsp::Position::new(0, 0),
7942 ),
7943 new_text: "X".into(),
7944 }],
7945 )]
7946 .into_iter()
7947 .collect(),
7948 ),
7949 ..Default::default()
7950 },
7951 },
7952 )
7953 .await
7954 .unwrap();
7955 Ok(Some(json!(null)))
7956 }
7957 }
7958 })
7959 .next()
7960 .await;
7961
7962        // Applying the code action returns a project transaction containing the edits
7963        // sent by the language server in its `workspace/applyEdit` request.
7964 let transaction = apply.await.unwrap();
7965 assert!(transaction.0.contains_key(&buffer));
7966 buffer.update(cx, |buffer, cx| {
7967 assert_eq!(buffer.text(), "Xa");
7968 buffer.undo(cx);
7969 assert_eq!(buffer.text(), "a");
7970 });
7971 }
7972
7973 #[gpui::test]
7974 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7975 let fs = FakeFs::new(cx.background());
7976 fs.insert_tree(
7977 "/dir",
7978 json!({
7979 "file1": "the old contents",
7980 }),
7981 )
7982 .await;
7983
7984 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7985 let buffer = project
7986 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7987 .await
7988 .unwrap();
7989 buffer
7990 .update(cx, |buffer, cx| {
7991 assert_eq!(buffer.text(), "the old contents");
7992 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7993 buffer.save(cx)
7994 })
7995 .await
7996 .unwrap();
7997
7998 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7999 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8000 }
8001
8002 #[gpui::test]
8003 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8004 let fs = FakeFs::new(cx.background());
8005 fs.insert_tree(
8006 "/dir",
8007 json!({
8008 "file1": "the old contents",
8009 }),
8010 )
8011 .await;
8012
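        // The worktree's root is the file itself rather than a containing directory.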
8013 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8014 let buffer = project
8015 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8016 .await
8017 .unwrap();
8018 buffer
8019 .update(cx, |buffer, cx| {
8020 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8021 buffer.save(cx)
8022 })
8023 .await
8024 .unwrap();
8025
8026 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8027 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8028 }
8029
8030 #[gpui::test]
8031 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8032 let fs = FakeFs::new(cx.background());
8033 fs.insert_tree("/dir", json!({})).await;
8034
8035 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8036 let buffer = project.update(cx, |project, cx| {
8037 project.create_buffer("", None, cx).unwrap()
8038 });
8039 buffer.update(cx, |buffer, cx| {
8040 buffer.edit([(0..0, "abc")], cx);
8041 assert!(buffer.is_dirty());
8042 assert!(!buffer.has_conflict());
8043 });
8044 project
8045 .update(cx, |project, cx| {
8046 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8047 })
8048 .await
8049 .unwrap();
8050 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
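        // After "save as", the buffer is associated with the new path and is no longer dirty.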
8051 buffer.read_with(cx, |buffer, cx| {
8052 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8053 assert!(!buffer.is_dirty());
8054 assert!(!buffer.has_conflict());
8055 });
8056
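        // Opening the newly saved path should return the same buffer rather than creating a new one.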
8057 let opened_buffer = project
8058 .update(cx, |project, cx| {
8059 project.open_local_buffer("/dir/file1", cx)
8060 })
8061 .await
8062 .unwrap();
8063 assert_eq!(opened_buffer, buffer);
8064 }
8065
8066 #[gpui::test(retries = 5)]
8067 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8068 let dir = temp_tree(json!({
8069 "a": {
8070 "file1": "",
8071 "file2": "",
8072 "file3": "",
8073 },
8074 "b": {
8075 "c": {
8076 "file4": "",
8077 "file5": "",
8078 }
8079 }
8080 }));
8081
8082 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8083 let rpc = project.read_with(cx, |p, _| p.client.clone());
8084
8085 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8086 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8087 async move { buffer.await.unwrap() }
8088 };
8089 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8090 project.read_with(cx, |project, cx| {
8091 let tree = project.worktrees(cx).next().unwrap();
8092 tree.read(cx)
8093 .entry_for_path(path)
8094 .expect(&format!("no entry for path {}", path))
8095 .id
8096 })
8097 };
8098
8099 let buffer2 = buffer_for_path("a/file2", cx).await;
8100 let buffer3 = buffer_for_path("a/file3", cx).await;
8101 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8102 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8103
8104 let file2_id = id_for_path("a/file2", &cx);
8105 let file3_id = id_for_path("a/file3", &cx);
8106 let file4_id = id_for_path("b/c/file4", &cx);
8107
8108 // Create a remote copy of this worktree.
8109 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8110 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8111 let (remote, load_task) = cx.update(|cx| {
8112 Worktree::remote(
8113 1,
8114 1,
8115 initial_snapshot.to_proto(&Default::default(), true),
8116 rpc.clone(),
8117 cx,
8118 )
8119 });
8120        // Wait for the remote worktree to finish processing its initial snapshot.
8121 load_task.await;
8122
8123 cx.read(|cx| {
8124 assert!(!buffer2.read(cx).is_dirty());
8125 assert!(!buffer3.read(cx).is_dirty());
8126 assert!(!buffer4.read(cx).is_dirty());
8127 assert!(!buffer5.read(cx).is_dirty());
8128 });
8129
8130 // Rename and delete files and directories.
8131 tree.flush_fs_events(&cx).await;
8132 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8133 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8134 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8135 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8136 tree.flush_fs_events(&cx).await;
8137
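        // These are the paths that should remain after the renames and deletions, in traversal order.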
8138 let expected_paths = vec![
8139 "a",
8140 "a/file1",
8141 "a/file2.new",
8142 "b",
8143 "d",
8144 "d/file3",
8145 "d/file4",
8146 ];
8147
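        // The local worktree reflects the changes: entry ids survive renames, and open buffers follow their files.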
8148 cx.read(|app| {
8149 assert_eq!(
8150 tree.read(app)
8151 .paths()
8152 .map(|p| p.to_str().unwrap())
8153 .collect::<Vec<_>>(),
8154 expected_paths
8155 );
8156
8157 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8158 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8159 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8160
8161 assert_eq!(
8162 buffer2.read(app).file().unwrap().path().as_ref(),
8163 Path::new("a/file2.new")
8164 );
8165 assert_eq!(
8166 buffer3.read(app).file().unwrap().path().as_ref(),
8167 Path::new("d/file3")
8168 );
8169 assert_eq!(
8170 buffer4.read(app).file().unwrap().path().as_ref(),
8171 Path::new("d/file4")
8172 );
8173 assert_eq!(
8174 buffer5.read(app).file().unwrap().path().as_ref(),
8175 Path::new("b/c/file5")
8176 );
8177
8178 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8179 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8180 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8181 assert!(buffer5.read(app).file().unwrap().is_deleted());
8182 });
8183
8184 // Update the remote worktree. Check that it becomes consistent with the
8185 // local worktree.
8186 remote.update(cx, |remote, cx| {
8187 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8188 &initial_snapshot,
8189 1,
8190 1,
8191 true,
8192 );
8193 remote
8194 .as_remote_mut()
8195 .unwrap()
8196 .snapshot
8197 .apply_remote_update(update_message)
8198 .unwrap();
8199
8200 assert_eq!(
8201 remote
8202 .paths()
8203 .map(|p| p.to_str().unwrap())
8204 .collect::<Vec<_>>(),
8205 expected_paths
8206 );
8207 });
8208 }
8209
8210 #[gpui::test]
8211 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8212 let fs = FakeFs::new(cx.background());
8213 fs.insert_tree(
8214 "/dir",
8215 json!({
8216 "a.txt": "a-contents",
8217 "b.txt": "b-contents",
8218 }),
8219 )
8220 .await;
8221
8222 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8223
8224 // Spawn multiple tasks to open paths, repeating some paths.
8225 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8226 (
8227 p.open_local_buffer("/dir/a.txt", cx),
8228 p.open_local_buffer("/dir/b.txt", cx),
8229 p.open_local_buffer("/dir/a.txt", cx),
8230 )
8231 });
8232
8233 let buffer_a_1 = buffer_a_1.await.unwrap();
8234 let buffer_a_2 = buffer_a_2.await.unwrap();
8235 let buffer_b = buffer_b.await.unwrap();
8236 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8237 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8238
8239 // There is only one buffer per path.
8240 let buffer_a_id = buffer_a_1.id();
8241 assert_eq!(buffer_a_2.id(), buffer_a_id);
8242
8243 // Open the same path again while it is still open.
8244 drop(buffer_a_1);
8245 let buffer_a_3 = project
8246 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8247 .await
8248 .unwrap();
8249
8250 // There's still only one buffer per path.
8251 assert_eq!(buffer_a_3.id(), buffer_a_id);
8252 }
8253
8254 #[gpui::test]
8255 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8256 let fs = FakeFs::new(cx.background());
8257 fs.insert_tree(
8258 "/dir",
8259 json!({
8260 "file1": "abc",
8261 "file2": "def",
8262 "file3": "ghi",
8263 }),
8264 )
8265 .await;
8266
8267 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8268
8269 let buffer1 = project
8270 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8271 .await
8272 .unwrap();
8273 let events = Rc::new(RefCell::new(Vec::new()));
8274
8275        // Initially, the buffer isn't dirty.
8276 buffer1.update(cx, |buffer, cx| {
8277 cx.subscribe(&buffer1, {
8278 let events = events.clone();
8279 move |_, _, event, _| match event {
8280 BufferEvent::Operation(_) => {}
8281 _ => events.borrow_mut().push(event.clone()),
8282 }
8283 })
8284 .detach();
8285
8286 assert!(!buffer.is_dirty());
8287 assert!(events.borrow().is_empty());
8288
8289 buffer.edit([(1..2, "")], cx);
8290 });
8291
8292        // After the first edit, the buffer is dirty and emits `Edited` and `DirtyChanged` events.
8293 buffer1.update(cx, |buffer, cx| {
8294            assert_eq!(buffer.text(), "ac");
8295 assert!(buffer.is_dirty());
8296 assert_eq!(
8297 *events.borrow(),
8298 &[language::Event::Edited, language::Event::DirtyChanged]
8299 );
8300 events.borrow_mut().clear();
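            // Simulate a save by marking the buffer's current version as saved.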
8301 buffer.did_save(
8302 buffer.version(),
8303 buffer.as_rope().fingerprint(),
8304 buffer.file().unwrap().mtime(),
8305 None,
8306 cx,
8307 );
8308 });
8309
8310        // After saving, the buffer is no longer dirty and emits a `Saved` event.
8311 buffer1.update(cx, |buffer, cx| {
8312 assert!(!buffer.is_dirty());
8313 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8314 events.borrow_mut().clear();
8315
8316 buffer.edit([(1..1, "B")], cx);
8317 buffer.edit([(2..2, "D")], cx);
8318 });
8319
8320        // After editing again, the buffer is dirty and emits another `DirtyChanged` event.
8321 buffer1.update(cx, |buffer, cx| {
8322            assert_eq!(buffer.text(), "aBDc");
8323 assert!(buffer.is_dirty());
8324 assert_eq!(
8325 *events.borrow(),
8326 &[
8327 language::Event::Edited,
8328 language::Event::DirtyChanged,
8329 language::Event::Edited,
8330 ],
8331 );
8332 events.borrow_mut().clear();
8333
8334 // After restoring the buffer to its previously-saved state,
8335 // the buffer is not considered dirty anymore.
8336 buffer.edit([(1..3, "")], cx);
8337            assert_eq!(buffer.text(), "ac");
8338 assert!(!buffer.is_dirty());
8339 });
8340
8341 assert_eq!(
8342 *events.borrow(),
8343 &[language::Event::Edited, language::Event::DirtyChanged]
8344 );
8345
8346 // When a file is deleted, the buffer is considered dirty.
8347 let events = Rc::new(RefCell::new(Vec::new()));
8348 let buffer2 = project
8349 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8350 .await
8351 .unwrap();
8352 buffer2.update(cx, |_, cx| {
8353 cx.subscribe(&buffer2, {
8354 let events = events.clone();
8355 move |_, _, event, _| events.borrow_mut().push(event.clone())
8356 })
8357 .detach();
8358 });
8359
8360 fs.remove_file("/dir/file2".as_ref(), Default::default())
8361 .await
8362 .unwrap();
8363 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8364 assert_eq!(
8365 *events.borrow(),
8366 &[
8367 language::Event::DirtyChanged,
8368 language::Event::FileHandleChanged
8369 ]
8370 );
8371
8372        // When a buffer that is already dirty has its file deleted, no `DirtyChanged` event is emitted.
8373 let events = Rc::new(RefCell::new(Vec::new()));
8374 let buffer3 = project
8375 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8376 .await
8377 .unwrap();
8378 buffer3.update(cx, |_, cx| {
8379 cx.subscribe(&buffer3, {
8380 let events = events.clone();
8381 move |_, _, event, _| events.borrow_mut().push(event.clone())
8382 })
8383 .detach();
8384 });
8385
8386 buffer3.update(cx, |buffer, cx| {
8387 buffer.edit([(0..0, "x")], cx);
8388 });
8389 events.borrow_mut().clear();
8390 fs.remove_file("/dir/file3".as_ref(), Default::default())
8391 .await
8392 .unwrap();
8393 buffer3
8394 .condition(&cx, |_, _| !events.borrow().is_empty())
8395 .await;
8396 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8397 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8398 }
8399
8400 #[gpui::test]
8401 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8402 let initial_contents = "aaa\nbbbbb\nc\n";
8403 let fs = FakeFs::new(cx.background());
8404 fs.insert_tree(
8405 "/dir",
8406 json!({
8407 "the-file": initial_contents,
8408 }),
8409 )
8410 .await;
8411 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8412 let buffer = project
8413 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8414 .await
8415 .unwrap();
8416
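        // Place an anchor on each of the first three lines so we can check how they move when the file is reloaded from disk.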
8417 let anchors = (0..3)
8418 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8419 .collect::<Vec<_>>();
8420
8421 // Change the file on disk, adding two new lines of text, and removing
8422 // one line.
8423 buffer.read_with(cx, |buffer, _| {
8424 assert!(!buffer.is_dirty());
8425 assert!(!buffer.has_conflict());
8426 });
8427 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8428 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8429 .await
8430 .unwrap();
8431
8432 // Because the buffer was not modified, it is reloaded from disk. Its
8433 // contents are edited according to the diff between the old and new
8434 // file contents.
8435 buffer
8436 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8437 .await;
8438
8439 buffer.update(cx, |buffer, _| {
8440 assert_eq!(buffer.text(), new_contents);
8441 assert!(!buffer.is_dirty());
8442 assert!(!buffer.has_conflict());
8443
8444 let anchor_positions = anchors
8445 .iter()
8446 .map(|anchor| anchor.to_point(&*buffer))
8447 .collect::<Vec<_>>();
8448 assert_eq!(
8449 anchor_positions,
8450 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8451 );
8452 });
8453
8454 // Modify the buffer
8455 buffer.update(cx, |buffer, cx| {
8456 buffer.edit([(0..0, " ")], cx);
8457 assert!(buffer.is_dirty());
8458 assert!(!buffer.has_conflict());
8459 });
8460
8461 // Change the file on disk again, adding blank lines to the beginning.
8462 fs.save(
8463 "/dir/the-file".as_ref(),
8464 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8465 )
8466 .await
8467 .unwrap();
8468
8469 // Because the buffer is modified, it doesn't reload from disk, but is
8470 // marked as having a conflict.
8471 buffer
8472 .condition(&cx, |buffer, _| buffer.has_conflict())
8473 .await;
8474 }
8475
8476 #[gpui::test]
8477 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8478 cx.foreground().forbid_parking();
8479
8480 let fs = FakeFs::new(cx.background());
8481 fs.insert_tree(
8482 "/the-dir",
8483 json!({
8484 "a.rs": "
8485 fn foo(mut v: Vec<usize>) {
8486 for x in &v {
8487 v.push(1);
8488 }
8489 }
8490 "
8491 .unindent(),
8492 }),
8493 )
8494 .await;
8495
8496 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8497 let buffer = project
8498 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8499 .await
8500 .unwrap();
8501
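        // Publish diagnostics forming two groups: a warning with one related hint, and an error with two related hints.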
8502 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
8503 let message = lsp::PublishDiagnosticsParams {
8504 uri: buffer_uri.clone(),
8505 diagnostics: vec![
8506 lsp::Diagnostic {
8507 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8508 severity: Some(DiagnosticSeverity::WARNING),
8509 message: "error 1".to_string(),
8510 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8511 location: lsp::Location {
8512 uri: buffer_uri.clone(),
8513 range: lsp::Range::new(
8514 lsp::Position::new(1, 8),
8515 lsp::Position::new(1, 9),
8516 ),
8517 },
8518 message: "error 1 hint 1".to_string(),
8519 }]),
8520 ..Default::default()
8521 },
8522 lsp::Diagnostic {
8523 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8524 severity: Some(DiagnosticSeverity::HINT),
8525 message: "error 1 hint 1".to_string(),
8526 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8527 location: lsp::Location {
8528 uri: buffer_uri.clone(),
8529 range: lsp::Range::new(
8530 lsp::Position::new(1, 8),
8531 lsp::Position::new(1, 9),
8532 ),
8533 },
8534 message: "original diagnostic".to_string(),
8535 }]),
8536 ..Default::default()
8537 },
8538 lsp::Diagnostic {
8539 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8540 severity: Some(DiagnosticSeverity::ERROR),
8541 message: "error 2".to_string(),
8542 related_information: Some(vec![
8543 lsp::DiagnosticRelatedInformation {
8544 location: lsp::Location {
8545 uri: buffer_uri.clone(),
8546 range: lsp::Range::new(
8547 lsp::Position::new(1, 13),
8548 lsp::Position::new(1, 15),
8549 ),
8550 },
8551 message: "error 2 hint 1".to_string(),
8552 },
8553 lsp::DiagnosticRelatedInformation {
8554 location: lsp::Location {
8555 uri: buffer_uri.clone(),
8556 range: lsp::Range::new(
8557 lsp::Position::new(1, 13),
8558 lsp::Position::new(1, 15),
8559 ),
8560 },
8561 message: "error 2 hint 2".to_string(),
8562 },
8563 ]),
8564 ..Default::default()
8565 },
8566 lsp::Diagnostic {
8567 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8568 severity: Some(DiagnosticSeverity::HINT),
8569 message: "error 2 hint 1".to_string(),
8570 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8571 location: lsp::Location {
8572 uri: buffer_uri.clone(),
8573 range: lsp::Range::new(
8574 lsp::Position::new(2, 8),
8575 lsp::Position::new(2, 17),
8576 ),
8577 },
8578 message: "original diagnostic".to_string(),
8579 }]),
8580 ..Default::default()
8581 },
8582 lsp::Diagnostic {
8583 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8584 severity: Some(DiagnosticSeverity::HINT),
8585 message: "error 2 hint 2".to_string(),
8586 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8587 location: lsp::Location {
8588 uri: buffer_uri.clone(),
8589 range: lsp::Range::new(
8590 lsp::Position::new(2, 8),
8591 lsp::Position::new(2, 17),
8592 ),
8593 },
8594 message: "original diagnostic".to_string(),
8595 }]),
8596 ..Default::default()
8597 },
8598 ],
8599 version: None,
8600 };
8601
8602 project
8603 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8604 .unwrap();
8605 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8606
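        // Related hints share a `group_id` with their primary diagnostic.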
8607 assert_eq!(
8608 buffer
8609 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8610 .collect::<Vec<_>>(),
8611 &[
8612 DiagnosticEntry {
8613 range: Point::new(1, 8)..Point::new(1, 9),
8614 diagnostic: Diagnostic {
8615 severity: DiagnosticSeverity::WARNING,
8616 message: "error 1".to_string(),
8617 group_id: 0,
8618 is_primary: true,
8619 ..Default::default()
8620 }
8621 },
8622 DiagnosticEntry {
8623 range: Point::new(1, 8)..Point::new(1, 9),
8624 diagnostic: Diagnostic {
8625 severity: DiagnosticSeverity::HINT,
8626 message: "error 1 hint 1".to_string(),
8627 group_id: 0,
8628 is_primary: false,
8629 ..Default::default()
8630 }
8631 },
8632 DiagnosticEntry {
8633 range: Point::new(1, 13)..Point::new(1, 15),
8634 diagnostic: Diagnostic {
8635 severity: DiagnosticSeverity::HINT,
8636 message: "error 2 hint 1".to_string(),
8637 group_id: 1,
8638 is_primary: false,
8639 ..Default::default()
8640 }
8641 },
8642 DiagnosticEntry {
8643 range: Point::new(1, 13)..Point::new(1, 15),
8644 diagnostic: Diagnostic {
8645 severity: DiagnosticSeverity::HINT,
8646 message: "error 2 hint 2".to_string(),
8647 group_id: 1,
8648 is_primary: false,
8649 ..Default::default()
8650 }
8651 },
8652 DiagnosticEntry {
8653 range: Point::new(2, 8)..Point::new(2, 17),
8654 diagnostic: Diagnostic {
8655 severity: DiagnosticSeverity::ERROR,
8656 message: "error 2".to_string(),
8657 group_id: 1,
8658 is_primary: true,
8659 ..Default::default()
8660 }
8661 }
8662 ]
8663 );
8664
8665 assert_eq!(
8666 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8667 &[
8668 DiagnosticEntry {
8669 range: Point::new(1, 8)..Point::new(1, 9),
8670 diagnostic: Diagnostic {
8671 severity: DiagnosticSeverity::WARNING,
8672 message: "error 1".to_string(),
8673 group_id: 0,
8674 is_primary: true,
8675 ..Default::default()
8676 }
8677 },
8678 DiagnosticEntry {
8679 range: Point::new(1, 8)..Point::new(1, 9),
8680 diagnostic: Diagnostic {
8681 severity: DiagnosticSeverity::HINT,
8682 message: "error 1 hint 1".to_string(),
8683 group_id: 0,
8684 is_primary: false,
8685 ..Default::default()
8686 }
8687 },
8688 ]
8689 );
8690 assert_eq!(
8691 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8692 &[
8693 DiagnosticEntry {
8694 range: Point::new(1, 13)..Point::new(1, 15),
8695 diagnostic: Diagnostic {
8696 severity: DiagnosticSeverity::HINT,
8697 message: "error 2 hint 1".to_string(),
8698 group_id: 1,
8699 is_primary: false,
8700 ..Default::default()
8701 }
8702 },
8703 DiagnosticEntry {
8704 range: Point::new(1, 13)..Point::new(1, 15),
8705 diagnostic: Diagnostic {
8706 severity: DiagnosticSeverity::HINT,
8707 message: "error 2 hint 2".to_string(),
8708 group_id: 1,
8709 is_primary: false,
8710 ..Default::default()
8711 }
8712 },
8713 DiagnosticEntry {
8714 range: Point::new(2, 8)..Point::new(2, 17),
8715 diagnostic: Diagnostic {
8716 severity: DiagnosticSeverity::ERROR,
8717 message: "error 2".to_string(),
8718 group_id: 1,
8719 is_primary: true,
8720 ..Default::default()
8721 }
8722 }
8723 ]
8724 );
8725 }
8726
8727 #[gpui::test]
8728 async fn test_rename(cx: &mut gpui::TestAppContext) {
8729 cx.foreground().forbid_parking();
8730
8731 let mut language = Language::new(
8732 LanguageConfig {
8733 name: "Rust".into(),
8734 path_suffixes: vec!["rs".to_string()],
8735 ..Default::default()
8736 },
8737 Some(tree_sitter_rust::language()),
8738 );
8739 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8740 capabilities: lsp::ServerCapabilities {
8741 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8742 prepare_provider: Some(true),
8743 work_done_progress_options: Default::default(),
8744 })),
8745 ..Default::default()
8746 },
8747 ..Default::default()
8748 });
8749
8750 let fs = FakeFs::new(cx.background());
8751 fs.insert_tree(
8752 "/dir",
8753 json!({
8754 "one.rs": "const ONE: usize = 1;",
8755 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8756 }),
8757 )
8758 .await;
8759
8760 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8761 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8762 let buffer = project
8763 .update(cx, |project, cx| {
8764 project.open_local_buffer("/dir/one.rs", cx)
8765 })
8766 .await
8767 .unwrap();
8768
8769 let fake_server = fake_servers.next().await.unwrap();
8770
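        // Offset 7 falls inside the identifier `ONE` in `one.rs`.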
8771 let response = project.update(cx, |project, cx| {
8772 project.prepare_rename(buffer.clone(), 7, cx)
8773 });
8774 fake_server
8775 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8776 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8777 assert_eq!(params.position, lsp::Position::new(0, 7));
8778 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8779 lsp::Position::new(0, 6),
8780 lsp::Position::new(0, 9),
8781 ))))
8782 })
8783 .next()
8784 .await
8785 .unwrap();
8786 let range = response.await.unwrap().unwrap();
8787 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8788 assert_eq!(range, 6..9);
8789
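        // Perform the rename, which edits both `one.rs` and `two.rs`.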
8790 let response = project.update(cx, |project, cx| {
8791 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8792 });
8793 fake_server
8794 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8795 assert_eq!(
8796 params.text_document_position.text_document.uri.as_str(),
8797 "file:///dir/one.rs"
8798 );
8799 assert_eq!(
8800 params.text_document_position.position,
8801 lsp::Position::new(0, 7)
8802 );
8803 assert_eq!(params.new_name, "THREE");
8804 Ok(Some(lsp::WorkspaceEdit {
8805 changes: Some(
8806 [
8807 (
8808 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8809 vec![lsp::TextEdit::new(
8810 lsp::Range::new(
8811 lsp::Position::new(0, 6),
8812 lsp::Position::new(0, 9),
8813 ),
8814 "THREE".to_string(),
8815 )],
8816 ),
8817 (
8818 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8819 vec![
8820 lsp::TextEdit::new(
8821 lsp::Range::new(
8822 lsp::Position::new(0, 24),
8823 lsp::Position::new(0, 27),
8824 ),
8825 "THREE".to_string(),
8826 ),
8827 lsp::TextEdit::new(
8828 lsp::Range::new(
8829 lsp::Position::new(0, 35),
8830 lsp::Position::new(0, 38),
8831 ),
8832 "THREE".to_string(),
8833 ),
8834 ],
8835 ),
8836 ]
8837 .into_iter()
8838 .collect(),
8839 ),
8840 ..Default::default()
8841 }))
8842 })
8843 .next()
8844 .await
8845 .unwrap();
8846 let mut transaction = response.await.unwrap().0;
8847 assert_eq!(transaction.len(), 2);
8848 assert_eq!(
8849 transaction
8850 .remove_entry(&buffer)
8851 .unwrap()
8852 .0
8853 .read_with(cx, |buffer, _| buffer.text()),
8854 "const THREE: usize = 1;"
8855 );
8856 assert_eq!(
8857 transaction
8858 .into_keys()
8859 .next()
8860 .unwrap()
8861 .read_with(cx, |buffer, _| buffer.text()),
8862 "const TWO: usize = one::THREE + one::THREE;"
8863 );
8864 }
8865
8866 #[gpui::test]
8867 async fn test_search(cx: &mut gpui::TestAppContext) {
8868 let fs = FakeFs::new(cx.background());
8869 fs.insert_tree(
8870 "/dir",
8871 json!({
8872 "one.rs": "const ONE: usize = 1;",
8873 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8874 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8875 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8876 }),
8877 )
8878 .await;
8879 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
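        // Initially, "TWO" appears only in `two.rs` and `three.rs`.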
8880 assert_eq!(
8881 search(&project, SearchQuery::text("TWO", false, true), cx)
8882 .await
8883 .unwrap(),
8884 HashMap::from_iter([
8885 ("two.rs".to_string(), vec![6..9]),
8886 ("three.rs".to_string(), vec![37..40])
8887 ])
8888 );
8889
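        // Edit an open buffer so it now contains matches; the search results should include unsaved buffer contents.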
8890 let buffer_4 = project
8891 .update(cx, |project, cx| {
8892 project.open_local_buffer("/dir/four.rs", cx)
8893 })
8894 .await
8895 .unwrap();
8896 buffer_4.update(cx, |buffer, cx| {
8897 let text = "two::TWO";
8898 buffer.edit([(20..28, text), (31..43, text)], cx);
8899 });
8900
8901 assert_eq!(
8902 search(&project, SearchQuery::text("TWO", false, true), cx)
8903 .await
8904 .unwrap(),
8905 HashMap::from_iter([
8906 ("two.rs".to_string(), vec![6..9]),
8907 ("three.rs".to_string(), vec![37..40]),
8908 ("four.rs".to_string(), vec![25..28, 36..39])
8909 ])
8910 );
8911
8912 async fn search(
8913 project: &ModelHandle<Project>,
8914 query: SearchQuery,
8915 cx: &mut gpui::TestAppContext,
8916 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8917 let results = project
8918 .update(cx, |project, cx| project.search(query, cx))
8919 .await?;
8920
8921 Ok(results
8922 .into_iter()
8923 .map(|(buffer, ranges)| {
8924 buffer.read_with(cx, |buffer, _| {
8925 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8926 let ranges = ranges
8927 .into_iter()
8928 .map(|range| range.to_offset(buffer))
8929 .collect::<Vec<_>>();
8930 (path, ranges)
8931 })
8932 })
8933 .collect())
8934 }
8935 }
8936}