1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
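/// State for a single project: the worktrees being edited, the buffers that are
/// currently open, the language servers started for those buffers, and the
/// collaborators participating over RPC.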
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 opened_buffers: HashMap<u64, OpenBuffer>,
68 nonce: u128,
69}
70
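/// An open buffer is held strongly while the project is shared or remote, weakly
/// while it is local and unshared, and as a queue of pending operations while the
/// buffer itself is still loading.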
71enum OpenBuffer {
72 Strong(ModelHandle<Buffer>),
73 Weak(WeakModelHandle<Buffer>),
74 Loading(Vec<Operation>),
75}
76
77enum WorktreeHandle {
78 Strong(ModelHandle<Worktree>),
79 Weak(WeakModelHandle<Worktree>),
80}
81
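/// Whether this project is hosted locally or was joined from another peer.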
82enum ProjectClientState {
83 Local {
84 is_shared: bool,
85 remote_id_tx: watch::Sender<Option<u64>>,
86 remote_id_rx: watch::Receiver<Option<u64>>,
87 _maintain_remote_id_task: Task<Option<()>>,
88 },
89 Remote {
90 sharing_has_stopped: bool,
91 remote_id: u64,
92 replica_id: ReplicaId,
93 },
94}
95
96#[derive(Clone, Debug)]
97pub struct Collaborator {
98 pub user: Arc<User>,
99 pub peer_id: PeerId,
100 pub replica_id: ReplicaId,
101}
102
103#[derive(Clone, Debug, PartialEq)]
104pub enum Event {
105 ActiveEntryChanged(Option<ProjectEntry>),
106 WorktreeRemoved(WorktreeId),
107 DiskBasedDiagnosticsStarted,
108 DiskBasedDiagnosticsUpdated,
109 DiskBasedDiagnosticsFinished,
110 DiagnosticsUpdated(ProjectPath),
111}
112
113#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
114pub struct ProjectPath {
115 pub worktree_id: WorktreeId,
116 pub path: Arc<Path>,
117}
118
119#[derive(Clone, Debug, Default, PartialEq)]
120pub struct DiagnosticSummary {
121 pub error_count: usize,
122 pub warning_count: usize,
123 pub info_count: usize,
124 pub hint_count: usize,
125}
126
127#[derive(Debug)]
128pub struct Location {
129 pub buffer: ModelHandle<Buffer>,
130 pub range: Range<language::Anchor>,
131}
132
133#[derive(Debug)]
134pub struct DocumentHighlight {
135 pub range: Range<language::Anchor>,
136 pub kind: DocumentHighlightKind,
137}
138
139#[derive(Clone, Debug)]
140pub struct Symbol {
141 pub source_worktree_id: WorktreeId,
142 pub worktree_id: WorktreeId,
143 pub language_name: String,
144 pub path: PathBuf,
145 pub label: CodeLabel,
146 pub name: String,
147 pub kind: lsp::SymbolKind,
148 pub range: Range<PointUtf16>,
149 pub signature: [u8; 32],
150}
151
152#[derive(Default)]
153pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
154
155impl DiagnosticSummary {
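    /// Tallies the primary diagnostics in the given entries by severity.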
156 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
157 let mut this = Self {
158 error_count: 0,
159 warning_count: 0,
160 info_count: 0,
161 hint_count: 0,
162 };
163
164 for entry in diagnostics {
165 if entry.diagnostic.is_primary {
166 match entry.diagnostic.severity {
167 DiagnosticSeverity::ERROR => this.error_count += 1,
168 DiagnosticSeverity::WARNING => this.warning_count += 1,
169 DiagnosticSeverity::INFORMATION => this.info_count += 1,
170 DiagnosticSeverity::HINT => this.hint_count += 1,
171 _ => {}
172 }
173 }
174 }
175
176 this
177 }
178
179 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
180 proto::DiagnosticSummary {
181 path: path.to_string_lossy().to_string(),
182 error_count: self.error_count as u32,
183 warning_count: self.warning_count as u32,
184 info_count: self.info_count as u32,
185 hint_count: self.hint_count as u32,
186 }
187 }
188}
189
190#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
191pub struct ProjectEntry {
192 pub worktree_id: WorktreeId,
193 pub entry_id: usize,
194}
195
196impl Project {
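    /// Registers all of the project's RPC message and request handlers with the client.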
197 pub fn init(client: &Arc<Client>) {
198 client.add_entity_message_handler(Self::handle_add_collaborator);
199 client.add_entity_message_handler(Self::handle_buffer_reloaded);
200 client.add_entity_message_handler(Self::handle_buffer_saved);
201 client.add_entity_message_handler(Self::handle_close_buffer);
202 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
203 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
204 client.add_entity_message_handler(Self::handle_remove_collaborator);
205 client.add_entity_message_handler(Self::handle_register_worktree);
206 client.add_entity_message_handler(Self::handle_unregister_worktree);
207 client.add_entity_message_handler(Self::handle_unshare_project);
208 client.add_entity_message_handler(Self::handle_update_buffer_file);
209 client.add_entity_message_handler(Self::handle_update_buffer);
210 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
211 client.add_entity_message_handler(Self::handle_update_worktree);
212 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
213 client.add_entity_request_handler(Self::handle_apply_code_action);
214 client.add_entity_request_handler(Self::handle_format_buffers);
215 client.add_entity_request_handler(Self::handle_get_code_actions);
216 client.add_entity_request_handler(Self::handle_get_completions);
217 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
218 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
222 client.add_entity_request_handler(Self::handle_search_project);
223 client.add_entity_request_handler(Self::handle_get_project_symbols);
224 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
225 client.add_entity_request_handler(Self::handle_open_buffer);
226 client.add_entity_request_handler(Self::handle_save_buffer);
227 }
228
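    /// Creates a local project. A background task watches the client's connection
    /// status and registers the project (and its worktrees) with the server whenever
    /// a connection is established.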
229 pub fn local(
230 client: Arc<Client>,
231 user_store: ModelHandle<UserStore>,
232 languages: Arc<LanguageRegistry>,
233 fs: Arc<dyn Fs>,
234 cx: &mut MutableAppContext,
235 ) -> ModelHandle<Self> {
236 cx.add_model(|cx: &mut ModelContext<Self>| {
237 let (remote_id_tx, remote_id_rx) = watch::channel();
238 let _maintain_remote_id_task = cx.spawn_weak({
239 let rpc = client.clone();
240 move |this, mut cx| {
241 async move {
242 let mut status = rpc.status();
243 while let Some(status) = status.next().await {
244 if let Some(this) = this.upgrade(&cx) {
245 let remote_id = if let client::Status::Connected { .. } = status {
246 let response = rpc.request(proto::RegisterProject {}).await?;
247 Some(response.project_id)
248 } else {
249 None
250 };
251
252 if let Some(project_id) = remote_id {
253 let mut registrations = Vec::new();
254 this.update(&mut cx, |this, cx| {
255 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
256 registrations.push(worktree.update(
257 cx,
258 |worktree, cx| {
259 let worktree = worktree.as_local_mut().unwrap();
260 worktree.register(project_id, cx)
261 },
262 ));
263 }
264 });
265 for registration in registrations {
266 registration.await?;
267 }
268 }
269 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
270 }
271 }
272 Ok(())
273 }
274 .log_err()
275 }
276 });
277
278 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
279 Self {
280 worktrees: Default::default(),
281 collaborators: Default::default(),
282 opened_buffers: Default::default(),
283 shared_buffers: Default::default(),
284 loading_buffers: Default::default(),
285 client_state: ProjectClientState::Local {
286 is_shared: false,
287 remote_id_tx,
288 remote_id_rx,
289 _maintain_remote_id_task,
290 },
291 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
292 subscriptions: Vec::new(),
293 active_entry: None,
294 languages,
295 client,
296 user_store,
297 fs,
298 language_servers_with_diagnostics_running: 0,
299 language_servers: Default::default(),
300 started_language_servers: Default::default(),
301 nonce: StdRng::from_entropy().gen(),
302 }
303 })
304 }
305
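    /// Joins a project shared by another peer, constructing remote worktrees from the
    /// join response and loading the collaborators' user data.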
306 pub async fn remote(
307 remote_id: u64,
308 client: Arc<Client>,
309 user_store: ModelHandle<UserStore>,
310 languages: Arc<LanguageRegistry>,
311 fs: Arc<dyn Fs>,
312 cx: &mut AsyncAppContext,
313 ) -> Result<ModelHandle<Self>> {
314 client.authenticate_and_connect(&cx).await?;
315
316 let response = client
317 .request(proto::JoinProject {
318 project_id: remote_id,
319 })
320 .await?;
321
322 let replica_id = response.replica_id as ReplicaId;
323
324 let mut worktrees = Vec::new();
325 for worktree in response.worktrees {
326 let (worktree, load_task) = cx
327 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
328 worktrees.push(worktree);
329 load_task.detach();
330 }
331
332 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
333 let this = cx.add_model(|cx| {
334 let mut this = Self {
335 worktrees: Vec::new(),
336 loading_buffers: Default::default(),
337 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
338 shared_buffers: Default::default(),
339 active_entry: None,
340 collaborators: Default::default(),
341 languages,
342 user_store: user_store.clone(),
343 fs,
344 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
345 client,
346 client_state: ProjectClientState::Remote {
347 sharing_has_stopped: false,
348 remote_id,
349 replica_id,
350 },
351 language_servers_with_diagnostics_running: 0,
352 language_servers: Default::default(),
353 started_language_servers: Default::default(),
354 opened_buffers: Default::default(),
355 nonce: StdRng::from_entropy().gen(),
356 };
357 for worktree in worktrees {
358 this.add_worktree(&worktree, cx);
359 }
360 this
361 });
362
363 let user_ids = response
364 .collaborators
365 .iter()
366 .map(|peer| peer.user_id)
367 .collect();
368 user_store
369 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
370 .await?;
371 let mut collaborators = HashMap::default();
372 for message in response.collaborators {
373 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
374 collaborators.insert(collaborator.peer_id, collaborator);
375 }
376
377 this.update(cx, |this, _| {
378 this.collaborators = collaborators;
379 });
380
381 Ok(this)
382 }
383
384 #[cfg(any(test, feature = "test-support"))]
385 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
386 let languages = Arc::new(LanguageRegistry::new());
387 let http_client = client::test::FakeHttpClient::with_404_response();
388 let client = client::Client::new(http_client.clone());
389 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
390 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
391 }
392
393 #[cfg(any(test, feature = "test-support"))]
394 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
395 self.opened_buffers
396 .get(&remote_id)
397 .and_then(|buffer| buffer.upgrade(cx))
398 }
399
400 #[cfg(any(test, feature = "test-support"))]
401 pub fn has_deferred_operations(&self, cx: &AppContext) -> bool {
402 self.opened_buffers.values().any(|buffer| match buffer {
403 OpenBuffer::Strong(buffer) => buffer.read(cx).deferred_ops_len() > 0,
404 OpenBuffer::Weak(buffer) => buffer
405 .upgrade(cx)
406 .map_or(false, |buffer| buffer.read(cx).deferred_ops_len() > 0),
407 OpenBuffer::Loading(_) => false,
408 })
409 }
410
411 #[cfg(any(test, feature = "test-support"))]
412 pub fn languages(&self) -> &Arc<LanguageRegistry> {
413 &self.languages
414 }
415
416 pub fn fs(&self) -> &Arc<dyn Fs> {
417 &self.fs
418 }
419
420 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
421 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
422 *remote_id_tx.borrow_mut() = remote_id;
423 }
424
425 self.subscriptions.clear();
426 if let Some(remote_id) = remote_id {
427 self.subscriptions
428 .push(self.client.add_model_for_remote_entity(remote_id, cx));
429 }
430 }
431
432 pub fn remote_id(&self) -> Option<u64> {
433 match &self.client_state {
434 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
435 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
436 }
437 }
438
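    /// Resolves once the project has been assigned a remote id by the server.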
439 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
440 let mut id = None;
441 let mut watch = None;
442 match &self.client_state {
443 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
444 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
445 }
446
447 async move {
448 if let Some(id) = id {
449 return id;
450 }
451 let mut watch = watch.unwrap();
452 loop {
453 let id = *watch.borrow();
454 if let Some(id) = id {
455 return id;
456 }
457 watch.next().await;
458 }
459 }
460 }
461
462 pub fn replica_id(&self) -> ReplicaId {
463 match &self.client_state {
464 ProjectClientState::Local { .. } => 0,
465 ProjectClientState::Remote { replica_id, .. } => *replica_id,
466 }
467 }
468
469 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
470 &self.collaborators
471 }
472
473 pub fn worktrees<'a>(
474 &'a self,
475 cx: &'a AppContext,
476 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
477 self.worktrees
478 .iter()
479 .filter_map(move |worktree| worktree.upgrade(cx))
480 }
481
482 pub fn strong_worktrees<'a>(
483 &'a self,
484 cx: &'a AppContext,
485 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
486 self.worktrees.iter().filter_map(|worktree| {
487 worktree.upgrade(cx).and_then(|worktree| {
488 if worktree.read(cx).is_weak() {
489 None
490 } else {
491 Some(worktree)
492 }
493 })
494 })
495 }
496
497 pub fn worktree_for_id(
498 &self,
499 id: WorktreeId,
500 cx: &AppContext,
501 ) -> Option<ModelHandle<Worktree>> {
502 self.worktrees(cx)
503 .find(|worktree| worktree.read(cx).id() == id)
504 }
505
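    /// Shares this local project with collaborators: upgrades open buffers to strong
    /// handles, sends `ShareProject`, and shares each worktree.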
506 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
507 let rpc = self.client.clone();
508 cx.spawn(|this, mut cx| async move {
509 let project_id = this.update(&mut cx, |this, cx| {
510 if let ProjectClientState::Local {
511 is_shared,
512 remote_id_rx,
513 ..
514 } = &mut this.client_state
515 {
516 *is_shared = true;
517 for open_buffer in this.opened_buffers.values_mut() {
518 match open_buffer {
519 OpenBuffer::Strong(_) => {}
520 OpenBuffer::Weak(buffer) => {
521 if let Some(buffer) = buffer.upgrade(cx) {
522 *open_buffer = OpenBuffer::Strong(buffer);
523 }
524 }
525 OpenBuffer::Loading(_) => unreachable!(),
526 }
527 }
528 remote_id_rx
529 .borrow()
530 .ok_or_else(|| anyhow!("no project id"))
531 } else {
532 Err(anyhow!("can't share a remote project"))
533 }
534 })?;
535
536 rpc.request(proto::ShareProject { project_id }).await?;
537
538 let mut tasks = Vec::new();
539 this.update(&mut cx, |this, cx| {
540 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
541 worktree.update(cx, |worktree, cx| {
542 let worktree = worktree.as_local_mut().unwrap();
543 tasks.push(worktree.share(project_id, cx));
544 });
545 }
546 });
547 for task in tasks {
548 task.await?;
549 }
550 this.update(&mut cx, |_, cx| cx.notify());
551 Ok(())
552 })
553 }
554
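    /// Stops sharing this local project: downgrades open buffers to weak handles,
    /// notifies the server, and unshares each worktree.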
555 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
556 let rpc = self.client.clone();
557 cx.spawn(|this, mut cx| async move {
558 let project_id = this.update(&mut cx, |this, _| {
559 if let ProjectClientState::Local {
560 is_shared,
561 remote_id_rx,
562 ..
563 } = &mut this.client_state
564 {
565 *is_shared = false;
566 for open_buffer in this.opened_buffers.values_mut() {
567 match open_buffer {
568 OpenBuffer::Strong(buffer) => {
569 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
570 }
571 OpenBuffer::Weak(_) => {}
572 OpenBuffer::Loading(_) => unreachable!(),
573 }
574 }
575 remote_id_rx
576 .borrow()
577 .ok_or_else(|| anyhow!("no project id"))
578 } else {
                    Err(anyhow!("can't unshare a remote project"))
580 }
581 })?;
582
583 rpc.send(proto::UnshareProject { project_id })?;
584 this.update(&mut cx, |this, cx| {
585 this.collaborators.clear();
586 this.shared_buffers.clear();
587 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
588 worktree.update(cx, |worktree, _| {
589 worktree.as_local_mut().unwrap().unshare();
590 });
591 }
592 cx.notify()
593 });
594 Ok(())
595 })
596 }
597
598 pub fn is_read_only(&self) -> bool {
599 match &self.client_state {
600 ProjectClientState::Local { .. } => false,
601 ProjectClientState::Remote {
602 sharing_has_stopped,
603 ..
604 } => *sharing_has_stopped,
605 }
606 }
607
608 pub fn is_local(&self) -> bool {
609 match &self.client_state {
610 ProjectClientState::Local { .. } => true,
611 ProjectClientState::Remote { .. } => false,
612 }
613 }
614
615 pub fn is_remote(&self) -> bool {
616 !self.is_local()
617 }
618
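    /// Opens the buffer for the given project path, reusing an already-open buffer or
    /// an in-flight load for the same path when one exists.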
619 pub fn open_buffer(
620 &mut self,
621 path: impl Into<ProjectPath>,
622 cx: &mut ModelContext<Self>,
623 ) -> Task<Result<ModelHandle<Buffer>>> {
624 let project_path = path.into();
625 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
626 worktree
627 } else {
628 return Task::ready(Err(anyhow!("no such worktree")));
629 };
630
631 // If there is already a buffer for the given path, then return it.
632 let existing_buffer = self.get_open_buffer(&project_path, cx);
633 if let Some(existing_buffer) = existing_buffer {
634 return Task::ready(Ok(existing_buffer));
635 }
636
637 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
638 // If the given path is already being loaded, then wait for that existing
639 // task to complete and return the same buffer.
640 hash_map::Entry::Occupied(e) => e.get().clone(),
641
642 // Otherwise, record the fact that this path is now being loaded.
643 hash_map::Entry::Vacant(entry) => {
644 let (mut tx, rx) = postage::watch::channel();
645 entry.insert(rx.clone());
646
647 let load_buffer = if worktree.read(cx).is_local() {
648 self.open_local_buffer(&project_path.path, &worktree, cx)
649 } else {
650 self.open_remote_buffer(&project_path.path, &worktree, cx)
651 };
652
653 cx.spawn(move |this, mut cx| async move {
654 let load_result = load_buffer.await;
655 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
656 // Record the fact that the buffer is no longer loading.
657 this.loading_buffers.remove(&project_path);
658 let buffer = load_result.map_err(Arc::new)?;
659 Ok(buffer)
660 }));
661 })
662 .detach();
663 rx
664 }
665 };
666
667 cx.foreground().spawn(async move {
668 loop {
669 if let Some(result) = loading_watch.borrow().as_ref() {
670 match result {
671 Ok(buffer) => return Ok(buffer.clone()),
672 Err(error) => return Err(anyhow!("{}", error)),
673 }
674 }
675 loading_watch.next().await;
676 }
677 })
678 }
679
680 fn open_local_buffer(
681 &mut self,
682 path: &Arc<Path>,
683 worktree: &ModelHandle<Worktree>,
684 cx: &mut ModelContext<Self>,
685 ) -> Task<Result<ModelHandle<Buffer>>> {
686 let load_buffer = worktree.update(cx, |worktree, cx| {
687 let worktree = worktree.as_local_mut().unwrap();
688 worktree.load_buffer(path, cx)
689 });
690 let worktree = worktree.downgrade();
691 cx.spawn(|this, mut cx| async move {
692 let buffer = load_buffer.await?;
693 let worktree = worktree
694 .upgrade(&cx)
695 .ok_or_else(|| anyhow!("worktree was removed"))?;
696 this.update(&mut cx, |this, cx| {
697 this.register_buffer(&buffer, Some(&worktree), cx)
698 })?;
699 Ok(buffer)
700 })
701 }
702
703 fn open_remote_buffer(
704 &mut self,
705 path: &Arc<Path>,
706 worktree: &ModelHandle<Worktree>,
707 cx: &mut ModelContext<Self>,
708 ) -> Task<Result<ModelHandle<Buffer>>> {
709 let rpc = self.client.clone();
710 let project_id = self.remote_id().unwrap();
711 let remote_worktree_id = worktree.read(cx).id();
712 let path = path.clone();
713 let path_string = path.to_string_lossy().to_string();
714 cx.spawn(|this, mut cx| async move {
715 let response = rpc
716 .request(proto::OpenBuffer {
717 project_id,
718 worktree_id: remote_worktree_id.to_proto(),
719 path: path_string,
720 })
721 .await?;
722 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
723 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
724 .await
725 })
726 }
727
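    /// Opens a buffer for a `file://` URI reported by a language server, creating a
    /// new worktree for paths that lie outside the project's existing worktrees.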
728 fn open_local_buffer_via_lsp(
729 &mut self,
730 abs_path: lsp::Url,
731 lang_name: String,
732 lang_server: Arc<LanguageServer>,
733 cx: &mut ModelContext<Self>,
734 ) -> Task<Result<ModelHandle<Buffer>>> {
735 cx.spawn(|this, mut cx| async move {
736 let abs_path = abs_path
737 .to_file_path()
738 .map_err(|_| anyhow!("can't convert URI to path"))?;
739 let (worktree, relative_path) = if let Some(result) =
740 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
741 {
742 result
743 } else {
744 let worktree = this
745 .update(&mut cx, |this, cx| {
746 this.create_local_worktree(&abs_path, true, cx)
747 })
748 .await?;
749 this.update(&mut cx, |this, cx| {
750 this.language_servers
751 .insert((worktree.read(cx).id(), lang_name), lang_server);
752 });
753 (worktree, PathBuf::new())
754 };
755
756 let project_path = ProjectPath {
757 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
758 path: relative_path.into(),
759 };
760 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
761 .await
762 })
763 }
764
765 pub fn save_buffer_as(
766 &self,
767 buffer: ModelHandle<Buffer>,
768 abs_path: PathBuf,
769 cx: &mut ModelContext<Project>,
770 ) -> Task<Result<()>> {
771 let worktree_task = self.find_or_create_local_worktree(&abs_path, false, cx);
772 cx.spawn(|this, mut cx| async move {
773 let (worktree, path) = worktree_task.await?;
774 worktree
775 .update(&mut cx, |worktree, cx| {
776 worktree
777 .as_local_mut()
778 .unwrap()
779 .save_buffer_as(buffer.clone(), path, cx)
780 })
781 .await?;
782 this.update(&mut cx, |this, cx| {
783 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
784 });
785 Ok(())
786 })
787 }
788
789 #[cfg(any(test, feature = "test-support"))]
790 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
791 let path = path.into();
792 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
793 self.opened_buffers.iter().any(|(_, buffer)| {
794 if let Some(buffer) = buffer.upgrade(cx) {
795 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
796 if file.worktree == worktree && file.path() == &path.path {
797 return true;
798 }
799 }
800 }
801 false
802 })
803 } else {
804 false
805 }
806 }
807
808 pub fn get_open_buffer(
809 &mut self,
810 path: &ProjectPath,
811 cx: &mut ModelContext<Self>,
812 ) -> Option<ModelHandle<Buffer>> {
813 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
814 self.opened_buffers.values().find_map(|buffer| {
815 let buffer = buffer.upgrade(cx)?;
816 let file = File::from_dyn(buffer.read(cx).file())?;
817 if file.worktree == worktree && file.path() == &path.path {
818 Some(buffer)
819 } else {
820 None
821 }
822 })
823 }
824
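    /// Records a newly opened buffer, applying any operations that arrived while it
    /// was loading and assigning a language to it.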
825 fn register_buffer(
826 &mut self,
827 buffer: &ModelHandle<Buffer>,
828 worktree: Option<&ModelHandle<Worktree>>,
829 cx: &mut ModelContext<Self>,
830 ) -> Result<()> {
831 let remote_id = buffer.read(cx).remote_id();
832 let open_buffer = if self.is_remote() || self.is_shared() {
833 OpenBuffer::Strong(buffer.clone())
834 } else {
835 OpenBuffer::Weak(buffer.downgrade())
836 };
837
838 match self.opened_buffers.insert(remote_id, open_buffer) {
839 None => {}
840 Some(OpenBuffer::Loading(operations)) => {
841 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
842 }
843 Some(OpenBuffer::Weak(existing_handle)) => {
844 if existing_handle.upgrade(cx).is_some() {
845 Err(anyhow!(
846 "already registered buffer with remote id {}",
847 remote_id
848 ))?
849 }
850 }
851 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
852 "already registered buffer with remote id {}",
853 remote_id
854 ))?,
855 }
856 self.assign_language_to_buffer(&buffer, worktree, cx);
857 Ok(())
858 }
859
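    /// Detects the buffer's language from its path and, for local worktrees, starts or
    /// reuses a language server and applies any previously stored diagnostics.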
860 fn assign_language_to_buffer(
861 &mut self,
862 buffer: &ModelHandle<Buffer>,
863 worktree: Option<&ModelHandle<Worktree>>,
864 cx: &mut ModelContext<Self>,
865 ) -> Option<()> {
866 let (path, full_path) = {
867 let file = buffer.read(cx).file()?;
868 (file.path().clone(), file.full_path(cx))
869 };
870
871 // If the buffer has a language, set it and start/assign the language server
872 if let Some(language) = self.languages.select_language(&full_path) {
873 buffer.update(cx, |buffer, cx| {
874 buffer.set_language(Some(language.clone()), cx);
875 });
876
877 // For local worktrees, start a language server if needed.
878 // Also assign the language server and any previously stored diagnostics to the buffer.
879 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
880 let worktree_id = local_worktree.id();
881 let worktree_abs_path = local_worktree.abs_path().clone();
882 let buffer = buffer.downgrade();
883 let language_server =
884 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
885
886 cx.spawn_weak(|_, mut cx| async move {
887 if let Some(language_server) = language_server.await {
888 if let Some(buffer) = buffer.upgrade(&cx) {
889 buffer.update(&mut cx, |buffer, cx| {
890 buffer.set_language_server(Some(language_server), cx);
891 });
892 }
893 }
894 })
895 .detach();
896 }
897 }
898
899 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
900 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
901 buffer.update(cx, |buffer, cx| {
902 buffer.update_diagnostics(diagnostics, None, cx).log_err();
903 });
904 }
905 }
906
907 None
908 }
909
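    /// Starts (or returns the in-flight startup task for) the language server for the
    /// given worktree and language, keeping at most one server per worktree/language
    /// pair and forwarding its diagnostics and progress notifications as project events.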
910 fn start_language_server(
911 &mut self,
912 worktree_id: WorktreeId,
913 worktree_path: Arc<Path>,
914 language: Arc<Language>,
915 cx: &mut ModelContext<Self>,
916 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
917 enum LspEvent {
918 DiagnosticsStart,
919 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
920 DiagnosticsFinish,
921 }
922
923 let key = (worktree_id, language.name().to_string());
924 self.started_language_servers
925 .entry(key.clone())
926 .or_insert_with(|| {
927 let language_server = self.languages.start_language_server(
928 &language,
929 worktree_path,
930 self.client.http_client(),
931 cx,
932 );
933 let rpc = self.client.clone();
934 cx.spawn_weak(|this, mut cx| async move {
935 let language_server = language_server?.await.log_err()?;
936 if let Some(this) = this.upgrade(&cx) {
937 this.update(&mut cx, |this, _| {
938 this.language_servers.insert(key, language_server.clone());
939 });
940 }
941
942 let disk_based_sources = language
943 .disk_based_diagnostic_sources()
944 .cloned()
945 .unwrap_or_default();
946 let disk_based_diagnostics_progress_token =
947 language.disk_based_diagnostics_progress_token().cloned();
948 let has_disk_based_diagnostic_progress_token =
949 disk_based_diagnostics_progress_token.is_some();
950 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
951
952 // Listen for `PublishDiagnostics` notifications.
953 language_server
954 .on_notification::<lsp::notification::PublishDiagnostics, _>({
955 let diagnostics_tx = diagnostics_tx.clone();
956 move |params| {
957 if !has_disk_based_diagnostic_progress_token {
958 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
959 }
960 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
961 .ok();
962 if !has_disk_based_diagnostic_progress_token {
963 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
964 }
965 }
966 })
967 .detach();
968
969 // Listen for `Progress` notifications. Send an event when the language server
970 // transitions between running jobs and not running any jobs.
971 let mut running_jobs_for_this_server: i32 = 0;
972 language_server
973 .on_notification::<lsp::notification::Progress, _>(move |params| {
974 let token = match params.token {
975 lsp::NumberOrString::Number(_) => None,
976 lsp::NumberOrString::String(token) => Some(token),
977 };
978
979 if token == disk_based_diagnostics_progress_token {
980 match params.value {
981 lsp::ProgressParamsValue::WorkDone(progress) => {
982 match progress {
983 lsp::WorkDoneProgress::Begin(_) => {
984 running_jobs_for_this_server += 1;
985 if running_jobs_for_this_server == 1 {
986 block_on(
987 diagnostics_tx
988 .send(LspEvent::DiagnosticsStart),
989 )
990 .ok();
991 }
992 }
993 lsp::WorkDoneProgress::End(_) => {
994 running_jobs_for_this_server -= 1;
995 if running_jobs_for_this_server == 0 {
996 block_on(
997 diagnostics_tx
998 .send(LspEvent::DiagnosticsFinish),
999 )
1000 .ok();
1001 }
1002 }
1003 _ => {}
1004 }
1005 }
1006 }
1007 }
1008 })
1009 .detach();
1010
1011 // Process all the LSP events.
1012 cx.spawn(|mut cx| async move {
1013 while let Ok(message) = diagnostics_rx.recv().await {
1014 let this = this.upgrade(&cx)?;
1015 match message {
1016 LspEvent::DiagnosticsStart => {
1017 this.update(&mut cx, |this, cx| {
1018 this.disk_based_diagnostics_started(cx);
1019 if let Some(project_id) = this.remote_id() {
1020 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1021 project_id,
1022 })
1023 .log_err();
1024 }
1025 });
1026 }
1027 LspEvent::DiagnosticsUpdate(mut params) => {
1028 language.process_diagnostics(&mut params);
1029 this.update(&mut cx, |this, cx| {
1030 this.update_diagnostics(params, &disk_based_sources, cx)
1031 .log_err();
1032 });
1033 }
1034 LspEvent::DiagnosticsFinish => {
1035 this.update(&mut cx, |this, cx| {
1036 this.disk_based_diagnostics_finished(cx);
1037 if let Some(project_id) = this.remote_id() {
1038 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1039 project_id,
1040 })
1041 .log_err();
1042 }
1043 });
1044 }
1045 }
1046 }
1047 Some(())
1048 })
1049 .detach();
1050
1051 Some(language_server)
1052 })
1053 .shared()
1054 })
1055 .clone()
1056 }
1057
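    /// Converts an LSP `PublishDiagnostics` notification into grouped diagnostic
    /// entries, pairing supporting diagnostics with their primaries via related
    /// information, and applies them to the corresponding worktree path.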
1058 pub fn update_diagnostics(
1059 &mut self,
1060 params: lsp::PublishDiagnosticsParams,
1061 disk_based_sources: &HashSet<String>,
1062 cx: &mut ModelContext<Self>,
1063 ) -> Result<()> {
1064 let abs_path = params
1065 .uri
1066 .to_file_path()
1067 .map_err(|_| anyhow!("URI is not a file"))?;
1068 let mut next_group_id = 0;
1069 let mut diagnostics = Vec::default();
1070 let mut primary_diagnostic_group_ids = HashMap::default();
1071 let mut sources_by_group_id = HashMap::default();
1072 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1074 let source = diagnostic.source.as_ref();
1075 let code = diagnostic.code.as_ref().map(|code| match code {
1076 lsp::NumberOrString::Number(code) => code.to_string(),
1077 lsp::NumberOrString::String(code) => code.clone(),
1078 });
1079 let range = range_from_lsp(diagnostic.range);
1080 let is_supporting = diagnostic
1081 .related_information
1082 .as_ref()
1083 .map_or(false, |infos| {
1084 infos.iter().any(|info| {
1085 primary_diagnostic_group_ids.contains_key(&(
1086 source,
1087 code.clone(),
1088 range_from_lsp(info.location.range),
1089 ))
1090 })
1091 });
1092
1093 if is_supporting {
1094 if let Some(severity) = diagnostic.severity {
1095 supporting_diagnostic_severities
1096 .insert((source, code.clone(), range), severity);
1097 }
1098 } else {
1099 let group_id = post_inc(&mut next_group_id);
1100 let is_disk_based =
1101 source.map_or(false, |source| disk_based_sources.contains(source));
1102
1103 sources_by_group_id.insert(group_id, source);
1104 primary_diagnostic_group_ids
1105 .insert((source, code.clone(), range.clone()), group_id);
1106
1107 diagnostics.push(DiagnosticEntry {
1108 range,
1109 diagnostic: Diagnostic {
1110 code: code.clone(),
1111 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1112 message: diagnostic.message.clone(),
1113 group_id,
1114 is_primary: true,
1115 is_valid: true,
1116 is_disk_based,
1117 },
1118 });
1119 if let Some(infos) = &diagnostic.related_information {
1120 for info in infos {
1121 if info.location.uri == params.uri && !info.message.is_empty() {
1122 let range = range_from_lsp(info.location.range);
1123 diagnostics.push(DiagnosticEntry {
1124 range,
1125 diagnostic: Diagnostic {
1126 code: code.clone(),
1127 severity: DiagnosticSeverity::INFORMATION,
1128 message: info.message.clone(),
1129 group_id,
1130 is_primary: false,
1131 is_valid: true,
1132 is_disk_based,
1133 },
1134 });
1135 }
1136 }
1137 }
1138 }
1139 }
1140
1141 for entry in &mut diagnostics {
1142 let diagnostic = &mut entry.diagnostic;
1143 if !diagnostic.is_primary {
1144 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1145 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1146 source,
1147 diagnostic.code.clone(),
1148 entry.range.clone(),
1149 )) {
1150 diagnostic.severity = severity;
1151 }
1152 }
1153 }
1154
1155 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1156 Ok(())
1157 }
1158
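    /// Applies the given diagnostics to the open buffer for the path (if any) and to
    /// the worktree, then emits `Event::DiagnosticsUpdated`.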
1159 pub fn update_diagnostic_entries(
1160 &mut self,
1161 abs_path: PathBuf,
1162 version: Option<i32>,
1163 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1164 cx: &mut ModelContext<Project>,
1165 ) -> Result<(), anyhow::Error> {
1166 let (worktree, relative_path) = self
1167 .find_local_worktree(&abs_path, cx)
1168 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1169 let project_path = ProjectPath {
1170 worktree_id: worktree.read(cx).id(),
1171 path: relative_path.into(),
1172 };
1173
1174 for buffer in self.opened_buffers.values() {
1175 if let Some(buffer) = buffer.upgrade(cx) {
1176 if buffer
1177 .read(cx)
1178 .file()
1179 .map_or(false, |file| *file.path() == project_path.path)
1180 {
1181 buffer.update(cx, |buffer, cx| {
1182 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1183 })?;
1184 break;
1185 }
1186 }
1187 }
1188 worktree.update(cx, |worktree, cx| {
1189 worktree
1190 .as_local_mut()
1191 .ok_or_else(|| anyhow!("not a local worktree"))?
1192 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1193 })?;
1194 cx.emit(Event::DiagnosticsUpdated(project_path));
1195 Ok(())
1196 }
1197
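    /// Formats the given buffers: local buffers via their language servers' formatting
    /// requests, remote buffers via a `FormatBuffers` request to the host.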
1198 pub fn format(
1199 &self,
1200 buffers: HashSet<ModelHandle<Buffer>>,
1201 push_to_history: bool,
1202 cx: &mut ModelContext<Project>,
1203 ) -> Task<Result<ProjectTransaction>> {
1204 let mut local_buffers = Vec::new();
1205 let mut remote_buffers = None;
1206 for buffer_handle in buffers {
1207 let buffer = buffer_handle.read(cx);
1208 let worktree;
1209 if let Some(file) = File::from_dyn(buffer.file()) {
1210 worktree = file.worktree.clone();
1211 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1212 let lang_server;
1213 if let Some(lang) = buffer.language() {
1214 if let Some(server) = self
1215 .language_servers
1216 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1217 {
1218 lang_server = server.clone();
1219 } else {
1220 return Task::ready(Ok(Default::default()));
1221 };
1222 } else {
1223 return Task::ready(Ok(Default::default()));
1224 }
1225
1226 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1227 } else {
1228 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1229 }
1230 } else {
1231 return Task::ready(Ok(Default::default()));
1232 }
1233 }
1234
1235 let remote_buffers = self.remote_id().zip(remote_buffers);
1236 let client = self.client.clone();
1237
1238 cx.spawn(|this, mut cx| async move {
1239 let mut project_transaction = ProjectTransaction::default();
1240
1241 if let Some((project_id, remote_buffers)) = remote_buffers {
1242 let response = client
1243 .request(proto::FormatBuffers {
1244 project_id,
1245 buffer_ids: remote_buffers
1246 .iter()
1247 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1248 .collect(),
1249 })
1250 .await?
1251 .transaction
1252 .ok_or_else(|| anyhow!("missing transaction"))?;
1253 project_transaction = this
1254 .update(&mut cx, |this, cx| {
1255 this.deserialize_project_transaction(response, push_to_history, cx)
1256 })
1257 .await?;
1258 }
1259
1260 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1261 let lsp_edits = lang_server
1262 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1263 text_document: lsp::TextDocumentIdentifier::new(
1264 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1265 ),
1266 options: Default::default(),
1267 work_done_progress_params: Default::default(),
1268 })
1269 .await?;
1270
1271 if let Some(lsp_edits) = lsp_edits {
1272 let edits = buffer
1273 .update(&mut cx, |buffer, cx| {
1274 buffer.edits_from_lsp(lsp_edits, None, cx)
1275 })
1276 .await?;
1277 buffer.update(&mut cx, |buffer, cx| {
1278 buffer.finalize_last_transaction();
1279 buffer.start_transaction();
1280 for (range, text) in edits {
1281 buffer.edit([range], text, cx);
1282 }
1283 if buffer.end_transaction(cx).is_some() {
1284 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1285 if !push_to_history {
1286 buffer.forget_transaction(transaction.id);
1287 }
1288 project_transaction.0.insert(cx.handle(), transaction);
1289 }
1290 });
1291 }
1292 }
1293
1294 Ok(project_transaction)
1295 })
1296 }
1297
1298 pub fn definition<T: ToPointUtf16>(
1299 &self,
1300 buffer: &ModelHandle<Buffer>,
1301 position: T,
1302 cx: &mut ModelContext<Self>,
1303 ) -> Task<Result<Vec<Location>>> {
1304 let position = position.to_point_utf16(buffer.read(cx));
1305 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1306 }
1307
1308 pub fn references<T: ToPointUtf16>(
1309 &self,
1310 buffer: &ModelHandle<Buffer>,
1311 position: T,
1312 cx: &mut ModelContext<Self>,
1313 ) -> Task<Result<Vec<Location>>> {
1314 let position = position.to_point_utf16(buffer.read(cx));
1315 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1316 }
1317
1318 pub fn document_highlights<T: ToPointUtf16>(
1319 &self,
1320 buffer: &ModelHandle<Buffer>,
1321 position: T,
1322 cx: &mut ModelContext<Self>,
1323 ) -> Task<Result<Vec<DocumentHighlight>>> {
1324 let position = position.to_point_utf16(buffer.read(cx));
1325 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1326 }
1327
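    /// Queries workspace symbols from every running language server (or from the host,
    /// for remote projects) and resolves each result to a worktree-relative path.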
1328 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1329 if self.is_local() {
1330 let mut language_servers = HashMap::default();
1331 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1332 if let Some((worktree, language)) = self
1333 .worktree_for_id(*worktree_id, cx)
1334 .and_then(|worktree| worktree.read(cx).as_local())
1335 .zip(self.languages.get_language(language_name))
1336 {
1337 language_servers
1338 .entry(Arc::as_ptr(language_server))
1339 .or_insert((
1340 language_server.clone(),
1341 *worktree_id,
1342 worktree.abs_path().clone(),
1343 language.clone(),
1344 ));
1345 }
1346 }
1347
1348 let mut requests = Vec::new();
1349 for (language_server, _, _, _) in language_servers.values() {
1350 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1351 lsp::WorkspaceSymbolParams {
1352 query: query.to_string(),
1353 ..Default::default()
1354 },
1355 ));
1356 }
1357
1358 cx.spawn_weak(|this, cx| async move {
1359 let responses = futures::future::try_join_all(requests).await?;
1360
1361 let mut symbols = Vec::new();
1362 if let Some(this) = this.upgrade(&cx) {
1363 this.read_with(&cx, |this, cx| {
1364 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1365 language_servers.into_values().zip(responses)
1366 {
1367 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1368 |lsp_symbol| {
1369 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1370 let mut worktree_id = source_worktree_id;
1371 let path;
1372 if let Some((worktree, rel_path)) =
1373 this.find_local_worktree(&abs_path, cx)
1374 {
1375 worktree_id = worktree.read(cx).id();
1376 path = rel_path;
1377 } else {
1378 path = relativize_path(&worktree_abs_path, &abs_path);
1379 }
1380
1381 let label = language
1382 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1383 .unwrap_or_else(|| {
1384 CodeLabel::plain(lsp_symbol.name.clone(), None)
1385 });
1386 let signature = this.symbol_signature(worktree_id, &path);
1387
1388 Some(Symbol {
1389 source_worktree_id,
1390 worktree_id,
1391 language_name: language.name().to_string(),
1392 name: lsp_symbol.name,
1393 kind: lsp_symbol.kind,
1394 label,
1395 path,
1396 range: range_from_lsp(lsp_symbol.location.range),
1397 signature,
1398 })
1399 },
1400 ));
1401 }
1402 })
1403 }
1404
1405 Ok(symbols)
1406 })
1407 } else if let Some(project_id) = self.remote_id() {
1408 let request = self.client.request(proto::GetProjectSymbols {
1409 project_id,
1410 query: query.to_string(),
1411 });
1412 cx.spawn_weak(|this, cx| async move {
1413 let response = request.await?;
1414 let mut symbols = Vec::new();
1415 if let Some(this) = this.upgrade(&cx) {
1416 this.read_with(&cx, |this, _| {
1417 symbols.extend(
1418 response
1419 .symbols
1420 .into_iter()
1421 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1422 );
1423 })
1424 }
1425 Ok(symbols)
1426 })
1427 } else {
1428 Task::ready(Ok(Default::default()))
1429 }
1430 }
1431
1432 pub fn open_buffer_for_symbol(
1433 &mut self,
1434 symbol: &Symbol,
1435 cx: &mut ModelContext<Self>,
1436 ) -> Task<Result<ModelHandle<Buffer>>> {
1437 if self.is_local() {
1438 let language_server = if let Some(server) = self
1439 .language_servers
1440 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1441 {
1442 server.clone()
1443 } else {
1444 return Task::ready(Err(anyhow!(
1445 "language server for worktree and language not found"
1446 )));
1447 };
1448
1449 let worktree_abs_path = if let Some(worktree_abs_path) = self
1450 .worktree_for_id(symbol.worktree_id, cx)
1451 .and_then(|worktree| worktree.read(cx).as_local())
1452 .map(|local_worktree| local_worktree.abs_path())
1453 {
1454 worktree_abs_path
1455 } else {
1456 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1457 };
1458 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1459 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1460 uri
1461 } else {
1462 return Task::ready(Err(anyhow!("invalid symbol path")));
1463 };
1464
1465 self.open_local_buffer_via_lsp(
1466 symbol_uri,
1467 symbol.language_name.clone(),
1468 language_server,
1469 cx,
1470 )
1471 } else if let Some(project_id) = self.remote_id() {
1472 let request = self.client.request(proto::OpenBufferForSymbol {
1473 project_id,
1474 symbol: Some(serialize_symbol(symbol)),
1475 });
1476 cx.spawn(|this, mut cx| async move {
1477 let response = request.await?;
1478 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1479 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1480 .await
1481 })
1482 } else {
1483 Task::ready(Err(anyhow!("project does not have a remote id")))
1484 }
1485 }
1486
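    /// Requests completions at the given position, either directly from the buffer's
    /// language server or from the host for remote projects.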
1487 pub fn completions<T: ToPointUtf16>(
1488 &self,
1489 source_buffer_handle: &ModelHandle<Buffer>,
1490 position: T,
1491 cx: &mut ModelContext<Self>,
1492 ) -> Task<Result<Vec<Completion>>> {
1493 let source_buffer_handle = source_buffer_handle.clone();
1494 let source_buffer = source_buffer_handle.read(cx);
1495 let buffer_id = source_buffer.remote_id();
1496 let language = source_buffer.language().cloned();
1497 let worktree;
1498 let buffer_abs_path;
1499 if let Some(file) = File::from_dyn(source_buffer.file()) {
1500 worktree = file.worktree.clone();
1501 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1502 } else {
1503 return Task::ready(Ok(Default::default()));
1504 };
1505
1506 let position = position.to_point_utf16(source_buffer);
1507 let anchor = source_buffer.anchor_after(position);
1508
1509 if worktree.read(cx).as_local().is_some() {
1510 let buffer_abs_path = buffer_abs_path.unwrap();
1511 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1512 server
1513 } else {
1514 return Task::ready(Ok(Default::default()));
1515 };
1516
1517 cx.spawn(|_, cx| async move {
1518 let completions = lang_server
1519 .request::<lsp::request::Completion>(lsp::CompletionParams {
1520 text_document_position: lsp::TextDocumentPositionParams::new(
1521 lsp::TextDocumentIdentifier::new(
1522 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1523 ),
1524 position.to_lsp_position(),
1525 ),
1526 context: Default::default(),
1527 work_done_progress_params: Default::default(),
1528 partial_result_params: Default::default(),
1529 })
1530 .await
1531 .context("lsp completion request failed")?;
1532
1533 let completions = if let Some(completions) = completions {
1534 match completions {
1535 lsp::CompletionResponse::Array(completions) => completions,
1536 lsp::CompletionResponse::List(list) => list.items,
1537 }
1538 } else {
1539 Default::default()
1540 };
1541
1542 source_buffer_handle.read_with(&cx, |this, _| {
1543 Ok(completions
1544 .into_iter()
1545 .filter_map(|lsp_completion| {
1546 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1547 lsp::CompletionTextEdit::Edit(edit) => {
1548 (range_from_lsp(edit.range), edit.new_text.clone())
1549 }
1550 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1551 log::info!("unsupported insert/replace completion");
1552 return None;
1553 }
1554 };
1555
1556 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1557 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1558 if clipped_start == old_range.start && clipped_end == old_range.end {
1559 Some(Completion {
1560 old_range: this.anchor_before(old_range.start)
1561 ..this.anchor_after(old_range.end),
1562 new_text,
1563 label: language
1564 .as_ref()
1565 .and_then(|l| l.label_for_completion(&lsp_completion))
1566 .unwrap_or_else(|| {
1567 CodeLabel::plain(
1568 lsp_completion.label.clone(),
1569 lsp_completion.filter_text.as_deref(),
1570 )
1571 }),
1572 lsp_completion,
1573 })
1574 } else {
1575 None
1576 }
1577 })
1578 .collect())
1579 })
1580 })
1581 } else if let Some(project_id) = self.remote_id() {
1582 let rpc = self.client.clone();
1583 let message = proto::GetCompletions {
1584 project_id,
1585 buffer_id,
1586 position: Some(language::proto::serialize_anchor(&anchor)),
1587 version: (&source_buffer.version()).into(),
1588 };
1589 cx.spawn_weak(|_, mut cx| async move {
1590 let response = rpc.request(message).await?;
1591
1592 source_buffer_handle
1593 .update(&mut cx, |buffer, _| {
1594 buffer.wait_for_version(response.version.into())
1595 })
1596 .await;
1597
1598 response
1599 .completions
1600 .into_iter()
1601 .map(|completion| {
1602 language::proto::deserialize_completion(completion, language.as_ref())
1603 })
1604 .collect()
1605 })
1606 } else {
1607 Task::ready(Ok(Default::default()))
1608 }
1609 }
1610
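    /// Resolves the given completion and applies any additional text edits it carries,
    /// returning the resulting transaction.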
1611 pub fn apply_additional_edits_for_completion(
1612 &self,
1613 buffer_handle: ModelHandle<Buffer>,
1614 completion: Completion,
1615 push_to_history: bool,
1616 cx: &mut ModelContext<Self>,
1617 ) -> Task<Result<Option<Transaction>>> {
1618 let buffer = buffer_handle.read(cx);
1619 let buffer_id = buffer.remote_id();
1620
1621 if self.is_local() {
1622 let lang_server = if let Some(language_server) = buffer.language_server() {
1623 language_server.clone()
1624 } else {
1625 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1626 };
1627
1628 cx.spawn(|_, mut cx| async move {
1629 let resolved_completion = lang_server
1630 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1631 .await?;
1632 if let Some(edits) = resolved_completion.additional_text_edits {
1633 let edits = buffer_handle
1634 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1635 .await?;
1636 buffer_handle.update(&mut cx, |buffer, cx| {
1637 buffer.finalize_last_transaction();
1638 buffer.start_transaction();
1639 for (range, text) in edits {
1640 buffer.edit([range], text, cx);
1641 }
1642 let transaction = if buffer.end_transaction(cx).is_some() {
1643 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1644 if !push_to_history {
1645 buffer.forget_transaction(transaction.id);
1646 }
1647 Some(transaction)
1648 } else {
1649 None
1650 };
1651 Ok(transaction)
1652 })
1653 } else {
1654 Ok(None)
1655 }
1656 })
1657 } else if let Some(project_id) = self.remote_id() {
1658 let client = self.client.clone();
1659 cx.spawn(|_, mut cx| async move {
1660 let response = client
1661 .request(proto::ApplyCompletionAdditionalEdits {
1662 project_id,
1663 buffer_id,
1664 completion: Some(language::proto::serialize_completion(&completion)),
1665 })
1666 .await?;
1667
1668 if let Some(transaction) = response.transaction {
1669 let transaction = language::proto::deserialize_transaction(transaction)?;
1670 buffer_handle
1671 .update(&mut cx, |buffer, _| {
1672 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1673 })
1674 .await;
1675 if push_to_history {
1676 buffer_handle.update(&mut cx, |buffer, _| {
1677 buffer.push_transaction(transaction.clone(), Instant::now());
1678 });
1679 }
1680 Ok(Some(transaction))
1681 } else {
1682 Ok(None)
1683 }
1684 })
1685 } else {
1686 Task::ready(Err(anyhow!("project does not have a remote id")))
1687 }
1688 }
1689
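    /// Requests quickfix and refactor code actions for the given range from the
    /// buffer's language server, or from the host for remote projects.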
1690 pub fn code_actions<T: ToOffset>(
1691 &self,
1692 buffer_handle: &ModelHandle<Buffer>,
1693 range: Range<T>,
1694 cx: &mut ModelContext<Self>,
1695 ) -> Task<Result<Vec<CodeAction>>> {
1696 let buffer_handle = buffer_handle.clone();
1697 let buffer = buffer_handle.read(cx);
1698 let buffer_id = buffer.remote_id();
1699 let worktree;
1700 let buffer_abs_path;
1701 if let Some(file) = File::from_dyn(buffer.file()) {
1702 worktree = file.worktree.clone();
1703 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1704 } else {
1705 return Task::ready(Ok(Default::default()));
1706 };
1707 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1708
1709 if worktree.read(cx).as_local().is_some() {
1710 let buffer_abs_path = buffer_abs_path.unwrap();
1711 let lang_name;
1712 let lang_server;
1713 if let Some(lang) = buffer.language() {
1714 lang_name = lang.name().to_string();
1715 if let Some(server) = self
1716 .language_servers
1717 .get(&(worktree.read(cx).id(), lang_name.clone()))
1718 {
1719 lang_server = server.clone();
1720 } else {
1721 return Task::ready(Ok(Default::default()));
1722 };
1723 } else {
1724 return Task::ready(Ok(Default::default()));
1725 }
1726
1727 let lsp_range = lsp::Range::new(
1728 range.start.to_point_utf16(buffer).to_lsp_position(),
1729 range.end.to_point_utf16(buffer).to_lsp_position(),
1730 );
1731 cx.foreground().spawn(async move {
1732 Ok(lang_server
1733 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1734 text_document: lsp::TextDocumentIdentifier::new(
1735 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1736 ),
1737 range: lsp_range,
1738 work_done_progress_params: Default::default(),
1739 partial_result_params: Default::default(),
1740 context: lsp::CodeActionContext {
1741 diagnostics: Default::default(),
1742 only: Some(vec![
1743 lsp::CodeActionKind::QUICKFIX,
1744 lsp::CodeActionKind::REFACTOR,
1745 lsp::CodeActionKind::REFACTOR_EXTRACT,
1746 ]),
1747 },
1748 })
1749 .await?
1750 .unwrap_or_default()
1751 .into_iter()
1752 .filter_map(|entry| {
1753 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1754 Some(CodeAction {
1755 range: range.clone(),
1756 lsp_action,
1757 })
1758 } else {
1759 None
1760 }
1761 })
1762 .collect())
1763 })
1764 } else if let Some(project_id) = self.remote_id() {
1765 let rpc = self.client.clone();
1766 cx.spawn_weak(|_, mut cx| async move {
1767 let response = rpc
1768 .request(proto::GetCodeActions {
1769 project_id,
1770 buffer_id,
1771 start: Some(language::proto::serialize_anchor(&range.start)),
1772 end: Some(language::proto::serialize_anchor(&range.end)),
1773 })
1774 .await?;
1775
1776 buffer_handle
1777 .update(&mut cx, |buffer, _| {
1778 buffer.wait_for_version(response.version.into())
1779 })
1780 .await;
1781
1782 response
1783 .actions
1784 .into_iter()
1785 .map(language::proto::deserialize_code_action)
1786 .collect()
1787 })
1788 } else {
1789 Task::ready(Ok(Default::default()))
1790 }
1791 }
1792
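    /// Resolves a code action (re-querying it when the server provided no resolve data)
    /// and applies its workspace edit across the affected buffers.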
1793 pub fn apply_code_action(
1794 &self,
1795 buffer_handle: ModelHandle<Buffer>,
1796 mut action: CodeAction,
1797 push_to_history: bool,
1798 cx: &mut ModelContext<Self>,
1799 ) -> Task<Result<ProjectTransaction>> {
1800 if self.is_local() {
1801 let buffer = buffer_handle.read(cx);
1802 let lang_name = if let Some(lang) = buffer.language() {
1803 lang.name().to_string()
1804 } else {
1805 return Task::ready(Ok(Default::default()));
1806 };
1807 let lang_server = if let Some(language_server) = buffer.language_server() {
1808 language_server.clone()
1809 } else {
1810 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1811 };
1812 let range = action.range.to_point_utf16(buffer);
1813
1814 cx.spawn(|this, mut cx| async move {
1815 if let Some(lsp_range) = action
1816 .lsp_action
1817 .data
1818 .as_mut()
1819 .and_then(|d| d.get_mut("codeActionParams"))
1820 .and_then(|d| d.get_mut("range"))
1821 {
1822 *lsp_range = serde_json::to_value(&lsp::Range::new(
1823 range.start.to_lsp_position(),
1824 range.end.to_lsp_position(),
1825 ))
1826 .unwrap();
1827 action.lsp_action = lang_server
1828 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1829 .await?;
1830 } else {
1831 let actions = this
1832 .update(&mut cx, |this, cx| {
1833 this.code_actions(&buffer_handle, action.range, cx)
1834 })
1835 .await?;
1836 action.lsp_action = actions
1837 .into_iter()
1838 .find(|a| a.lsp_action.title == action.lsp_action.title)
1839 .ok_or_else(|| anyhow!("code action is outdated"))?
1840 .lsp_action;
1841 }
1842
1843 if let Some(edit) = action.lsp_action.edit {
1844 Self::deserialize_workspace_edit(
1845 this,
1846 edit,
1847 push_to_history,
1848 lang_name,
1849 lang_server,
1850 &mut cx,
1851 )
1852 .await
1853 } else {
1854 Ok(ProjectTransaction::default())
1855 }
1856 })
1857 } else if let Some(project_id) = self.remote_id() {
1858 let client = self.client.clone();
1859 let request = proto::ApplyCodeAction {
1860 project_id,
1861 buffer_id: buffer_handle.read(cx).remote_id(),
1862 action: Some(language::proto::serialize_code_action(&action)),
1863 };
1864 cx.spawn(|this, mut cx| async move {
1865 let response = client
1866 .request(request)
1867 .await?
1868 .transaction
1869 .ok_or_else(|| anyhow!("missing transaction"))?;
1870 this.update(&mut cx, |this, cx| {
1871 this.deserialize_project_transaction(response, push_to_history, cx)
1872 })
1873 .await
1874 })
1875 } else {
1876 Task::ready(Err(anyhow!("project does not have a remote id")))
1877 }
1878 }
1879
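    /// Applies an LSP workspace edit: performs any file create/rename/delete operations
    /// and edits each referenced buffer, collecting the per-buffer transactions.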
1880 async fn deserialize_workspace_edit(
1881 this: ModelHandle<Self>,
1882 edit: lsp::WorkspaceEdit,
1883 push_to_history: bool,
1884 language_name: String,
1885 language_server: Arc<LanguageServer>,
1886 cx: &mut AsyncAppContext,
1887 ) -> Result<ProjectTransaction> {
1888 let fs = this.read_with(cx, |this, _| this.fs.clone());
1889 let mut operations = Vec::new();
1890 if let Some(document_changes) = edit.document_changes {
1891 match document_changes {
1892 lsp::DocumentChanges::Edits(edits) => {
1893 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1894 }
1895 lsp::DocumentChanges::Operations(ops) => operations = ops,
1896 }
1897 } else if let Some(changes) = edit.changes {
1898 operations.extend(changes.into_iter().map(|(uri, edits)| {
1899 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1900 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1901 uri,
1902 version: None,
1903 },
1904 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1905 })
1906 }));
1907 }
1908
1909 let mut project_transaction = ProjectTransaction::default();
1910 for operation in operations {
1911 match operation {
1912 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1913 let abs_path = op
1914 .uri
1915 .to_file_path()
1916 .map_err(|_| anyhow!("can't convert URI to path"))?;
1917
1918 if let Some(parent_path) = abs_path.parent() {
1919 fs.create_dir(parent_path).await?;
1920 }
1921 if abs_path.ends_with("/") {
1922 fs.create_dir(&abs_path).await?;
1923 } else {
1924 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1925 .await?;
1926 }
1927 }
1928 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1929 let source_abs_path = op
1930 .old_uri
1931 .to_file_path()
1932 .map_err(|_| anyhow!("can't convert URI to path"))?;
1933 let target_abs_path = op
1934 .new_uri
1935 .to_file_path()
1936 .map_err(|_| anyhow!("can't convert URI to path"))?;
1937 fs.rename(
1938 &source_abs_path,
1939 &target_abs_path,
1940 op.options.map(Into::into).unwrap_or_default(),
1941 )
1942 .await?;
1943 }
1944 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1945 let abs_path = op
1946 .uri
1947 .to_file_path()
1948 .map_err(|_| anyhow!("can't convert URI to path"))?;
1949 let options = op.options.map(Into::into).unwrap_or_default();
1950 if abs_path.ends_with("/") {
1951 fs.remove_dir(&abs_path, options).await?;
1952 } else {
1953 fs.remove_file(&abs_path, options).await?;
1954 }
1955 }
1956 lsp::DocumentChangeOperation::Edit(op) => {
1957 let buffer_to_edit = this
1958 .update(cx, |this, cx| {
1959 this.open_local_buffer_via_lsp(
1960 op.text_document.uri,
1961 language_name.clone(),
1962 language_server.clone(),
1963 cx,
1964 )
1965 })
1966 .await?;
1967
1968 let edits = buffer_to_edit
1969 .update(cx, |buffer, cx| {
1970 let edits = op.edits.into_iter().map(|edit| match edit {
1971 lsp::OneOf::Left(edit) => edit,
1972 lsp::OneOf::Right(edit) => edit.text_edit,
1973 });
1974 buffer.edits_from_lsp(edits, op.text_document.version, cx)
1975 })
1976 .await?;
1977
1978 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
1979 buffer.finalize_last_transaction();
1980 buffer.start_transaction();
1981 for (range, text) in edits {
1982 buffer.edit([range], text, cx);
1983 }
1984 let transaction = if buffer.end_transaction(cx).is_some() {
1985 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1986 if !push_to_history {
1987 buffer.forget_transaction(transaction.id);
1988 }
1989 Some(transaction)
1990 } else {
1991 None
1992 };
1993
1994 transaction
1995 });
1996 if let Some(transaction) = transaction {
1997 project_transaction.0.insert(buffer_to_edit, transaction);
1998 }
1999 }
2000 }
2001 }
2002
2003 Ok(project_transaction)
2004 }
2005
2006 pub fn prepare_rename<T: ToPointUtf16>(
2007 &self,
2008 buffer: ModelHandle<Buffer>,
2009 position: T,
2010 cx: &mut ModelContext<Self>,
2011 ) -> Task<Result<Option<Range<Anchor>>>> {
2012 let position = position.to_point_utf16(buffer.read(cx));
2013 self.request_lsp(buffer, PrepareRename { position }, cx)
2014 }
2015
2016 pub fn perform_rename<T: ToPointUtf16>(
2017 &self,
2018 buffer: ModelHandle<Buffer>,
2019 position: T,
2020 new_name: String,
2021 push_to_history: bool,
2022 cx: &mut ModelContext<Self>,
2023 ) -> Task<Result<ProjectTransaction>> {
2024 let position = position.to_point_utf16(buffer.read(cx));
2025 self.request_lsp(
2026 buffer,
2027 PerformRename {
2028 position,
2029 new_name,
2030 push_to_history,
2031 },
2032 cx,
2033 )
2034 }
2035
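    /// Searches the project for `query`. Locally this runs as a three-stage pipeline:
    /// background workers scan the visible files of every local worktree for candidate paths,
    /// matching paths are opened as buffers (already-open buffers are searched as well), and a
    /// second pool of workers runs the query against each buffer snapshot to produce anchor
    /// ranges. On remote projects the query is forwarded to the host over RPC.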
2036 pub fn search(
2037 &self,
2038 query: SearchQuery,
2039 cx: &mut ModelContext<Self>,
2040 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2041 if self.is_local() {
2042 let snapshots = self
2043 .strong_worktrees(cx)
2044 .filter_map(|tree| {
2045 let tree = tree.read(cx).as_local()?;
2046 Some(tree.snapshot())
2047 })
2048 .collect::<Vec<_>>();
2049
2050 let background = cx.background().clone();
2051 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2052 if path_count == 0 {
2053 return Task::ready(Ok(Default::default()));
2054 }
2055 let workers = background.num_cpus().min(path_count);
2056 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
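            // Stage 1: fan the visible file paths out across `workers` background tasks. Each
            // worker scans its slice of the concatenated worktree snapshots and sends paths
            // whose file contents match the query through `matching_paths_tx`.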
2057 cx.background()
2058 .spawn({
2059 let fs = self.fs.clone();
2060 let background = cx.background().clone();
2061 let query = query.clone();
2062 async move {
2063 let fs = &fs;
2064 let query = &query;
2065 let matching_paths_tx = &matching_paths_tx;
2066 let paths_per_worker = (path_count + workers - 1) / workers;
2067 let snapshots = &snapshots;
2068 background
2069 .scoped(|scope| {
2070 for worker_ix in 0..workers {
2071 let worker_start_ix = worker_ix * paths_per_worker;
2072 let worker_end_ix = worker_start_ix + paths_per_worker;
2073 scope.spawn(async move {
2074 let mut snapshot_start_ix = 0;
2075 let mut abs_path = PathBuf::new();
2076 for snapshot in snapshots {
2077 let snapshot_end_ix =
2078 snapshot_start_ix + snapshot.visible_file_count();
2079 if worker_end_ix <= snapshot_start_ix {
2080 break;
2081 } else if worker_start_ix > snapshot_end_ix {
2082 snapshot_start_ix = snapshot_end_ix;
2083 continue;
2084 } else {
2085 let start_in_snapshot = worker_start_ix
2086 .saturating_sub(snapshot_start_ix);
2087 let end_in_snapshot =
2088 cmp::min(worker_end_ix, snapshot_end_ix)
2089 - snapshot_start_ix;
2090
2091 for entry in snapshot
2092 .files(false, start_in_snapshot)
2093 .take(end_in_snapshot - start_in_snapshot)
2094 {
2095 if matching_paths_tx.is_closed() {
2096 break;
2097 }
2098
2099 abs_path.clear();
2100 abs_path.push(&snapshot.abs_path());
2101 abs_path.push(&entry.path);
2102 let matches = if let Some(file) =
2103 fs.open_sync(&abs_path).await.log_err()
2104 {
2105 query.detect(file).unwrap_or(false)
2106 } else {
2107 false
2108 };
2109
2110 if matches {
2111 let project_path =
2112 (snapshot.id(), entry.path.clone());
2113 if matching_paths_tx
2114 .send(project_path)
2115 .await
2116 .is_err()
2117 {
2118 break;
2119 }
2120 }
2121 }
2122
2123 snapshot_start_ix = snapshot_end_ix;
2124 }
2125 }
2126 });
2127 }
2128 })
2129 .await;
2130 }
2131 })
2132 .detach();
2133
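            // Stage 2: seed the buffer channel with buffers that are already open, then open a
            // buffer for each matching path as it arrives, skipping buffers already recorded in
            // the `open_buffers` set.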
2134 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2135 let open_buffers = self
2136 .opened_buffers
2137 .values()
2138 .filter_map(|b| b.upgrade(cx))
2139 .collect::<HashSet<_>>();
2140 cx.spawn(|this, cx| async move {
2141 for buffer in &open_buffers {
2142 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2143 buffers_tx.send((buffer.clone(), snapshot)).await?;
2144 }
2145
2146 let open_buffers = Rc::new(RefCell::new(open_buffers));
2147 while let Some(project_path) = matching_paths_rx.next().await {
2148 if buffers_tx.is_closed() {
2149 break;
2150 }
2151
2152 let this = this.clone();
2153 let open_buffers = open_buffers.clone();
2154 let buffers_tx = buffers_tx.clone();
2155 cx.spawn(|mut cx| async move {
2156 if let Some(buffer) = this
2157 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2158 .await
2159 .log_err()
2160 {
2161 if open_buffers.borrow_mut().insert(buffer.clone()) {
2162 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2163 buffers_tx.send((buffer, snapshot)).await?;
2164 }
2165 }
2166
2167 Ok::<_, anyhow::Error>(())
2168 })
2169 .detach();
2170 }
2171
2172 Ok::<_, anyhow::Error>(())
2173 })
2174 .detach_and_log_err(cx);
2175
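            // Stage 3: run the query against each buffer snapshot on background workers,
            // collecting matches as anchor ranges in one map per worker and merging them at
            // the end.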
2176 let background = cx.background().clone();
2177 cx.background().spawn(async move {
2178 let query = &query;
2179 let mut matched_buffers = Vec::new();
2180 for _ in 0..workers {
2181 matched_buffers.push(HashMap::default());
2182 }
2183 background
2184 .scoped(|scope| {
2185 for worker_matched_buffers in matched_buffers.iter_mut() {
2186 let mut buffers_rx = buffers_rx.clone();
2187 scope.spawn(async move {
2188 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2189 let buffer_matches = query
2190 .search(snapshot.as_rope())
2191 .await
2192 .iter()
2193 .map(|range| {
2194 snapshot.anchor_before(range.start)
2195 ..snapshot.anchor_after(range.end)
2196 })
2197 .collect::<Vec<_>>();
2198 if !buffer_matches.is_empty() {
2199 worker_matched_buffers
2200 .insert(buffer.clone(), buffer_matches);
2201 }
2202 }
2203 });
2204 }
2205 })
2206 .await;
2207 Ok(matched_buffers.into_iter().flatten().collect())
2208 })
2209 } else if let Some(project_id) = self.remote_id() {
2210 let request = self.client.request(query.to_proto(project_id));
2211 cx.spawn(|this, mut cx| async move {
2212 let response = request.await?;
2213 let mut result = HashMap::default();
2214 for location in response.locations {
2215 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2216 let target_buffer = this
2217 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2218 .await?;
2219 let start = location
2220 .start
2221 .and_then(deserialize_anchor)
2222 .ok_or_else(|| anyhow!("missing target start"))?;
2223 let end = location
2224 .end
2225 .and_then(deserialize_anchor)
2226 .ok_or_else(|| anyhow!("missing target end"))?;
2227 result
2228 .entry(target_buffer)
                    .or_default()
2230 .push(start..end)
2231 }
2232 Ok(result)
2233 })
2234 } else {
2235 Task::ready(Ok(Default::default()))
2236 }
2237 }
2238
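    /// Dispatches an `LspCommand` either to the buffer's local language server or, for remote
    /// projects, to the host over RPC, converting the response back into the command's native
    /// type. If neither applies, a default response is returned.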
2239 fn request_lsp<R: LspCommand>(
2240 &self,
2241 buffer_handle: ModelHandle<Buffer>,
2242 request: R,
2243 cx: &mut ModelContext<Self>,
2244 ) -> Task<Result<R::Response>>
2245 where
2246 <R::LspRequest as lsp::request::Request>::Result: Send,
2247 {
2248 let buffer = buffer_handle.read(cx);
2249 if self.is_local() {
2250 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2251 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2252 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2253 return cx.spawn(|this, cx| async move {
2254 let response = language_server
2255 .request::<R::LspRequest>(lsp_params)
2256 .await
2257 .context("lsp request failed")?;
2258 request
2259 .response_from_lsp(response, this, buffer_handle, cx)
2260 .await
2261 });
2262 }
2263 } else if let Some(project_id) = self.remote_id() {
2264 let rpc = self.client.clone();
2265 let message = request.to_proto(project_id, buffer);
2266 return cx.spawn(|this, cx| async move {
2267 let response = rpc.request(message).await?;
2268 request
2269 .response_from_proto(response, this, buffer_handle, cx)
2270 .await
2271 });
2272 }
2273 Task::ready(Ok(Default::default()))
2274 }
2275
2276 pub fn find_or_create_local_worktree(
2277 &self,
2278 abs_path: impl AsRef<Path>,
2279 weak: bool,
2280 cx: &mut ModelContext<Self>,
2281 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2282 let abs_path = abs_path.as_ref();
2283 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2284 Task::ready(Ok((tree.clone(), relative_path.into())))
2285 } else {
2286 let worktree = self.create_local_worktree(abs_path, weak, cx);
2287 cx.foreground()
2288 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2289 }
2290 }
2291
2292 pub fn find_local_worktree(
2293 &self,
2294 abs_path: &Path,
2295 cx: &AppContext,
2296 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2297 for tree in self.worktrees(cx) {
2298 if let Some(relative_path) = tree
2299 .read(cx)
2300 .as_local()
2301 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2302 {
2303 return Some((tree.clone(), relative_path.into()));
2304 }
2305 }
2306 None
2307 }
2308
2309 pub fn is_shared(&self) -> bool {
2310 match &self.client_state {
2311 ProjectClientState::Local { is_shared, .. } => *is_shared,
2312 ProjectClientState::Remote { .. } => false,
2313 }
2314 }
2315
2316 fn create_local_worktree(
2317 &self,
2318 abs_path: impl AsRef<Path>,
2319 weak: bool,
2320 cx: &mut ModelContext<Self>,
2321 ) -> Task<Result<ModelHandle<Worktree>>> {
2322 let fs = self.fs.clone();
2323 let client = self.client.clone();
2324 let path = Arc::from(abs_path.as_ref());
2325 cx.spawn(|project, mut cx| async move {
2326 let worktree = Worktree::local(client.clone(), path, weak, fs, &mut cx).await?;
2327
2328 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2329 project.add_worktree(&worktree, cx);
2330 (project.remote_id(), project.is_shared())
2331 });
2332
2333 if let Some(project_id) = remote_project_id {
2334 worktree
2335 .update(&mut cx, |worktree, cx| {
2336 worktree.as_local_mut().unwrap().register(project_id, cx)
2337 })
2338 .await?;
2339 if is_shared {
2340 worktree
2341 .update(&mut cx, |worktree, cx| {
2342 worktree.as_local_mut().unwrap().share(project_id, cx)
2343 })
2344 .await?;
2345 }
2346 }
2347
2348 Ok(worktree)
2349 })
2350 }
2351
2352 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2353 self.worktrees.retain(|worktree| {
2354 worktree
2355 .upgrade(cx)
2356 .map_or(false, |w| w.read(cx).id() != id)
2357 });
2358 cx.notify();
2359 }
2360
2361 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2362 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2363 if worktree.read(cx).is_local() {
2364 cx.subscribe(&worktree, |this, worktree, _, cx| {
2365 this.update_local_worktree_buffers(worktree, cx);
2366 })
2367 .detach();
2368 }
2369
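        // Local worktrees marked as weak are only held through weak handles, so they are
        // dropped as soon as nothing else retains them; a release observer prunes them from
        // the list.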
2370 let push_weak_handle = {
2371 let worktree = worktree.read(cx);
2372 worktree.is_local() && worktree.is_weak()
2373 };
2374 if push_weak_handle {
2375 cx.observe_release(&worktree, |this, cx| {
2376 this.worktrees
2377 .retain(|worktree| worktree.upgrade(cx).is_some());
2378 cx.notify();
2379 })
2380 .detach();
2381 self.worktrees
2382 .push(WorktreeHandle::Weak(worktree.downgrade()));
2383 } else {
2384 self.worktrees
2385 .push(WorktreeHandle::Strong(worktree.clone()));
2386 }
2387 cx.notify();
2388 }
2389
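    /// Reconciles every open buffer belonging to `worktree_handle` with the worktree's latest
    /// snapshot, assigning each buffer an updated `File` (or one with no entry when its path
    /// no longer exists) and, when the project is shared, sending the updated file to
    /// collaborators. Buffers whose handles can no longer be upgraded are dropped.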
2390 fn update_local_worktree_buffers(
2391 &mut self,
2392 worktree_handle: ModelHandle<Worktree>,
2393 cx: &mut ModelContext<Self>,
2394 ) {
2395 let snapshot = worktree_handle.read(cx).snapshot();
2396 let mut buffers_to_delete = Vec::new();
2397 for (buffer_id, buffer) in &self.opened_buffers {
2398 if let Some(buffer) = buffer.upgrade(cx) {
2399 buffer.update(cx, |buffer, cx| {
2400 if let Some(old_file) = File::from_dyn(buffer.file()) {
2401 if old_file.worktree != worktree_handle {
2402 return;
2403 }
2404
2405 let new_file = if let Some(entry) = old_file
2406 .entry_id
2407 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2408 {
2409 File {
2410 is_local: true,
2411 entry_id: Some(entry.id),
2412 mtime: entry.mtime,
2413 path: entry.path.clone(),
2414 worktree: worktree_handle.clone(),
2415 }
2416 } else if let Some(entry) =
2417 snapshot.entry_for_path(old_file.path().as_ref())
2418 {
2419 File {
2420 is_local: true,
2421 entry_id: Some(entry.id),
2422 mtime: entry.mtime,
2423 path: entry.path.clone(),
2424 worktree: worktree_handle.clone(),
2425 }
2426 } else {
2427 File {
2428 is_local: true,
2429 entry_id: None,
2430 path: old_file.path().clone(),
2431 mtime: old_file.mtime(),
2432 worktree: worktree_handle.clone(),
2433 }
2434 };
2435
2436 if let Some(project_id) = self.remote_id() {
2437 self.client
2438 .send(proto::UpdateBufferFile {
2439 project_id,
2440 buffer_id: *buffer_id as u64,
2441 file: Some(new_file.to_proto()),
2442 })
2443 .log_err();
2444 }
2445 buffer.file_updated(Box::new(new_file), cx).detach();
2446 }
2447 });
2448 } else {
2449 buffers_to_delete.push(*buffer_id);
2450 }
2451 }
2452
2453 for buffer_id in buffers_to_delete {
2454 self.opened_buffers.remove(&buffer_id);
2455 }
2456 }
2457
2458 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2459 let new_active_entry = entry.and_then(|project_path| {
2460 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2461 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2462 Some(ProjectEntry {
2463 worktree_id: project_path.worktree_id,
2464 entry_id: entry.id,
2465 })
2466 });
2467 if new_active_entry != self.active_entry {
2468 self.active_entry = new_active_entry;
2469 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2470 }
2471 }
2472
2473 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2474 self.language_servers_with_diagnostics_running > 0
2475 }
2476
2477 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2478 let mut summary = DiagnosticSummary::default();
2479 for (_, path_summary) in self.diagnostic_summaries(cx) {
2480 summary.error_count += path_summary.error_count;
2481 summary.warning_count += path_summary.warning_count;
2482 summary.info_count += path_summary.info_count;
2483 summary.hint_count += path_summary.hint_count;
2484 }
2485 summary
2486 }
2487
2488 pub fn diagnostic_summaries<'a>(
2489 &'a self,
2490 cx: &'a AppContext,
2491 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2492 self.worktrees(cx).flat_map(move |worktree| {
2493 let worktree = worktree.read(cx);
2494 let worktree_id = worktree.id();
2495 worktree
2496 .diagnostic_summaries()
2497 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2498 })
2499 }
2500
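    // Several language servers may run disk-based diagnostics at once, so a counter is used
    // to emit `DiskBasedDiagnosticsStarted`/`Finished` only when the first one starts and the
    // last one finishes; `DiskBasedDiagnosticsUpdated` is emitted on every completion.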
2501 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2502 self.language_servers_with_diagnostics_running += 1;
2503 if self.language_servers_with_diagnostics_running == 1 {
2504 cx.emit(Event::DiskBasedDiagnosticsStarted);
2505 }
2506 }
2507
2508 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2509 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2510 self.language_servers_with_diagnostics_running -= 1;
2511 if self.language_servers_with_diagnostics_running == 0 {
2512 cx.emit(Event::DiskBasedDiagnosticsFinished);
2513 }
2514 }
2515
2516 pub fn active_entry(&self) -> Option<ProjectEntry> {
2517 self.active_entry
2518 }
2519
2520 // RPC message handlers
2521
2522 async fn handle_unshare_project(
2523 this: ModelHandle<Self>,
2524 _: TypedEnvelope<proto::UnshareProject>,
2525 _: Arc<Client>,
2526 mut cx: AsyncAppContext,
2527 ) -> Result<()> {
2528 this.update(&mut cx, |this, cx| {
2529 if let ProjectClientState::Remote {
2530 sharing_has_stopped,
2531 ..
2532 } = &mut this.client_state
2533 {
2534 *sharing_has_stopped = true;
2535 this.collaborators.clear();
2536 cx.notify();
2537 } else {
2538 unreachable!()
2539 }
2540 });
2541
2542 Ok(())
2543 }
2544
2545 async fn handle_add_collaborator(
2546 this: ModelHandle<Self>,
2547 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2548 _: Arc<Client>,
2549 mut cx: AsyncAppContext,
2550 ) -> Result<()> {
2551 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2552 let collaborator = envelope
2553 .payload
2554 .collaborator
2555 .take()
2556 .ok_or_else(|| anyhow!("empty collaborator"))?;
2557
2558 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2559 this.update(&mut cx, |this, cx| {
2560 this.collaborators
2561 .insert(collaborator.peer_id, collaborator);
2562 cx.notify();
2563 });
2564
2565 Ok(())
2566 }
2567
2568 async fn handle_remove_collaborator(
2569 this: ModelHandle<Self>,
2570 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2571 _: Arc<Client>,
2572 mut cx: AsyncAppContext,
2573 ) -> Result<()> {
2574 this.update(&mut cx, |this, cx| {
2575 let peer_id = PeerId(envelope.payload.peer_id);
2576 let replica_id = this
2577 .collaborators
2578 .remove(&peer_id)
2579 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2580 .replica_id;
2581 for (_, buffer) in &this.opened_buffers {
2582 if let Some(buffer) = buffer.upgrade(cx) {
2583 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2584 }
2585 }
2586 cx.notify();
2587 Ok(())
2588 })
2589 }
2590
2591 async fn handle_register_worktree(
2592 this: ModelHandle<Self>,
2593 envelope: TypedEnvelope<proto::RegisterWorktree>,
2594 client: Arc<Client>,
2595 mut cx: AsyncAppContext,
2596 ) -> Result<()> {
2597 this.update(&mut cx, |this, cx| {
2598 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2599 let replica_id = this.replica_id();
2600 let worktree = proto::Worktree {
2601 id: envelope.payload.worktree_id,
2602 root_name: envelope.payload.root_name,
2603 entries: Default::default(),
2604 diagnostic_summaries: Default::default(),
2605 weak: envelope.payload.weak,
2606 };
2607 let (worktree, load_task) =
2608 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2609 this.add_worktree(&worktree, cx);
2610 load_task.detach();
2611 Ok(())
2612 })
2613 }
2614
2615 async fn handle_unregister_worktree(
2616 this: ModelHandle<Self>,
2617 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2618 _: Arc<Client>,
2619 mut cx: AsyncAppContext,
2620 ) -> Result<()> {
2621 this.update(&mut cx, |this, cx| {
2622 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2623 this.remove_worktree(worktree_id, cx);
2624 Ok(())
2625 })
2626 }
2627
2628 async fn handle_update_worktree(
2629 this: ModelHandle<Self>,
2630 envelope: TypedEnvelope<proto::UpdateWorktree>,
2631 _: Arc<Client>,
2632 mut cx: AsyncAppContext,
2633 ) -> Result<()> {
2634 this.update(&mut cx, |this, cx| {
2635 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2636 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2637 worktree.update(cx, |worktree, _| {
2638 let worktree = worktree.as_remote_mut().unwrap();
2639 worktree.update_from_remote(envelope)
2640 })?;
2641 }
2642 Ok(())
2643 })
2644 }
2645
2646 async fn handle_update_diagnostic_summary(
2647 this: ModelHandle<Self>,
2648 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2649 _: Arc<Client>,
2650 mut cx: AsyncAppContext,
2651 ) -> Result<()> {
2652 this.update(&mut cx, |this, cx| {
2653 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2654 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2655 if let Some(summary) = envelope.payload.summary {
2656 let project_path = ProjectPath {
2657 worktree_id,
2658 path: Path::new(&summary.path).into(),
2659 };
2660 worktree.update(cx, |worktree, _| {
2661 worktree
2662 .as_remote_mut()
2663 .unwrap()
2664 .update_diagnostic_summary(project_path.path.clone(), &summary);
2665 });
2666 cx.emit(Event::DiagnosticsUpdated(project_path));
2667 }
2668 }
2669 Ok(())
2670 })
2671 }
2672
2673 async fn handle_disk_based_diagnostics_updating(
2674 this: ModelHandle<Self>,
2675 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2676 _: Arc<Client>,
2677 mut cx: AsyncAppContext,
2678 ) -> Result<()> {
2679 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2680 Ok(())
2681 }
2682
2683 async fn handle_disk_based_diagnostics_updated(
2684 this: ModelHandle<Self>,
2685 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2686 _: Arc<Client>,
2687 mut cx: AsyncAppContext,
2688 ) -> Result<()> {
2689 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2690 Ok(())
2691 }
2692
2693 async fn handle_update_buffer(
2694 this: ModelHandle<Self>,
2695 envelope: TypedEnvelope<proto::UpdateBuffer>,
2696 _: Arc<Client>,
2697 mut cx: AsyncAppContext,
2698 ) -> Result<()> {
2699 this.update(&mut cx, |this, cx| {
2700 let payload = envelope.payload.clone();
2701 let buffer_id = payload.buffer_id;
2702 let ops = payload
2703 .operations
2704 .into_iter()
                .map(language::proto::deserialize_operation)
2706 .collect::<Result<Vec<_>, _>>()?;
2707 match this.opened_buffers.entry(buffer_id) {
2708 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2709 OpenBuffer::Strong(buffer) => {
2710 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2711 }
2712 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2713 _ => unreachable!(),
2714 },
2715 hash_map::Entry::Vacant(e) => {
2716 e.insert(OpenBuffer::Loading(ops));
2717 }
2718 }
2719 Ok(())
2720 })
2721 }
2722
2723 async fn handle_update_buffer_file(
2724 this: ModelHandle<Self>,
2725 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2726 _: Arc<Client>,
2727 mut cx: AsyncAppContext,
2728 ) -> Result<()> {
2729 this.update(&mut cx, |this, cx| {
2730 let payload = envelope.payload.clone();
2731 let buffer_id = payload.buffer_id;
2732 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2733 let worktree = this
2734 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2735 .ok_or_else(|| anyhow!("no such worktree"))?;
2736 let file = File::from_proto(file, worktree.clone(), cx)?;
2737 let buffer = this
2738 .opened_buffers
2739 .get_mut(&buffer_id)
2740 .and_then(|b| b.upgrade(cx))
2741 .ok_or_else(|| anyhow!("no such buffer"))?;
2742 buffer.update(cx, |buffer, cx| {
2743 buffer.file_updated(Box::new(file), cx).detach();
2744 });
2745 Ok(())
2746 })
2747 }
2748
2749 async fn handle_save_buffer(
2750 this: ModelHandle<Self>,
2751 envelope: TypedEnvelope<proto::SaveBuffer>,
2752 _: Arc<Client>,
2753 mut cx: AsyncAppContext,
2754 ) -> Result<proto::BufferSaved> {
2755 let buffer_id = envelope.payload.buffer_id;
2756 let requested_version = envelope.payload.version.try_into()?;
2757
2758 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2759 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2760 let buffer = this
2761 .opened_buffers
2762 .get(&buffer_id)
2763 .map(|buffer| buffer.upgrade(cx).unwrap())
2764 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2765 Ok::<_, anyhow::Error>((project_id, buffer))
2766 })?;
2767
2768 if !buffer
2769 .read_with(&cx, |buffer, _| buffer.version())
2770 .observed_all(&requested_version)
2771 {
2772 Err(anyhow!("save request depends on unreceived edits"))?;
2773 }
2774
2775 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2776 Ok(proto::BufferSaved {
2777 project_id,
2778 buffer_id,
2779 version: (&saved_version).into(),
2780 mtime: Some(mtime.into()),
2781 })
2782 }
2783
2784 async fn handle_format_buffers(
2785 this: ModelHandle<Self>,
2786 envelope: TypedEnvelope<proto::FormatBuffers>,
2787 _: Arc<Client>,
2788 mut cx: AsyncAppContext,
2789 ) -> Result<proto::FormatBuffersResponse> {
2790 let sender_id = envelope.original_sender_id()?;
2791 let format = this.update(&mut cx, |this, cx| {
2792 let mut buffers = HashSet::default();
2793 for buffer_id in &envelope.payload.buffer_ids {
2794 buffers.insert(
2795 this.opened_buffers
2796 .get(buffer_id)
2797 .map(|buffer| buffer.upgrade(cx).unwrap())
2798 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2799 );
2800 }
2801 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2802 })?;
2803
2804 let project_transaction = format.await?;
2805 let project_transaction = this.update(&mut cx, |this, cx| {
2806 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2807 });
2808 Ok(proto::FormatBuffersResponse {
2809 transaction: Some(project_transaction),
2810 })
2811 }
2812
2813 async fn handle_get_completions(
2814 this: ModelHandle<Self>,
2815 envelope: TypedEnvelope<proto::GetCompletions>,
2816 _: Arc<Client>,
2817 mut cx: AsyncAppContext,
2818 ) -> Result<proto::GetCompletionsResponse> {
2819 let position = envelope
2820 .payload
2821 .position
2822 .and_then(language::proto::deserialize_anchor)
2823 .ok_or_else(|| anyhow!("invalid position"))?;
        let requested_version = clock::Global::from(envelope.payload.version);
2825 let buffer = this.read_with(&cx, |this, cx| {
2826 this.opened_buffers
2827 .get(&envelope.payload.buffer_id)
2828 .map(|buffer| buffer.upgrade(cx).unwrap())
2829 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2830 })?;
2831 if !buffer
2832 .read_with(&cx, |buffer, _| buffer.version())
            .observed_all(&requested_version)
2834 {
2835 Err(anyhow!("completion request depends on unreceived edits"))?;
2836 }
2837 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2838 let completions = this
2839 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2840 .await?;
2841
2842 Ok(proto::GetCompletionsResponse {
2843 completions: completions
2844 .iter()
2845 .map(language::proto::serialize_completion)
2846 .collect(),
2847 version: (&version).into(),
2848 })
2849 }
2850
2851 async fn handle_apply_additional_edits_for_completion(
2852 this: ModelHandle<Self>,
2853 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2854 _: Arc<Client>,
2855 mut cx: AsyncAppContext,
2856 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2857 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2858 let buffer = this
2859 .opened_buffers
2860 .get(&envelope.payload.buffer_id)
2861 .map(|buffer| buffer.upgrade(cx).unwrap())
2862 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2863 let language = buffer.read(cx).language();
2864 let completion = language::proto::deserialize_completion(
2865 envelope
2866 .payload
2867 .completion
2868 .ok_or_else(|| anyhow!("invalid completion"))?,
2869 language,
2870 )?;
2871 Ok::<_, anyhow::Error>(
2872 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2873 )
2874 })?;
2875
2876 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2877 transaction: apply_additional_edits
2878 .await?
2879 .as_ref()
2880 .map(language::proto::serialize_transaction),
2881 })
2882 }
2883
2884 async fn handle_get_code_actions(
2885 this: ModelHandle<Self>,
2886 envelope: TypedEnvelope<proto::GetCodeActions>,
2887 _: Arc<Client>,
2888 mut cx: AsyncAppContext,
2889 ) -> Result<proto::GetCodeActionsResponse> {
2890 let start = envelope
2891 .payload
2892 .start
2893 .and_then(language::proto::deserialize_anchor)
2894 .ok_or_else(|| anyhow!("invalid start"))?;
2895 let end = envelope
2896 .payload
2897 .end
2898 .and_then(language::proto::deserialize_anchor)
2899 .ok_or_else(|| anyhow!("invalid end"))?;
2900 let buffer = this.update(&mut cx, |this, cx| {
2901 this.opened_buffers
2902 .get(&envelope.payload.buffer_id)
2903 .map(|buffer| buffer.upgrade(cx).unwrap())
2904 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2905 })?;
2906 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2907 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2908 Err(anyhow!("code action request references unreceived edits"))?;
2909 }
2910 let code_actions = this.update(&mut cx, |this, cx| {
2911 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2912 })?;
2913
2914 Ok(proto::GetCodeActionsResponse {
2915 actions: code_actions
2916 .await?
2917 .iter()
2918 .map(language::proto::serialize_code_action)
2919 .collect(),
2920 version: (&version).into(),
2921 })
2922 }
2923
2924 async fn handle_apply_code_action(
2925 this: ModelHandle<Self>,
2926 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2927 _: Arc<Client>,
2928 mut cx: AsyncAppContext,
2929 ) -> Result<proto::ApplyCodeActionResponse> {
2930 let sender_id = envelope.original_sender_id()?;
2931 let action = language::proto::deserialize_code_action(
2932 envelope
2933 .payload
2934 .action
2935 .ok_or_else(|| anyhow!("invalid action"))?,
2936 )?;
2937 let apply_code_action = this.update(&mut cx, |this, cx| {
2938 let buffer = this
2939 .opened_buffers
2940 .get(&envelope.payload.buffer_id)
2941 .map(|buffer| buffer.upgrade(cx).unwrap())
2942 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2943 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2944 })?;
2945
2946 let project_transaction = apply_code_action.await?;
2947 let project_transaction = this.update(&mut cx, |this, cx| {
2948 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2949 });
2950 Ok(proto::ApplyCodeActionResponse {
2951 transaction: Some(project_transaction),
2952 })
2953 }
2954
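    /// Generic handler for RPC messages that wrap an `LspCommand`: the payload is decoded into
    /// the command, run through `request_lsp`, and the response is serialized back to proto
    /// along with the buffer version captured when the request was decoded.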
2955 async fn handle_lsp_command<T: LspCommand>(
2956 this: ModelHandle<Self>,
2957 envelope: TypedEnvelope<T::ProtoRequest>,
2958 _: Arc<Client>,
2959 mut cx: AsyncAppContext,
2960 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
2961 where
2962 <T::LspRequest as lsp::request::Request>::Result: Send,
2963 {
2964 let sender_id = envelope.original_sender_id()?;
2965 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
2966 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
2967 let buffer_handle = this
2968 .opened_buffers
2969 .get(&buffer_id)
2970 .map(|buffer| buffer.upgrade(cx).unwrap())
2971 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2972 let buffer = buffer_handle.read(cx);
2973 let buffer_version = buffer.version();
2974 let request = T::from_proto(envelope.payload, this, buffer)?;
2975 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
2976 })?;
2977 let response = request.await?;
2978 this.update(&mut cx, |this, cx| {
2979 Ok(T::response_to_proto(
2980 response,
2981 this,
2982 sender_id,
2983 &buffer_version,
2984 cx,
2985 ))
2986 })
2987 }
2988
2989 async fn handle_get_project_symbols(
2990 this: ModelHandle<Self>,
2991 envelope: TypedEnvelope<proto::GetProjectSymbols>,
2992 _: Arc<Client>,
2993 mut cx: AsyncAppContext,
2994 ) -> Result<proto::GetProjectSymbolsResponse> {
2995 let symbols = this
2996 .update(&mut cx, |this, cx| {
2997 this.symbols(&envelope.payload.query, cx)
2998 })
2999 .await?;
3000
3001 Ok(proto::GetProjectSymbolsResponse {
3002 symbols: symbols.iter().map(serialize_symbol).collect(),
3003 })
3004 }
3005
3006 async fn handle_search_project(
3007 this: ModelHandle<Self>,
3008 envelope: TypedEnvelope<proto::SearchProject>,
3009 _: Arc<Client>,
3010 mut cx: AsyncAppContext,
3011 ) -> Result<proto::SearchProjectResponse> {
3012 let peer_id = envelope.original_sender_id()?;
3013 let query = SearchQuery::from_proto(envelope.payload)?;
3014 let result = this
3015 .update(&mut cx, |this, cx| this.search(query, cx))
3016 .await?;
3017
3018 this.update(&mut cx, |this, cx| {
3019 let mut locations = Vec::new();
3020 for (buffer, ranges) in result {
3021 for range in ranges {
3022 let start = serialize_anchor(&range.start);
3023 let end = serialize_anchor(&range.end);
3024 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3025 locations.push(proto::Location {
3026 buffer: Some(buffer),
3027 start: Some(start),
3028 end: Some(end),
3029 });
3030 }
3031 }
3032 Ok(proto::SearchProjectResponse { locations })
3033 })
3034 }
3035
3036 async fn handle_open_buffer_for_symbol(
3037 this: ModelHandle<Self>,
3038 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3039 _: Arc<Client>,
3040 mut cx: AsyncAppContext,
3041 ) -> Result<proto::OpenBufferForSymbolResponse> {
3042 let peer_id = envelope.original_sender_id()?;
3043 let symbol = envelope
3044 .payload
3045 .symbol
3046 .ok_or_else(|| anyhow!("invalid symbol"))?;
3047 let symbol = this.read_with(&cx, |this, _| {
3048 let symbol = this.deserialize_symbol(symbol)?;
3049 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3050 if signature == symbol.signature {
3051 Ok(symbol)
3052 } else {
3053 Err(anyhow!("invalid symbol signature"))
3054 }
3055 })?;
3056 let buffer = this
3057 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3058 .await?;
3059
3060 Ok(proto::OpenBufferForSymbolResponse {
3061 buffer: Some(this.update(&mut cx, |this, cx| {
3062 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3063 })),
3064 })
3065 }
3066
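    /// Produces a signature binding a symbol to this project instance by hashing the worktree
    /// id, the path, and a per-project random nonce. Symbols echoed back by peers are checked
    /// against this signature (see `handle_open_buffer_for_symbol`) before their buffers are
    /// opened.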
3067 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3068 let mut hasher = Sha256::new();
3069 hasher.update(worktree_id.to_proto().to_be_bytes());
3070 hasher.update(path.to_string_lossy().as_bytes());
3071 hasher.update(self.nonce.to_be_bytes());
3072 hasher.finalize().as_slice().try_into().unwrap()
3073 }
3074
3075 async fn handle_open_buffer(
3076 this: ModelHandle<Self>,
3077 envelope: TypedEnvelope<proto::OpenBuffer>,
3078 _: Arc<Client>,
3079 mut cx: AsyncAppContext,
3080 ) -> Result<proto::OpenBufferResponse> {
3081 let peer_id = envelope.original_sender_id()?;
3082 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3083 let open_buffer = this.update(&mut cx, |this, cx| {
3084 this.open_buffer(
3085 ProjectPath {
3086 worktree_id,
3087 path: PathBuf::from(envelope.payload.path).into(),
3088 },
3089 cx,
3090 )
3091 });
3092
3093 let buffer = open_buffer.await?;
3094 this.update(&mut cx, |this, cx| {
3095 Ok(proto::OpenBufferResponse {
3096 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3097 })
3098 })
3099 }
3100
3101 fn serialize_project_transaction_for_peer(
3102 &mut self,
3103 project_transaction: ProjectTransaction,
3104 peer_id: PeerId,
3105 cx: &AppContext,
3106 ) -> proto::ProjectTransaction {
3107 let mut serialized_transaction = proto::ProjectTransaction {
3108 buffers: Default::default(),
3109 transactions: Default::default(),
3110 };
3111 for (buffer, transaction) in project_transaction.0 {
3112 serialized_transaction
3113 .buffers
3114 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3115 serialized_transaction
3116 .transactions
3117 .push(language::proto::serialize_transaction(&transaction));
3118 }
3119 serialized_transaction
3120 }
3121
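    /// Reconstructs a `ProjectTransaction` received over RPC: each serialized buffer is
    /// resolved to a local buffer handle, the edits referenced by its transaction are awaited,
    /// and the transaction is optionally pushed onto the buffer's undo history.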
3122 fn deserialize_project_transaction(
3123 &mut self,
3124 message: proto::ProjectTransaction,
3125 push_to_history: bool,
3126 cx: &mut ModelContext<Self>,
3127 ) -> Task<Result<ProjectTransaction>> {
3128 cx.spawn(|this, mut cx| async move {
3129 let mut project_transaction = ProjectTransaction::default();
3130 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3131 let buffer = this
3132 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3133 .await?;
3134 let transaction = language::proto::deserialize_transaction(transaction)?;
3135 project_transaction.0.insert(buffer, transaction);
3136 }
3137
3138 for (buffer, transaction) in &project_transaction.0 {
3139 buffer
3140 .update(&mut cx, |buffer, _| {
3141 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3142 })
3143 .await;
3144
3145 if push_to_history {
3146 buffer.update(&mut cx, |buffer, _| {
3147 buffer.push_transaction(transaction.clone(), Instant::now());
3148 });
3149 }
3150 }
3151
3152 Ok(project_transaction)
3153 })
3154 }
3155
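    /// Serializes a buffer for a given peer. The full buffer state is sent only the first time
    /// the buffer is shared with that peer; afterwards just the buffer id is sent.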
3156 fn serialize_buffer_for_peer(
3157 &mut self,
3158 buffer: &ModelHandle<Buffer>,
3159 peer_id: PeerId,
3160 cx: &AppContext,
3161 ) -> proto::Buffer {
3162 let buffer_id = buffer.read(cx).remote_id();
3163 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3164 if shared_buffers.insert(buffer_id) {
3165 proto::Buffer {
3166 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3167 }
3168 } else {
3169 proto::Buffer {
3170 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3171 }
3172 }
3173 }
3174
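    /// Resolves a serialized buffer received from the host: either waits (via the
    /// `opened_buffer` watch) for a buffer with the given id to finish opening locally, or
    /// constructs a new buffer and its `File` from the serialized state and registers it with
    /// the project.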
3175 fn deserialize_buffer(
3176 &mut self,
3177 buffer: proto::Buffer,
3178 cx: &mut ModelContext<Self>,
3179 ) -> Task<Result<ModelHandle<Buffer>>> {
3180 let replica_id = self.replica_id();
3181
3182 let opened_buffer_tx = self.opened_buffer.0.clone();
3183 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3184 cx.spawn(|this, mut cx| async move {
3185 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3186 proto::buffer::Variant::Id(id) => {
3187 let buffer = loop {
3188 let buffer = this.read_with(&cx, |this, cx| {
3189 this.opened_buffers
3190 .get(&id)
3191 .and_then(|buffer| buffer.upgrade(cx))
3192 });
3193 if let Some(buffer) = buffer {
3194 break buffer;
3195 }
3196 opened_buffer_rx
3197 .next()
3198 .await
3199 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3200 };
3201 Ok(buffer)
3202 }
3203 proto::buffer::Variant::State(mut buffer) => {
3204 let mut buffer_worktree = None;
3205 let mut buffer_file = None;
3206 if let Some(file) = buffer.file.take() {
3207 this.read_with(&cx, |this, cx| {
3208 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3209 let worktree =
3210 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3211 anyhow!("no worktree found for id {}", file.worktree_id)
3212 })?;
3213 buffer_file =
3214 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3215 as Box<dyn language::File>);
3216 buffer_worktree = Some(worktree);
3217 Ok::<_, anyhow::Error>(())
3218 })?;
3219 }
3220
3221 let buffer = cx.add_model(|cx| {
3222 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3223 });
3224
3225 this.update(&mut cx, |this, cx| {
3226 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3227 })?;
3228
3229 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3230 Ok(buffer)
3231 }
3232 }
3233 })
3234 }
3235
3236 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3237 let language = self
3238 .languages
3239 .get_language(&serialized_symbol.language_name);
3240 let start = serialized_symbol
3241 .start
3242 .ok_or_else(|| anyhow!("invalid start"))?;
3243 let end = serialized_symbol
3244 .end
3245 .ok_or_else(|| anyhow!("invalid end"))?;
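        // The symbol kind arrives as a raw integer on the wire; reinterpret it as an
        // `lsp::SymbolKind`.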
3246 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3247 Ok(Symbol {
3248 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3249 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3250 language_name: serialized_symbol.language_name.clone(),
3251 label: language
3252 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3253 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3254 name: serialized_symbol.name,
3255 path: PathBuf::from(serialized_symbol.path),
3256 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3257 kind,
3258 signature: serialized_symbol
3259 .signature
3260 .try_into()
3261 .map_err(|_| anyhow!("invalid signature"))?,
3262 })
3263 }
3264
3265 async fn handle_close_buffer(
3266 _: ModelHandle<Self>,
3267 _: TypedEnvelope<proto::CloseBuffer>,
3268 _: Arc<Client>,
3269 _: AsyncAppContext,
3270 ) -> Result<()> {
3271 // TODO: use this for following
3272 Ok(())
3273 }
3274
3275 async fn handle_buffer_saved(
3276 this: ModelHandle<Self>,
3277 envelope: TypedEnvelope<proto::BufferSaved>,
3278 _: Arc<Client>,
3279 mut cx: AsyncAppContext,
3280 ) -> Result<()> {
3281 let version = envelope.payload.version.try_into()?;
3282 let mtime = envelope
3283 .payload
3284 .mtime
3285 .ok_or_else(|| anyhow!("missing mtime"))?
3286 .into();
3287
3288 this.update(&mut cx, |this, cx| {
3289 let buffer = this
3290 .opened_buffers
3291 .get(&envelope.payload.buffer_id)
3292 .and_then(|buffer| buffer.upgrade(cx));
3293 if let Some(buffer) = buffer {
3294 buffer.update(cx, |buffer, cx| {
3295 buffer.did_save(version, mtime, None, cx);
3296 });
3297 }
3298 Ok(())
3299 })
3300 }
3301
3302 async fn handle_buffer_reloaded(
3303 this: ModelHandle<Self>,
3304 envelope: TypedEnvelope<proto::BufferReloaded>,
3305 _: Arc<Client>,
3306 mut cx: AsyncAppContext,
3307 ) -> Result<()> {
3308 let payload = envelope.payload.clone();
3309 let version = payload.version.try_into()?;
3310 let mtime = payload
3311 .mtime
3312 .ok_or_else(|| anyhow!("missing mtime"))?
3313 .into();
3314 this.update(&mut cx, |this, cx| {
3315 let buffer = this
3316 .opened_buffers
3317 .get(&payload.buffer_id)
3318 .and_then(|buffer| buffer.upgrade(cx));
3319 if let Some(buffer) = buffer {
3320 buffer.update(cx, |buffer, cx| {
3321 buffer.did_reload(version, mtime, cx);
3322 });
3323 }
3324 Ok(())
3325 })
3326 }
3327
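    /// Fuzzy-matches `query` against the file paths of all non-weak worktrees. Root names are
    /// included in the match prefix only when the project contains more than one worktree.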
3328 pub fn match_paths<'a>(
3329 &self,
3330 query: &'a str,
3331 include_ignored: bool,
3332 smart_case: bool,
3333 max_results: usize,
3334 cancel_flag: &'a AtomicBool,
3335 cx: &AppContext,
3336 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3337 let worktrees = self
3338 .worktrees(cx)
3339 .filter(|worktree| !worktree.read(cx).is_weak())
3340 .collect::<Vec<_>>();
3341 let include_root_name = worktrees.len() > 1;
3342 let candidate_sets = worktrees
3343 .into_iter()
3344 .map(|worktree| CandidateSet {
3345 snapshot: worktree.read(cx).snapshot(),
3346 include_ignored,
3347 include_root_name,
3348 })
3349 .collect::<Vec<_>>();
3350
3351 let background = cx.background().clone();
3352 async move {
3353 fuzzy::match_paths(
3354 candidate_sets.as_slice(),
3355 query,
3356 smart_case,
3357 max_results,
3358 cancel_flag,
3359 background,
3360 )
3361 .await
3362 }
3363 }
3364}
3365
3366impl WorktreeHandle {
3367 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3368 match self {
3369 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3370 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3371 }
3372 }
3373}
3374
3375impl OpenBuffer {
3376 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3377 match self {
3378 OpenBuffer::Strong(handle) => Some(handle.clone()),
3379 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3380 OpenBuffer::Loading(_) => None,
3381 }
3382 }
3383}
3384
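/// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet` interface so that
/// `match_paths` can fuzzy-match against the files of every worktree in the project.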
3385struct CandidateSet {
3386 snapshot: Snapshot,
3387 include_ignored: bool,
3388 include_root_name: bool,
3389}
3390
3391impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3392 type Candidates = CandidateSetIter<'a>;
3393
3394 fn id(&self) -> usize {
3395 self.snapshot.id().to_usize()
3396 }
3397
3398 fn len(&self) -> usize {
3399 if self.include_ignored {
3400 self.snapshot.file_count()
3401 } else {
3402 self.snapshot.visible_file_count()
3403 }
3404 }
3405
3406 fn prefix(&self) -> Arc<str> {
3407 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3408 self.snapshot.root_name().into()
3409 } else if self.include_root_name {
3410 format!("{}/", self.snapshot.root_name()).into()
3411 } else {
3412 "".into()
3413 }
3414 }
3415
3416 fn candidates(&'a self, start: usize) -> Self::Candidates {
3417 CandidateSetIter {
3418 traversal: self.snapshot.files(self.include_ignored, start),
3419 }
3420 }
3421}
3422
3423struct CandidateSetIter<'a> {
3424 traversal: Traversal<'a>,
3425}
3426
3427impl<'a> Iterator for CandidateSetIter<'a> {
3428 type Item = PathMatchCandidate<'a>;
3429
3430 fn next(&mut self) -> Option<Self::Item> {
3431 self.traversal.next().map(|entry| {
3432 if let EntryKind::File(char_bag) = entry.kind {
3433 PathMatchCandidate {
3434 path: &entry.path,
3435 char_bag,
3436 }
3437 } else {
3438 unreachable!()
3439 }
3440 })
3441 }
3442}
3443
3444impl Entity for Project {
3445 type Event = Event;
3446
3447 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3448 match &self.client_state {
3449 ProjectClientState::Local { remote_id_rx, .. } => {
3450 if let Some(project_id) = *remote_id_rx.borrow() {
3451 self.client
3452 .send(proto::UnregisterProject { project_id })
3453 .log_err();
3454 }
3455 }
3456 ProjectClientState::Remote { remote_id, .. } => {
3457 self.client
3458 .send(proto::LeaveProject {
3459 project_id: *remote_id,
3460 })
3461 .log_err();
3462 }
3463 }
3464 }
3465
3466 fn app_will_quit(
3467 &mut self,
3468 _: &mut MutableAppContext,
3469 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3470 let shutdown_futures = self
3471 .language_servers
3472 .drain()
3473 .filter_map(|(_, server)| server.shutdown())
3474 .collect::<Vec<_>>();
3475 Some(
3476 async move {
3477 futures::future::join_all(shutdown_futures).await;
3478 }
3479 .boxed(),
3480 )
3481 }
3482}
3483
3484impl Collaborator {
3485 fn from_proto(
3486 message: proto::Collaborator,
3487 user_store: &ModelHandle<UserStore>,
3488 cx: &mut AsyncAppContext,
3489 ) -> impl Future<Output = Result<Self>> {
3490 let user = user_store.update(cx, |user_store, cx| {
3491 user_store.fetch_user(message.user_id, cx)
3492 });
3493
3494 async move {
3495 Ok(Self {
3496 peer_id: PeerId(message.peer_id),
3497 user: user.await?,
3498 replica_id: message.replica_id as ReplicaId,
3499 })
3500 }
3501 }
3502}
3503
3504impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3505 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3506 Self {
3507 worktree_id,
3508 path: path.as_ref().into(),
3509 }
3510 }
3511}
3512
3513impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3514 fn from(options: lsp::CreateFileOptions) -> Self {
3515 Self {
3516 overwrite: options.overwrite.unwrap_or(false),
3517 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3518 }
3519 }
3520}
3521
3522impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3523 fn from(options: lsp::RenameFileOptions) -> Self {
3524 Self {
3525 overwrite: options.overwrite.unwrap_or(false),
3526 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3527 }
3528 }
3529}
3530
3531impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3532 fn from(options: lsp::DeleteFileOptions) -> Self {
3533 Self {
3534 recursive: options.recursive.unwrap_or(false),
3535 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3536 }
3537 }
3538}
3539
3540fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3541 proto::Symbol {
3542 source_worktree_id: symbol.source_worktree_id.to_proto(),
3543 worktree_id: symbol.worktree_id.to_proto(),
3544 language_name: symbol.language_name.clone(),
3545 name: symbol.name.clone(),
3546 kind: unsafe { mem::transmute(symbol.kind) },
3547 path: symbol.path.to_string_lossy().to_string(),
3548 start: Some(proto::Point {
3549 row: symbol.range.start.row,
3550 column: symbol.range.start.column,
3551 }),
3552 end: Some(proto::Point {
3553 row: symbol.range.end.row,
3554 column: symbol.range.end.column,
3555 }),
3556 signature: symbol.signature.to_vec(),
3557 }
3558}
3559
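/// Computes the path of `path` relative to `base`, inserting `..` components where the two
/// paths diverge (e.g. relativizing `/a/c/d` against `/a/b` yields `../c/d`), unlike
/// `Path::strip_prefix`, which fails when `base` is not a prefix of `path`.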
3560fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3561 let mut path_components = path.components();
3562 let mut base_components = base.components();
3563 let mut components: Vec<Component> = Vec::new();
3564 loop {
3565 match (path_components.next(), base_components.next()) {
3566 (None, None) => break,
3567 (Some(a), None) => {
3568 components.push(a);
3569 components.extend(path_components.by_ref());
3570 break;
3571 }
3572 (None, _) => components.push(Component::ParentDir),
3573 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3574 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3575 (Some(a), Some(_)) => {
3576 components.push(Component::ParentDir);
3577 for _ in base_components {
3578 components.push(Component::ParentDir);
3579 }
3580 components.push(a);
3581 components.extend(path_components.by_ref());
3582 break;
3583 }
3584 }
3585 }
3586 components.iter().map(|c| c.as_os_str()).collect()
3587}
3588
3589#[cfg(test)]
3590mod tests {
3591 use super::{Event, *};
3592 use fs::RealFs;
3593 use futures::StreamExt;
3594 use gpui::test::subscribe;
3595 use language::{
3596 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3597 };
3598 use lsp::Url;
3599 use serde_json::json;
3600 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3601 use unindent::Unindent as _;
3602 use util::test::temp_tree;
3603 use worktree::WorktreeHandle as _;
3604
3605 #[gpui::test]
3606 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3607 let dir = temp_tree(json!({
3608 "root": {
3609 "apple": "",
3610 "banana": {
3611 "carrot": {
3612 "date": "",
3613 "endive": "",
3614 }
3615 },
3616 "fennel": {
3617 "grape": "",
3618 }
3619 }
3620 }));
3621
3622 let root_link_path = dir.path().join("root_link");
3623 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3624 unix::fs::symlink(
3625 &dir.path().join("root/fennel"),
3626 &dir.path().join("root/finnochio"),
3627 )
3628 .unwrap();
3629
3630 let project = Project::test(Arc::new(RealFs), cx);
3631
3632 let (tree, _) = project
3633 .update(cx, |project, cx| {
3634 project.find_or_create_local_worktree(&root_link_path, false, cx)
3635 })
3636 .await
3637 .unwrap();
3638
3639 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3640 .await;
3641 cx.read(|cx| {
3642 let tree = tree.read(cx);
3643 assert_eq!(tree.file_count(), 5);
3644 assert_eq!(
3645 tree.inode_for_path("fennel/grape"),
3646 tree.inode_for_path("finnochio/grape")
3647 );
3648 });
3649
3650 let cancel_flag = Default::default();
3651 let results = project
3652 .read_with(cx, |project, cx| {
3653 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3654 })
3655 .await;
3656 assert_eq!(
3657 results
3658 .into_iter()
3659 .map(|result| result.path)
3660 .collect::<Vec<Arc<Path>>>(),
3661 vec![
3662 PathBuf::from("banana/carrot/date").into(),
3663 PathBuf::from("banana/carrot/endive").into(),
3664 ]
3665 );
3666 }
3667
3668 #[gpui::test]
3669 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3670 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3671 let progress_token = language_server_config
3672 .disk_based_diagnostics_progress_token
3673 .clone()
3674 .unwrap();
3675
3676 let language = Arc::new(Language::new(
3677 LanguageConfig {
3678 name: "Rust".into(),
3679 path_suffixes: vec!["rs".to_string()],
3680 language_server: Some(language_server_config),
3681 ..Default::default()
3682 },
3683 Some(tree_sitter_rust::language()),
3684 ));
3685
3686 let fs = FakeFs::new(cx.background());
3687 fs.insert_tree(
3688 "/dir",
3689 json!({
3690 "a.rs": "fn a() { A }",
3691 "b.rs": "const y: i32 = 1",
3692 }),
3693 )
3694 .await;
3695
3696 let project = Project::test(fs, cx);
3697 project.update(cx, |project, _| {
3698 Arc::get_mut(&mut project.languages).unwrap().add(language);
3699 });
3700
3701 let (tree, _) = project
3702 .update(cx, |project, cx| {
3703 project.find_or_create_local_worktree("/dir", false, cx)
3704 })
3705 .await
3706 .unwrap();
3707 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3708
3709 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3710 .await;
3711
3712 // Cause worktree to start the fake language server
3713 let _buffer = project
3714 .update(cx, |project, cx| {
3715 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3716 })
3717 .await
3718 .unwrap();
3719
3720 let mut events = subscribe(&project, cx);
3721
3722 let mut fake_server = fake_servers.next().await.unwrap();
3723 fake_server.start_progress(&progress_token).await;
3724 assert_eq!(
3725 events.next().await.unwrap(),
3726 Event::DiskBasedDiagnosticsStarted
3727 );
3728
3729 fake_server.start_progress(&progress_token).await;
3730 fake_server.end_progress(&progress_token).await;
3731 fake_server.start_progress(&progress_token).await;
3732
3733 fake_server
3734 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3735 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3736 version: None,
3737 diagnostics: vec![lsp::Diagnostic {
3738 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3739 severity: Some(lsp::DiagnosticSeverity::ERROR),
3740 message: "undefined variable 'A'".to_string(),
3741 ..Default::default()
3742 }],
3743 })
3744 .await;
3745 assert_eq!(
3746 events.next().await.unwrap(),
3747 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3748 );
3749
3750 fake_server.end_progress(&progress_token).await;
3751 fake_server.end_progress(&progress_token).await;
3752 assert_eq!(
3753 events.next().await.unwrap(),
3754 Event::DiskBasedDiagnosticsUpdated
3755 );
3756 assert_eq!(
3757 events.next().await.unwrap(),
3758 Event::DiskBasedDiagnosticsFinished
3759 );
3760
3761 let buffer = project
3762 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3763 .await
3764 .unwrap();
3765
3766 buffer.read_with(cx, |buffer, _| {
3767 let snapshot = buffer.snapshot();
3768 let diagnostics = snapshot
3769 .diagnostics_in_range::<_, Point>(0..buffer.len())
3770 .collect::<Vec<_>>();
3771 assert_eq!(
3772 diagnostics,
3773 &[DiagnosticEntry {
3774 range: Point::new(0, 9)..Point::new(0, 10),
3775 diagnostic: Diagnostic {
3776 severity: lsp::DiagnosticSeverity::ERROR,
3777 message: "undefined variable 'A'".to_string(),
3778 group_id: 0,
3779 is_primary: true,
3780 ..Default::default()
3781 }
3782 }]
3783 )
3784 });
3785 }
3786
3787 #[gpui::test]
3788 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3789 let dir = temp_tree(json!({
3790 "root": {
3791 "dir1": {},
3792 "dir2": {
3793 "dir3": {}
3794 }
3795 }
3796 }));
3797
3798 let project = Project::test(Arc::new(RealFs), cx);
3799 let (tree, _) = project
3800 .update(cx, |project, cx| {
3801 project.find_or_create_local_worktree(&dir.path(), false, cx)
3802 })
3803 .await
3804 .unwrap();
3805
3806 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3807 .await;
3808
3809 let cancel_flag = Default::default();
3810 let results = project
3811 .read_with(cx, |project, cx| {
3812 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3813 })
3814 .await;
3815
3816 assert!(results.is_empty());
3817 }
3818
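    // Go-to-definition into a file outside the current worktree should open the target in a weak
    // worktree, which is released once the definition is dropped.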
3819 #[gpui::test]
3820 async fn test_definition(cx: &mut gpui::TestAppContext) {
3821 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3822 let language = Arc::new(Language::new(
3823 LanguageConfig {
3824 name: "Rust".into(),
3825 path_suffixes: vec!["rs".to_string()],
3826 language_server: Some(language_server_config),
3827 ..Default::default()
3828 },
3829 Some(tree_sitter_rust::language()),
3830 ));
3831
3832 let fs = FakeFs::new(cx.background());
3833 fs.insert_tree(
3834 "/dir",
3835 json!({
3836 "a.rs": "const fn a() { A }",
3837 "b.rs": "const y: i32 = crate::a()",
3838 }),
3839 )
3840 .await;
3841
3842 let project = Project::test(fs, cx);
3843 project.update(cx, |project, _| {
3844 Arc::get_mut(&mut project.languages).unwrap().add(language);
3845 });
3846
3847 let (tree, _) = project
3848 .update(cx, |project, cx| {
3849 project.find_or_create_local_worktree("/dir/b.rs", false, cx)
3850 })
3851 .await
3852 .unwrap();
3853 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3854 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3855 .await;
3856
3857 let buffer = project
3858 .update(cx, |project, cx| {
3859 project.open_buffer(
3860 ProjectPath {
3861 worktree_id,
3862 path: Path::new("").into(),
3863 },
3864 cx,
3865 )
3866 })
3867 .await
3868 .unwrap();
3869
3870 let mut fake_server = fake_servers.next().await.unwrap();
3871 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3872 let params = params.text_document_position_params;
3873 assert_eq!(
3874 params.text_document.uri.to_file_path().unwrap(),
3875 Path::new("/dir/b.rs"),
3876 );
3877 assert_eq!(params.position, lsp::Position::new(0, 22));
3878
3879 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3880 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3881 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3882 )))
3883 });
3884
3885 let mut definitions = project
3886 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
3887 .await
3888 .unwrap();
3889
3890 assert_eq!(definitions.len(), 1);
3891 let definition = definitions.pop().unwrap();
3892 cx.update(|cx| {
3893 let target_buffer = definition.buffer.read(cx);
3894 assert_eq!(
3895 target_buffer
3896 .file()
3897 .unwrap()
3898 .as_local()
3899 .unwrap()
3900 .abs_path(cx),
3901 Path::new("/dir/a.rs"),
3902 );
3903 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
3904 assert_eq!(
3905 list_worktrees(&project, cx),
3906 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
3907 );
3908
3909 drop(definition);
3910 });
3911 cx.read(|cx| {
3912 assert_eq!(
3913 list_worktrees(&project, cx),
3914 [("/dir/b.rs".as_ref(), false)]
3915 );
3916 });
3917
3918 fn list_worktrees<'a>(
3919 project: &'a ModelHandle<Project>,
3920 cx: &'a AppContext,
3921 ) -> Vec<(&'a Path, bool)> {
3922 project
3923 .read(cx)
3924 .worktrees(cx)
3925 .map(|worktree| {
3926 let worktree = worktree.read(cx);
3927 (
3928 worktree.as_local().unwrap().abs_path().as_ref(),
3929 worktree.is_weak(),
3930 )
3931 })
3932 .collect::<Vec<_>>()
3933 }
3934 }
3935
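    // Saving a buffer should write its edited contents back to the underlying file.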
3936 #[gpui::test]
3937 async fn test_save_file(cx: &mut gpui::TestAppContext) {
3938 let fs = FakeFs::new(cx.background());
3939 fs.insert_tree(
3940 "/dir",
3941 json!({
3942 "file1": "the old contents",
3943 }),
3944 )
3945 .await;
3946
3947 let project = Project::test(fs.clone(), cx);
3948 let worktree_id = project
3949 .update(cx, |p, cx| {
3950 p.find_or_create_local_worktree("/dir", false, cx)
3951 })
3952 .await
3953 .unwrap()
3954 .0
3955 .read_with(cx, |tree, _| tree.id());
3956
3957 let buffer = project
3958 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
3959 .await
3960 .unwrap();
3961 buffer
3962 .update(cx, |buffer, cx| {
3963 assert_eq!(buffer.text(), "the old contents");
3964 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
3965 buffer.save(cx)
3966 })
3967 .await
3968 .unwrap();
3969
3970 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3971 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
3972 }
3973
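    // Like test_save_file, but the worktree is rooted at the file itself, so the buffer is opened
    // with an empty relative path.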
3974 #[gpui::test]
3975 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3976 let fs = FakeFs::new(cx.background());
3977 fs.insert_tree(
3978 "/dir",
3979 json!({
3980 "file1": "the old contents",
3981 }),
3982 )
3983 .await;
3984
3985 let project = Project::test(fs.clone(), cx);
3986 let worktree_id = project
3987 .update(cx, |p, cx| {
3988 p.find_or_create_local_worktree("/dir/file1", false, cx)
3989 })
3990 .await
3991 .unwrap()
3992 .0
3993 .read_with(cx, |tree, _| tree.id());
3994
3995 let buffer = project
3996 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
3997 .await
3998 .unwrap();
3999 buffer
4000 .update(cx, |buffer, cx| {
4001 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4002 buffer.save(cx)
4003 })
4004 .await
4005 .unwrap();
4006
4007 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4008 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4009 }
4010
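    // Renaming and deleting files on disk should preserve entry ids, update the paths of open
    // buffers, and be reproducible on a remote copy of the worktree via an update message.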
4011 #[gpui::test(retries = 5)]
4012 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4013 let dir = temp_tree(json!({
4014 "a": {
4015 "file1": "",
4016 "file2": "",
4017 "file3": "",
4018 },
4019 "b": {
4020 "c": {
4021 "file4": "",
4022 "file5": "",
4023 }
4024 }
4025 }));
4026
4027 let project = Project::test(Arc::new(RealFs), cx);
4028 let rpc = project.read_with(cx, |p, _| p.client.clone());
4029
4030 let (tree, _) = project
4031 .update(cx, |p, cx| {
4032 p.find_or_create_local_worktree(dir.path(), false, cx)
4033 })
4034 .await
4035 .unwrap();
4036 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4037
4038 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4039 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4040 async move { buffer.await.unwrap() }
4041 };
4042 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4043 tree.read_with(cx, |tree, _| {
4044 tree.entry_for_path(path)
4045 .expect(&format!("no entry for path {}", path))
4046 .id
4047 })
4048 };
4049
4050 let buffer2 = buffer_for_path("a/file2", cx).await;
4051 let buffer3 = buffer_for_path("a/file3", cx).await;
4052 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4053 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4054
4055 let file2_id = id_for_path("a/file2", &cx);
4056 let file3_id = id_for_path("a/file3", &cx);
4057 let file4_id = id_for_path("b/c/file4", &cx);
4058
4059 // Wait for the initial scan.
4060 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4061 .await;
4062
4063 // Create a remote copy of this worktree.
4064 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4065 let (remote, load_task) = cx.update(|cx| {
4066 Worktree::remote(
4067 1,
4068 1,
4069 initial_snapshot.to_proto(&Default::default(), Default::default()),
4070 rpc.clone(),
4071 cx,
4072 )
4073 });
4074 load_task.await;
4075
4076 cx.read(|cx| {
4077 assert!(!buffer2.read(cx).is_dirty());
4078 assert!(!buffer3.read(cx).is_dirty());
4079 assert!(!buffer4.read(cx).is_dirty());
4080 assert!(!buffer5.read(cx).is_dirty());
4081 });
4082
4083 // Rename and delete files and directories.
4084 tree.flush_fs_events(&cx).await;
4085 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4086 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4087 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4088 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4089 tree.flush_fs_events(&cx).await;
4090
4091 let expected_paths = vec![
4092 "a",
4093 "a/file1",
4094 "a/file2.new",
4095 "b",
4096 "d",
4097 "d/file3",
4098 "d/file4",
4099 ];
4100
4101 cx.read(|app| {
4102 assert_eq!(
4103 tree.read(app)
4104 .paths()
4105 .map(|p| p.to_str().unwrap())
4106 .collect::<Vec<_>>(),
4107 expected_paths
4108 );
4109
4110 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4111 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4112 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4113
4114 assert_eq!(
4115 buffer2.read(app).file().unwrap().path().as_ref(),
4116 Path::new("a/file2.new")
4117 );
4118 assert_eq!(
4119 buffer3.read(app).file().unwrap().path().as_ref(),
4120 Path::new("d/file3")
4121 );
4122 assert_eq!(
4123 buffer4.read(app).file().unwrap().path().as_ref(),
4124 Path::new("d/file4")
4125 );
4126 assert_eq!(
4127 buffer5.read(app).file().unwrap().path().as_ref(),
4128 Path::new("b/c/file5")
4129 );
4130
4131 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4132 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4133 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4134 assert!(buffer5.read(app).file().unwrap().is_deleted());
4135 });
4136
4137 // Update the remote worktree. Check that it becomes consistent with the
4138 // local worktree.
4139 remote.update(cx, |remote, cx| {
4140 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4141 &initial_snapshot,
4142 1,
4143 1,
4144 true,
4145 );
4146 remote
4147 .as_remote_mut()
4148 .unwrap()
4149 .snapshot
4150 .apply_remote_update(update_message)
4151 .unwrap();
4152
4153 assert_eq!(
4154 remote
4155 .paths()
4156 .map(|p| p.to_str().unwrap())
4157 .collect::<Vec<_>>(),
4158 expected_paths
4159 );
4160 });
4161 }
4162
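    // Opening the same path multiple times, even concurrently, should yield a single shared buffer.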
4163 #[gpui::test]
4164 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4165 let fs = FakeFs::new(cx.background());
4166 fs.insert_tree(
4167 "/the-dir",
4168 json!({
4169 "a.txt": "a-contents",
4170 "b.txt": "b-contents",
4171 }),
4172 )
4173 .await;
4174
4175 let project = Project::test(fs.clone(), cx);
4176 let worktree_id = project
4177 .update(cx, |p, cx| {
4178 p.find_or_create_local_worktree("/the-dir", false, cx)
4179 })
4180 .await
4181 .unwrap()
4182 .0
4183 .read_with(cx, |tree, _| tree.id());
4184
4185 // Spawn multiple tasks to open paths, repeating some paths.
4186 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4187 (
4188 p.open_buffer((worktree_id, "a.txt"), cx),
4189 p.open_buffer((worktree_id, "b.txt"), cx),
4190 p.open_buffer((worktree_id, "a.txt"), cx),
4191 )
4192 });
4193
4194 let buffer_a_1 = buffer_a_1.await.unwrap();
4195 let buffer_a_2 = buffer_a_2.await.unwrap();
4196 let buffer_b = buffer_b.await.unwrap();
4197 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4198 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4199
4200 // There is only one buffer per path.
4201 let buffer_a_id = buffer_a_1.id();
4202 assert_eq!(buffer_a_2.id(), buffer_a_id);
4203
        // Open the same path again while another handle to the buffer still exists.
4205 drop(buffer_a_1);
4206 let buffer_a_3 = project
4207 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4208 .await
4209 .unwrap();
4210
4211 // There's still only one buffer per path.
4212 assert_eq!(buffer_a_3.id(), buffer_a_id);
4213 }
4214
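    // Buffers should track their dirty state across edits, saves, and file deletions, emitting the
    // corresponding Dirtied, Saved, and FileHandleChanged events.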
4215 #[gpui::test]
4216 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4217 use std::fs;
4218
4219 let dir = temp_tree(json!({
4220 "file1": "abc",
4221 "file2": "def",
4222 "file3": "ghi",
4223 }));
4224
4225 let project = Project::test(Arc::new(RealFs), cx);
4226 let (worktree, _) = project
4227 .update(cx, |p, cx| {
4228 p.find_or_create_local_worktree(dir.path(), false, cx)
4229 })
4230 .await
4231 .unwrap();
4232 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4233
4234 worktree.flush_fs_events(&cx).await;
4235 worktree
4236 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4237 .await;
4238
4239 let buffer1 = project
4240 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4241 .await
4242 .unwrap();
4243 let events = Rc::new(RefCell::new(Vec::new()));
4244
4245 // initially, the buffer isn't dirty.
4246 buffer1.update(cx, |buffer, cx| {
4247 cx.subscribe(&buffer1, {
4248 let events = events.clone();
4249 move |_, _, event, _| events.borrow_mut().push(event.clone())
4250 })
4251 .detach();
4252
4253 assert!(!buffer.is_dirty());
4254 assert!(events.borrow().is_empty());
4255
4256 buffer.edit(vec![1..2], "", cx);
4257 });
4258
4259 // after the first edit, the buffer is dirty, and emits a dirtied event.
4260 buffer1.update(cx, |buffer, cx| {
4261 assert!(buffer.text() == "ac");
4262 assert!(buffer.is_dirty());
4263 assert_eq!(
4264 *events.borrow(),
4265 &[language::Event::Edited, language::Event::Dirtied]
4266 );
4267 events.borrow_mut().clear();
4268 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4269 });
4270
4271 // after saving, the buffer is not dirty, and emits a saved event.
4272 buffer1.update(cx, |buffer, cx| {
4273 assert!(!buffer.is_dirty());
4274 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4275 events.borrow_mut().clear();
4276
4277 buffer.edit(vec![1..1], "B", cx);
4278 buffer.edit(vec![2..2], "D", cx);
4279 });
4280
4281 // after editing again, the buffer is dirty, and emits another dirty event.
4282 buffer1.update(cx, |buffer, cx| {
4283 assert!(buffer.text() == "aBDc");
4284 assert!(buffer.is_dirty());
4285 assert_eq!(
4286 *events.borrow(),
4287 &[
4288 language::Event::Edited,
4289 language::Event::Dirtied,
4290 language::Event::Edited,
4291 ],
4292 );
4293 events.borrow_mut().clear();
4294
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4297 buffer.edit([1..3], "", cx);
4298 assert!(buffer.text() == "ac");
4299 assert!(buffer.is_dirty());
4300 });
4301
4302 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4303
4304 // When a file is deleted, the buffer is considered dirty.
4305 let events = Rc::new(RefCell::new(Vec::new()));
4306 let buffer2 = project
4307 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4308 .await
4309 .unwrap();
4310 buffer2.update(cx, |_, cx| {
4311 cx.subscribe(&buffer2, {
4312 let events = events.clone();
4313 move |_, _, event, _| events.borrow_mut().push(event.clone())
4314 })
4315 .detach();
4316 });
4317
4318 fs::remove_file(dir.path().join("file2")).unwrap();
4319 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4320 assert_eq!(
4321 *events.borrow(),
4322 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4323 );
4324
        // When a file that is already dirty is deleted, we don't emit a Dirtied event.
4326 let events = Rc::new(RefCell::new(Vec::new()));
4327 let buffer3 = project
4328 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4329 .await
4330 .unwrap();
4331 buffer3.update(cx, |_, cx| {
4332 cx.subscribe(&buffer3, {
4333 let events = events.clone();
4334 move |_, _, event, _| events.borrow_mut().push(event.clone())
4335 })
4336 .detach();
4337 });
4338
4339 worktree.flush_fs_events(&cx).await;
4340 buffer3.update(cx, |buffer, cx| {
4341 buffer.edit(Some(0..0), "x", cx);
4342 });
4343 events.borrow_mut().clear();
4344 fs::remove_file(dir.path().join("file3")).unwrap();
4345 buffer3
4346 .condition(&cx, |_, _| !events.borrow().is_empty())
4347 .await;
4348 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4349 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4350 }
4351
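    // When a file changes on disk, an unmodified buffer reloads the new contents, while a buffer
    // with unsaved edits keeps them and is marked as having a conflict.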
4352 #[gpui::test]
4353 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4354 use std::fs;
4355
4356 let initial_contents = "aaa\nbbbbb\nc\n";
4357 let dir = temp_tree(json!({ "the-file": initial_contents }));
4358
4359 let project = Project::test(Arc::new(RealFs), cx);
4360 let (worktree, _) = project
4361 .update(cx, |p, cx| {
4362 p.find_or_create_local_worktree(dir.path(), false, cx)
4363 })
4364 .await
4365 .unwrap();
4366 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4367
4368 worktree
4369 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4370 .await;
4371
4372 let abs_path = dir.path().join("the-file");
4373 let buffer = project
4374 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4375 .await
4376 .unwrap();
4377
4378 // TODO
4379 // Add a cursor on each row.
4380 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4381 // assert!(!buffer.is_dirty());
4382 // buffer.add_selection_set(
4383 // &(0..3)
4384 // .map(|row| Selection {
4385 // id: row as usize,
4386 // start: Point::new(row, 1),
4387 // end: Point::new(row, 1),
4388 // reversed: false,
4389 // goal: SelectionGoal::None,
4390 // })
4391 // .collect::<Vec<_>>(),
4392 // cx,
4393 // )
4394 // });
4395
4396 // Change the file on disk, adding two new lines of text, and removing
4397 // one line.
4398 buffer.read_with(cx, |buffer, _| {
4399 assert!(!buffer.is_dirty());
4400 assert!(!buffer.has_conflict());
4401 });
4402 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4403 fs::write(&abs_path, new_contents).unwrap();
4404
4405 // Because the buffer was not modified, it is reloaded from disk. Its
4406 // contents are edited according to the diff between the old and new
4407 // file contents.
4408 buffer
4409 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4410 .await;
4411
4412 buffer.update(cx, |buffer, _| {
4413 assert_eq!(buffer.text(), new_contents);
4414 assert!(!buffer.is_dirty());
4415 assert!(!buffer.has_conflict());
4416
4417 // TODO
4418 // let cursor_positions = buffer
4419 // .selection_set(selection_set_id)
4420 // .unwrap()
4421 // .selections::<Point>(&*buffer)
4422 // .map(|selection| {
4423 // assert_eq!(selection.start, selection.end);
4424 // selection.start
4425 // })
4426 // .collect::<Vec<_>>();
4427 // assert_eq!(
4428 // cursor_positions,
4429 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4430 // );
4431 });
4432
4433 // Modify the buffer
4434 buffer.update(cx, |buffer, cx| {
4435 buffer.edit(vec![0..0], " ", cx);
4436 assert!(buffer.is_dirty());
4437 assert!(!buffer.has_conflict());
4438 });
4439
4440 // Change the file on disk again, adding blank lines to the beginning.
4441 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4442
4443 // Because the buffer is modified, it doesn't reload from disk, but is
4444 // marked as having a conflict.
4445 buffer
4446 .condition(&cx, |buffer, _| buffer.has_conflict())
4447 .await;
4448 }
4449
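    // Diagnostics that reference each other via relatedInformation should be grouped, with one
    // primary entry per group.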
4450 #[gpui::test]
4451 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4452 let fs = FakeFs::new(cx.background());
4453 fs.insert_tree(
4454 "/the-dir",
4455 json!({
4456 "a.rs": "
4457 fn foo(mut v: Vec<usize>) {
4458 for x in &v {
4459 v.push(1);
4460 }
4461 }
4462 "
4463 .unindent(),
4464 }),
4465 )
4466 .await;
4467
4468 let project = Project::test(fs.clone(), cx);
4469 let (worktree, _) = project
4470 .update(cx, |p, cx| {
4471 p.find_or_create_local_worktree("/the-dir", false, cx)
4472 })
4473 .await
4474 .unwrap();
4475 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4476
4477 let buffer = project
4478 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4479 .await
4480 .unwrap();
4481
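        // Build a diagnostics message containing two groups: a warning with one related hint, and
        // an error with two related hints.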
4482 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4483 let message = lsp::PublishDiagnosticsParams {
4484 uri: buffer_uri.clone(),
4485 diagnostics: vec![
4486 lsp::Diagnostic {
4487 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4488 severity: Some(DiagnosticSeverity::WARNING),
4489 message: "error 1".to_string(),
4490 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4491 location: lsp::Location {
4492 uri: buffer_uri.clone(),
4493 range: lsp::Range::new(
4494 lsp::Position::new(1, 8),
4495 lsp::Position::new(1, 9),
4496 ),
4497 },
4498 message: "error 1 hint 1".to_string(),
4499 }]),
4500 ..Default::default()
4501 },
4502 lsp::Diagnostic {
4503 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4504 severity: Some(DiagnosticSeverity::HINT),
4505 message: "error 1 hint 1".to_string(),
4506 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4507 location: lsp::Location {
4508 uri: buffer_uri.clone(),
4509 range: lsp::Range::new(
4510 lsp::Position::new(1, 8),
4511 lsp::Position::new(1, 9),
4512 ),
4513 },
4514 message: "original diagnostic".to_string(),
4515 }]),
4516 ..Default::default()
4517 },
4518 lsp::Diagnostic {
4519 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4520 severity: Some(DiagnosticSeverity::ERROR),
4521 message: "error 2".to_string(),
4522 related_information: Some(vec![
4523 lsp::DiagnosticRelatedInformation {
4524 location: lsp::Location {
4525 uri: buffer_uri.clone(),
4526 range: lsp::Range::new(
4527 lsp::Position::new(1, 13),
4528 lsp::Position::new(1, 15),
4529 ),
4530 },
4531 message: "error 2 hint 1".to_string(),
4532 },
4533 lsp::DiagnosticRelatedInformation {
4534 location: lsp::Location {
4535 uri: buffer_uri.clone(),
4536 range: lsp::Range::new(
4537 lsp::Position::new(1, 13),
4538 lsp::Position::new(1, 15),
4539 ),
4540 },
4541 message: "error 2 hint 2".to_string(),
4542 },
4543 ]),
4544 ..Default::default()
4545 },
4546 lsp::Diagnostic {
4547 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4548 severity: Some(DiagnosticSeverity::HINT),
4549 message: "error 2 hint 1".to_string(),
4550 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4551 location: lsp::Location {
4552 uri: buffer_uri.clone(),
4553 range: lsp::Range::new(
4554 lsp::Position::new(2, 8),
4555 lsp::Position::new(2, 17),
4556 ),
4557 },
4558 message: "original diagnostic".to_string(),
4559 }]),
4560 ..Default::default()
4561 },
4562 lsp::Diagnostic {
4563 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4564 severity: Some(DiagnosticSeverity::HINT),
4565 message: "error 2 hint 2".to_string(),
4566 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4567 location: lsp::Location {
4568 uri: buffer_uri.clone(),
4569 range: lsp::Range::new(
4570 lsp::Position::new(2, 8),
4571 lsp::Position::new(2, 17),
4572 ),
4573 },
4574 message: "original diagnostic".to_string(),
4575 }]),
4576 ..Default::default()
4577 },
4578 ],
4579 version: None,
4580 };
4581
4582 project
4583 .update(cx, |p, cx| {
4584 p.update_diagnostics(message, &Default::default(), cx)
4585 })
4586 .unwrap();
4587 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4588
4589 assert_eq!(
4590 buffer
4591 .diagnostics_in_range::<_, Point>(0..buffer.len())
4592 .collect::<Vec<_>>(),
4593 &[
4594 DiagnosticEntry {
4595 range: Point::new(1, 8)..Point::new(1, 9),
4596 diagnostic: Diagnostic {
4597 severity: DiagnosticSeverity::WARNING,
4598 message: "error 1".to_string(),
4599 group_id: 0,
4600 is_primary: true,
4601 ..Default::default()
4602 }
4603 },
4604 DiagnosticEntry {
4605 range: Point::new(1, 8)..Point::new(1, 9),
4606 diagnostic: Diagnostic {
4607 severity: DiagnosticSeverity::HINT,
4608 message: "error 1 hint 1".to_string(),
4609 group_id: 0,
4610 is_primary: false,
4611 ..Default::default()
4612 }
4613 },
4614 DiagnosticEntry {
4615 range: Point::new(1, 13)..Point::new(1, 15),
4616 diagnostic: Diagnostic {
4617 severity: DiagnosticSeverity::HINT,
4618 message: "error 2 hint 1".to_string(),
4619 group_id: 1,
4620 is_primary: false,
4621 ..Default::default()
4622 }
4623 },
4624 DiagnosticEntry {
4625 range: Point::new(1, 13)..Point::new(1, 15),
4626 diagnostic: Diagnostic {
4627 severity: DiagnosticSeverity::HINT,
4628 message: "error 2 hint 2".to_string(),
4629 group_id: 1,
4630 is_primary: false,
4631 ..Default::default()
4632 }
4633 },
4634 DiagnosticEntry {
4635 range: Point::new(2, 8)..Point::new(2, 17),
4636 diagnostic: Diagnostic {
4637 severity: DiagnosticSeverity::ERROR,
4638 message: "error 2".to_string(),
4639 group_id: 1,
4640 is_primary: true,
4641 ..Default::default()
4642 }
4643 }
4644 ]
4645 );
4646
4647 assert_eq!(
4648 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4649 &[
4650 DiagnosticEntry {
4651 range: Point::new(1, 8)..Point::new(1, 9),
4652 diagnostic: Diagnostic {
4653 severity: DiagnosticSeverity::WARNING,
4654 message: "error 1".to_string(),
4655 group_id: 0,
4656 is_primary: true,
4657 ..Default::default()
4658 }
4659 },
4660 DiagnosticEntry {
4661 range: Point::new(1, 8)..Point::new(1, 9),
4662 diagnostic: Diagnostic {
4663 severity: DiagnosticSeverity::HINT,
4664 message: "error 1 hint 1".to_string(),
4665 group_id: 0,
4666 is_primary: false,
4667 ..Default::default()
4668 }
4669 },
4670 ]
4671 );
4672 assert_eq!(
4673 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4674 &[
4675 DiagnosticEntry {
4676 range: Point::new(1, 13)..Point::new(1, 15),
4677 diagnostic: Diagnostic {
4678 severity: DiagnosticSeverity::HINT,
4679 message: "error 2 hint 1".to_string(),
4680 group_id: 1,
4681 is_primary: false,
4682 ..Default::default()
4683 }
4684 },
4685 DiagnosticEntry {
4686 range: Point::new(1, 13)..Point::new(1, 15),
4687 diagnostic: Diagnostic {
4688 severity: DiagnosticSeverity::HINT,
4689 message: "error 2 hint 2".to_string(),
4690 group_id: 1,
4691 is_primary: false,
4692 ..Default::default()
4693 }
4694 },
4695 DiagnosticEntry {
4696 range: Point::new(2, 8)..Point::new(2, 17),
4697 diagnostic: Diagnostic {
4698 severity: DiagnosticSeverity::ERROR,
4699 message: "error 2".to_string(),
4700 group_id: 1,
4701 is_primary: true,
4702 ..Default::default()
4703 }
4704 }
4705 ]
4706 );
4707 }
4708
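    // Renaming a symbol should first resolve its range via a prepare-rename request, then apply
    // the language server's workspace edit to every affected buffer.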
4709 #[gpui::test]
4710 async fn test_rename(cx: &mut gpui::TestAppContext) {
4711 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4712 let language = Arc::new(Language::new(
4713 LanguageConfig {
4714 name: "Rust".into(),
4715 path_suffixes: vec!["rs".to_string()],
4716 language_server: Some(language_server_config),
4717 ..Default::default()
4718 },
4719 Some(tree_sitter_rust::language()),
4720 ));
4721
4722 let fs = FakeFs::new(cx.background());
4723 fs.insert_tree(
4724 "/dir",
4725 json!({
4726 "one.rs": "const ONE: usize = 1;",
4727 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4728 }),
4729 )
4730 .await;
4731
4732 let project = Project::test(fs.clone(), cx);
4733 project.update(cx, |project, _| {
4734 Arc::get_mut(&mut project.languages).unwrap().add(language);
4735 });
4736
4737 let (tree, _) = project
4738 .update(cx, |project, cx| {
4739 project.find_or_create_local_worktree("/dir", false, cx)
4740 })
4741 .await
4742 .unwrap();
4743 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4744 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4745 .await;
4746
4747 let buffer = project
4748 .update(cx, |project, cx| {
4749 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4750 })
4751 .await
4752 .unwrap();
4753
4754 let mut fake_server = fake_servers.next().await.unwrap();
4755
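        // The prepare-rename request should resolve to the range of the symbol under the cursor.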
4756 let response = project.update(cx, |project, cx| {
4757 project.prepare_rename(buffer.clone(), 7, cx)
4758 });
4759 fake_server
4760 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4761 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4762 assert_eq!(params.position, lsp::Position::new(0, 7));
4763 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4764 lsp::Position::new(0, 6),
4765 lsp::Position::new(0, 9),
4766 )))
4767 })
4768 .next()
4769 .await
4770 .unwrap();
4771 let range = response.await.unwrap().unwrap();
4772 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4773 assert_eq!(range, 6..9);
4774
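        // Performing the rename should apply the returned workspace edit, yielding a transaction
        // that covers both files.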
4775 let response = project.update(cx, |project, cx| {
4776 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4777 });
4778 fake_server
4779 .handle_request::<lsp::request::Rename, _>(|params, _| {
4780 assert_eq!(
4781 params.text_document_position.text_document.uri.as_str(),
4782 "file:///dir/one.rs"
4783 );
4784 assert_eq!(
4785 params.text_document_position.position,
4786 lsp::Position::new(0, 7)
4787 );
4788 assert_eq!(params.new_name, "THREE");
4789 Some(lsp::WorkspaceEdit {
4790 changes: Some(
4791 [
4792 (
4793 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4794 vec![lsp::TextEdit::new(
4795 lsp::Range::new(
4796 lsp::Position::new(0, 6),
4797 lsp::Position::new(0, 9),
4798 ),
4799 "THREE".to_string(),
4800 )],
4801 ),
4802 (
4803 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4804 vec![
4805 lsp::TextEdit::new(
4806 lsp::Range::new(
4807 lsp::Position::new(0, 24),
4808 lsp::Position::new(0, 27),
4809 ),
4810 "THREE".to_string(),
4811 ),
4812 lsp::TextEdit::new(
4813 lsp::Range::new(
4814 lsp::Position::new(0, 35),
4815 lsp::Position::new(0, 38),
4816 ),
4817 "THREE".to_string(),
4818 ),
4819 ],
4820 ),
4821 ]
4822 .into_iter()
4823 .collect(),
4824 ),
4825 ..Default::default()
4826 })
4827 })
4828 .next()
4829 .await
4830 .unwrap();
4831 let mut transaction = response.await.unwrap().0;
4832 assert_eq!(transaction.len(), 2);
4833 assert_eq!(
4834 transaction
4835 .remove_entry(&buffer)
4836 .unwrap()
4837 .0
4838 .read_with(cx, |buffer, _| buffer.text()),
4839 "const THREE: usize = 1;"
4840 );
4841 assert_eq!(
4842 transaction
4843 .into_keys()
4844 .next()
4845 .unwrap()
4846 .read_with(cx, |buffer, _| buffer.text()),
4847 "const TWO: usize = one::THREE + one::THREE;"
4848 );
4849 }
4850
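    // Project-wide search should report matches from files on disk as well as from the unsaved
    // contents of open buffers.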
4851 #[gpui::test]
4852 async fn test_search(cx: &mut gpui::TestAppContext) {
4853 let fs = FakeFs::new(cx.background());
4854 fs.insert_tree(
4855 "/dir",
4856 json!({
4857 "one.rs": "const ONE: usize = 1;",
4858 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4859 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4860 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4861 }),
4862 )
4863 .await;
4864 let project = Project::test(fs.clone(), cx);
4865 let (tree, _) = project
4866 .update(cx, |project, cx| {
4867 project.find_or_create_local_worktree("/dir", false, cx)
4868 })
4869 .await
4870 .unwrap();
4871 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4872 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4873 .await;
4874
4875 assert_eq!(
4876 search(&project, SearchQuery::text("TWO", false, true), cx)
4877 .await
4878 .unwrap(),
4879 HashMap::from_iter([
4880 ("two.rs".to_string(), vec![6..9]),
4881 ("three.rs".to_string(), vec![37..40])
4882 ])
4883 );
4884
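        // Edit an open buffer so that its unsaved contents contain additional matches.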
4885 let buffer_4 = project
4886 .update(cx, |project, cx| {
4887 project.open_buffer((worktree_id, "four.rs"), cx)
4888 })
4889 .await
4890 .unwrap();
4891 buffer_4.update(cx, |buffer, cx| {
4892 buffer.edit([20..28, 31..43], "two::TWO", cx);
4893 });
4894
4895 assert_eq!(
4896 search(&project, SearchQuery::text("TWO", false, true), cx)
4897 .await
4898 .unwrap(),
4899 HashMap::from_iter([
4900 ("two.rs".to_string(), vec![6..9]),
4901 ("three.rs".to_string(), vec![37..40]),
4902 ("four.rs".to_string(), vec![25..28, 36..39])
4903 ])
4904 );
4905
4906 async fn search(
4907 project: &ModelHandle<Project>,
4908 query: SearchQuery,
4909 cx: &mut gpui::TestAppContext,
4910 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
4911 let results = project
4912 .update(cx, |project, cx| project.search(query, cx))
4913 .await?;
4914
4915 Ok(results
4916 .into_iter()
4917 .map(|(buffer, ranges)| {
4918 buffer.read_with(cx, |buffer, _| {
4919 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4920 let ranges = ranges
4921 .into_iter()
4922 .map(|range| range.to_offset(buffer))
4923 .collect::<Vec<_>>();
4924 (path, ranges)
4925 })
4926 })
4927 .collect())
4928 }
4929 }
4930}