use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use text::network::Network;
use unindent::Unindent as _;
use util::post_inc;

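// Enable env_logger output in tests when RUST_LOG is set (e.g. `RUST_LOG=info cargo test`).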
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
fn test_select_language() {
    let registry = LanguageRegistry::test();
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Make".into(),
            path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust".into())
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make".into())
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make".into())
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
    buffer1.update(cx, {
        let buffer1_ops = buffer1_ops.clone();
        |buffer, cx| {
            let buffer_1_events = buffer_1_events.clone();
            cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() {
                Event::Operation(op) => buffer1_ops.borrow_mut().push(op),
                event => buffer_1_events.borrow_mut().push(event),
            })
            .detach();
            let buffer_2_events = buffer_2_events.clone();
            cx.subscribe(&buffer2, move |_, _, event, _| {
                buffer_2_events.borrow_mut().push(event.clone())
            })
            .detach();

            // An edit emits an edited event, followed by a dirtied event,
            // since the buffer was previously in a clean state.
            buffer.edit(Some(2..4), "XYZ", cx);

            // An empty transaction does not emit any events.
            buffer.start_transaction();
            buffer.end_transaction(cx);

            // A transaction containing two edits emits one edited event.
            now += Duration::from_secs(1);
            buffer.start_transaction_at(now);
            buffer.edit(Some(5..5), "u", cx);
            buffer.edit(Some(6..6), "w", cx);
            buffer.end_transaction_at(now, cx);

            // Undoing a transaction emits one edited event.
            buffer.undo(cx);
        }
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer
            .apply_ops(buffer1_ops.borrow_mut().drain(..), cx)
            .unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

#[gpui::test]
async fn test_outline(cx: &mut gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without space, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),      // included as the parent of a match
            ("enum LoginState", vec![]), // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

#[gpui::test]
async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_language_server_has_exited(cx: &mut gpui::TestAppContext) {
    let (language_server, fake) = cx.update(lsp::LanguageServer::fake);

    // Simulate the language server failing to start up.
    drop(fake);

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, "", Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    // Run the buffer's task that retrieves the server's capabilities.
    cx.foreground().advance_clock(Duration::from_millis(1));

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.language_server().is_none());
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    let lsp_document_version = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

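// Randomized, property-based test: a fluctuating set of replicas concurrently edits, undoes,
// sets selections, and publishes diagnostics, exchanging operations over a simulated network
// until it goes idle. The MIN_PEERS, MAX_PEERS, and OPERATIONS environment variables tune the
// simulation.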
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let network = Rc::new(RefCell::new(Network::new(rng.clone())));

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            let network = network.clone();
            cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
                if let Event::Operation(op) = event {
                    network
                        .borrow_mut()
                        .broadcast(buffer.replica_id(), vec![proto::serialize_operation(&op)]);
                }
            })
            .detach();
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.borrow_mut().add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut next_diagnostic_id = 0;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if mutation_count != 0 && replica_id == 0 => {
                let entry_count = rng.gen_range(1..=5);
                buffer.update(cx, |buffer, cx| {
                    let diagnostics = (0..entry_count)
                        .map(|_| {
                            let range = buffer.random_byte_range(0, &mut rng);
                            DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    message: post_inc(&mut next_diagnostic_id).to_string(),
                                    ..Default::default()
                                },
                            }
                        })
                        .collect();
                    log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
                    buffer.update_diagnostics(diagnostics, None, cx).unwrap();
                });
                mutation_count -= 1;
            }
            50..=59 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    let network = network.clone();
                    cx.subscribe(&cx.handle(), move |buffer, _, event, _| {
                        if let Event::Operation(op) = event {
                            network.borrow_mut().broadcast(
                                buffer.replica_id(),
                                vec![proto::serialize_operation(&op)],
                            );
                        }
                    })
                    .detach();
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.borrow_mut().replicate(replica_id, new_replica_id);
            }
            60..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            _ if network.borrow().has_unreceived(replica_id) => {
                let ops = network
                    .borrow_mut()
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.borrow().is_idle() {
            break;
        }
    }

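    // Once the mutation budget is spent and the network has quiesced, every replica should have
    // converged on the same text, diagnostics, and remote selections as replica 0.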
    let first_buffer = buffers[0].read(cx).snapshot();
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx).snapshot();
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, usize>(0..buffer.len())
                .collect::<Vec<_>>(),
            first_buffer
                .diagnostics_in_range::<_, usize>(0..first_buffer.len())
                .collect::<Vec<_>>(),
            "Replica {} diagnostics != Replica 0 diagnostics",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

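// Collects the buffer's highlighted chunks over `range`, coalescing adjacent chunks that carry
// the same diagnostic severity so tests can assert on contiguous spans.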
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

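// Test-only convenience: resolves the anchor ranges returned by `enclosing_bracket_ranges` into
// point ranges so assertions can compare plain coordinates.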
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

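// A minimal Rust language fixture for these tests: the tree-sitter grammar plus the indent and
// bracket queries that the autoindent and bracket-matching tests rely on.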
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

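// Shorthand for an empty (caret-like) range at `point`.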
fn empty(point: Point) -> Range<Point> {
    point..point
}