use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

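// Languages are selected by a file's extension or full file name; a suffix that is
// only part of the extension or name should not match.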
#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

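// Checks the events emitted by buffers in response to local edits, transactions,
// undo, and the application of remote operations.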
#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

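// Applying a diff computed against new text should make the buffer match that text.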
#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

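// The syntax tree should be kept up to date across edits, batched transactions,
// undo, and redo.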
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

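// Checks the outline items extracted via the outline query, and fuzzy matching of
// outline items against search queries.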
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),                    // included as the parent of a match
            ("enum LoginState", vec![]),               // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]),              // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

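// The innermost bracket pair enclosing a range should be returned, based on the
// language's brackets query.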
#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

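// Edits performed with autoindent should produce the indentation suggested by the
// language's indents query.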
#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a closing paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

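// Diagnostics reported by the language server should be positioned correctly even when
// they arrive for an older version of the buffer or when they overlap, and they should
// move as the buffer is edited.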
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(&cx).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let file = Box::new(FakeFile {
        path: Path::new("/some/path").into(),
    }) as Box<dyn File>;
    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, file, cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

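// Zero-length diagnostic ranges should still be visible when chunking the buffer's text.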
#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

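// A buffer should be reproducible on another replica from its protobuf representation.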
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

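// Randomized collaboration test: replicas concurrently edit, undo, redo, and update
// selections, exchanging operations over a simulated network; all replicas must
// converge to the same text and remote selections.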
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

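// Collects the buffer's chunks in a range, coalescing adjacent chunks that share the
// same diagnostic severity.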
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

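// Test-only helper that expresses enclosing bracket ranges as point ranges for easier
// assertions.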
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

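// A minimal Rust language definition with the indent and bracket queries used by the
// tests above.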
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

fn empty(point: Point) -> Range<Point> {
    point..point
}