use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    // std::env::set_var("RUST_LOG", "info");
    env_logger::init();
}

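// Verifies that the language registry selects a language whose path suffix
// matches either the file's extension or its full file name.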
#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

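// Verifies the events a buffer emits: the first edit emits `Edited` then
// `Dirtied`, grouped edits and undos emit a single `Edited` each, and applying
// remote ops on another replica emits `Edited` followed by `Dirtied`.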
#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

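// Verifies that applying a diff computed against new text makes the buffer's
// contents equal to that text.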
#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

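// Verifies incremental reparsing: edits within a transaction are parsed once
// the transaction ends, edits made while a parse is in flight are picked up by
// a follow-up parse, and undo/redo also trigger reparsing.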
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (identifier))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

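// Verifies that the outline query produces the expected items and nesting
// depths, and that fuzzy-searching the outline matches names (and context
// captures when the query contains a space).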
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space in the query, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),       // included as the parent of a match
            ("enum LoginState", vec![]),  // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

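// Verifies that `enclosing_bracket_point_ranges` (defined below) returns the
// open and close brace of the innermost bracket pair enclosing the given range.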
#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

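// Verifies that `edit_with_autoindent` indents newly inserted lines according
// to the language's indent query.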
#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

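// Verifies diagnostic handling against a fake language server: diagnostics
// received for an older buffer version land at the right positions after later
// edits, overlapping diagnostics produce correctly highlighted chunks, and
// disk-based diagnostics are translated across subsequent edits.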
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                Some(change_notification_2.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

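// Verifies how zero-width diagnostic ranges are expanded when producing
// highlighted chunks.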
#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                None,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

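// Verifies that a buffer can be serialized to a protobuf message and
// reconstructed via `Buffer::from_proto` with the same text.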
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

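// Randomized collaboration test: several replicas concurrently edit, undo,
// redo, and set selections, exchanging operations over a simulated network.
// Once all operations have been delivered, every replica must converge to the
// same text and agree on the other replicas' selections.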
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

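// Collects the buffer's chunks in `range` along with their diagnostic
// severity, merging adjacent chunks that have the same severity.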
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

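// Test-only helper that converts the anchor ranges returned by
// `enclosing_bracket_ranges` into `Point` ranges, which are easier to assert on.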
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

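// A minimal Rust language definition for these tests, with just enough indent
// and bracket queries for the autoindent and bracket-matching cases above.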
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

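// Shorthand for an empty (caret) range at the given point.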
fn empty(point: Point) -> Range<Point> {
    point..point
}