use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

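// Runs once when the test binary starts (via `ctor`), so log output is
// available in every test; set the `RUST_LOG` environment variable to enable it.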
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    // std::env::set_var("RUST_LOG", "info");
    env_logger::init();
}

#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching a suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

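// Covers incremental reparsing: edits made inside a transaction, edits made
// while a previous parse is still in flight, and undo/redo all converge on
// the expected syntax tree.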
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
    });

    // Wait for the initial text to parse.
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add a parameter and a variable reference).
    // Parsing doesn't begin until the transaction is complete.
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (identifier))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn the identifier into a field expression
    // * turn the field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

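// Builds an outline from the `@item` / `@name` / `@context` captures in the
// query below, then exercises fuzzy searching over the outline items.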
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Some(Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    ));

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space, we only match on names.
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),                    // included as the parent of a match
            ("enum LoginState", vec![]),               // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]),              // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

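// The next two tests cover when autoindent re-indents an existing line: a line
// is only adjusted if its current indentation still matches the old suggestion.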
#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

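// Runs against a fake language server: diagnostics reported for an older
// buffer version are mapped forward through later edits, and overlapping
// diagnostics are split into correctly highlighted chunks.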
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang)), Some(language_server), cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down.
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                Some(change_notification_2.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
        buffer
            .update_diagnostics(
                None,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

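// Randomized collaboration test. MIN_PEERS, MAX_PEERS, and OPERATIONS can be
// set as environment variables to control the simulation size; each iteration
// randomly interleaves edits, selection updates, undo/redo, replication of new
// peers, and network delivery, then asserts that all replicas converge.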
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

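/// Collects the chunks of `buffer` within `range`, coalescing adjacent chunks
/// that share the same diagnostic severity into a single `(text, severity)` pair.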
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, Some(&Default::default())) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

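// Test-only helper: converts the anchor ranges returned by
// `enclosing_bracket_ranges` into `Point` ranges so assertions can be written
// against row/column coordinates.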
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

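/// A minimal Rust language definition for these tests, with just enough indent
/// and bracket queries for the autoindent and bracket-matching cases above.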
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

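/// Shorthand for an empty (caret) range at `point`.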
fn empty(point: Point) -> Range<Point> {
    point..point
}