use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    // std::env::set_var("RUST_LOG", "info");
    env_logger::init();
}

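// Verifies that `LanguageRegistry::select_language` picks a language based on a path's
// file extension or file name, and returns `None` when only part of a suffix matches.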
#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

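// Verifies which `Event`s a buffer emits for local edits, empty and non-empty
// transactions, undo, and remotely-applied operations.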
#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

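// Verifies that applying a diff computed against new text produces exactly that text,
// both when deleting and when inserting lines.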
#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

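// Verifies that the buffer reparses after edits, transactions, undo, and redo, and
// that the syntax tree reflects the latest text once parsing finishes.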
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

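// Verifies that an outline query produces the expected items and depths, and that
// fuzzy-searching the outline matches names (and context text when the query contains
// a space) while also including the parents of matching items.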
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without space, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]), // included as the parent of a match
            ("enum LoginState", vec![]), // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

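// Verifies that `enclosing_bracket_point_ranges` returns the innermost pair of brackets
// surrounding a given range, based on the language's brackets query.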
#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

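// Verifies that `edit_with_autoindent` indents newly-inserted lines according to the
// language's indentation query.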
#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

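// Verifies that LSP diagnostics are positioned correctly when they arrive for an older
// buffer version, that overlapping diagnostics show up in the buffer's chunks, and that
// disk-based diagnostics are translated through edits made since the last save.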
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let file = Box::new(FakeFile {
        path: Path::new("/some/path").into(),
    }) as Box<dyn File>;
    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, file, cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

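// Verifies how zero-width diagnostic ranges are expanded when producing highlighted chunks.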
#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

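// Verifies that a buffer can be serialized to a protobuf message and reconstructed
// on another replica with the same text.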
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

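// Randomized test that simulates multiple peers concurrently editing, setting selections,
// undoing/redoing, and replicating new peers over a simulated network, then asserts that
// all replicas converge to the same text and see each other's selections. Peer and
// operation counts can be tuned via the MIN_PEERS, MAX_PEERS, and OPERATIONS
// environment variables.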
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

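// Collects a buffer range into (text, diagnostic severity) chunks, merging adjacent
// chunks that share the same severity.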
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

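// Verifies that `contiguous_ranges` groups consecutive integers into ranges, splitting
// any run that exceeds `max_len`.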
#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

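// Test-only helper that converts the anchor ranges returned by `enclosing_bracket_ranges`
// into `Point` ranges for easier assertions.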
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

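// A minimal Rust language definition used by the tests above, with indentation and
// bracket queries.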
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

fn empty(point: Point) -> Range<Point> {
    point..point
}