use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    // std::env::set_var("RUST_LOG", "info");
    env_logger::init();
}

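// The registry should pick a language by matching file extension or full file name,
// and return `None` when only part of a suffix matches.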
#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

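// Checks which `Event`s a buffer emits for plain edits, empty transactions,
// multi-edit transactions, undo, and remotely applied operations.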
#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

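// Applying a diff produced by `Buffer::diff` should make the buffer's text match the
// target text, both for deletions and for insertions/replacements.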
#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

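// Exercises incremental reparsing: edits inside a transaction, edits issued while a
// parse is still in flight, and undo/redo should all converge to the expected syntax tree.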
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
    });

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (identifier))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

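// Builds an outline from the Rust outline query and checks both the item hierarchy
// (text and depth) and the match positions produced by fuzzy search.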
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Some(Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    ));

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),      // included as the parent of a match
            ("enum LoginState", vec![]), // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Eq for Person", vec![9, 16, 17]), // matches part of the context
            ("impl Drop for Person", vec![11, 18, 19]), // matches in two disjoint names
        ]
    );
    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[("impl Drop for Person", vec![5, 8, 9, 14])]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 8, 19])]
    );
    assert_eq!(
        search(&outline, "impl", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3]),
            ("impl Drop for Person", vec![0, 1, 2, 3])
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

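// The enclosing bracket ranges for a position should be the innermost pair of matching
// braces that contains it, expressed here as `Point` ranges for easier assertions.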
#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

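// Auto-indenting edits should indent newly inserted lines according to the language's
// indents query: one level inside a block, and an extra level for a chained `.c` call.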
#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

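// When a line's existing indentation already differs from the suggestion, edits on that
// line should leave its indentation alone; appended lines indent relative to it.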
#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

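// When an edit only changes a line's text but that change alters the line's indentation
// suggestion, the line is re-indented to match the new suggestion.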
#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer =
            Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

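// Drives diagnostics through a fake LSP server: diagnostics reported against an older
// buffer version are translated through subsequent edits, overlapping diagnostics are
// highlighted correctly, and disk-based diagnostics track the unsaved changes.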
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang)), Some(language_server), cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                Some(open_notification.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                Some(change_notification_2.text_document.version),
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

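// Diagnostics with empty ranges should be expanded to cover an adjacent character so
// they still produce a highlighted chunk.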
#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), None, cx);
        buffer
            .update_diagnostics(
                None,
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

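// Round-trips a buffer (including its edit history) through its protobuf representation
// and checks that the reconstructed replica has the same text.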
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

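// Randomized convergence test: several replicas concurrently edit, undo/redo, set
// selections, join mid-session, and exchange operations over a simulated network.
// Once all operations are delivered, every replica must agree on the text and on the
// remote selections. `MIN_PEERS`, `MAX_PEERS`, and `OPERATIONS` tune the run.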
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

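// Collects a buffer's chunks over `range`, coalescing adjacent chunks that carry the
// same diagnostic severity so tests can assert on highlighted spans.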
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, Some(&Default::default())) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

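// `contiguous_ranges` should group consecutive integers into ranges, splitting a group
// whenever it reaches `max_len`.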
#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

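// Test-only convenience: converts the anchor ranges returned by
// `enclosing_bracket_ranges` into `Point` ranges so assertions can use plain coordinates.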
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

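// A minimal Rust language definition carrying only the indent and bracket queries these
// tests rely on.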
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

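// Shorthand for an empty (caret) range at `point`.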
fn empty(point: Point) -> Range<Point> {
    point..point
}