use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::test::Network;

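// Set up logging once for the whole test binary via `ctor`; logs are only
// emitted when the `RUST_LOG` environment variable is set (e.g. `RUST_LOG=info`).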
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[test]
fn test_select_language() {
    let registry = LanguageRegistry {
        languages: vec![
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Rust".to_string(),
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
            Arc::new(Language::new(
                LanguageConfig {
                    name: "Make".to_string(),
                    path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
                    ..Default::default()
                },
                Some(tree_sitter_rust::language()),
            )),
        ],
    };

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust")
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make")
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make")
    );

    // matching suffix that is not the full file extension or filename
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}

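// Checks which `Event`s a buffer emits for plain edits, transactions, undo,
// and for applying remote operations on a second replica.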
#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

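// Applying a diff computed against some target text should leave the buffer
// containing exactly that text.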
#[gpui::test]
async fn test_apply_diff(mut cx: gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(&cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(&mut cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}

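// Exercises incremental re-parsing: the syntax tree should stay in sync across
// edits, further edits made while a parse is in flight, undo, and redo.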
#[gpui::test]
async fn test_reparse(mut cx: gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (add parameter and variable reference)
    // Parsing doesn't begin until the transaction is complete
    buffer.update(&mut cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(&mut cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(&mut cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

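// Builds an outline from a Rust outline query and checks both the item
// hierarchy (text and depth) and fuzzy searching over the outline items.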
#[gpui::test]
async fn test_outline(mut cx: gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(&cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space in the query, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),                    // included as the parent of a match
            ("enum LoginState", vec![]),               // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]),              // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

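// Using the brackets query from `rust_lang()`, the buffer should report the
// innermost pair of curly braces that encloses a given range.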
#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
        mod x {
            mod y {

            }
        }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

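// Diagnostics arrive for an older version of the buffer and must be translated
// to the buffer's current coordinates. Also covers overlapping diagnostics and
// further edits made after the diagnostics were generated.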
#[gpui::test]
async fn test_diagnostics(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background());
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(&mut cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(&mut cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(&mut cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

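// LSP edits are expressed against a past document version; they should be
// remapped onto the buffer's current contents before being applied.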
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background());

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    let lsp_document_version = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(&mut cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    let edits = buffer
        .update(&mut cx, |buffer, cx| {
            buffer.edits_from_lsp(
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(&mut cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}

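// A large, overlapping set of LSP edits on adjacent lines should collapse down
// to a minimal set of buffer edits.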
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppContext) {
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = buffer
        .update(&mut cx, |buffer, cx| {
            buffer.edits_from_lsp(
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(&mut cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

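// Zero-width diagnostic ranges still need to be visible: they are expanded to
// cover an adjacent character when producing highlighted chunks.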
#[gpui::test]
async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}

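// Randomized convergence test for collaborative editing. The peer count and
// number of operations can be tuned via the MIN_PEERS, MAX_PEERS, and
// OPERATIONS environment variables (e.g. `OPERATIONS=100 cargo test
// test_random_collaboration`); all replicas must end up with identical text
// and consistent remote selections.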
#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            50..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            70..=99 if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx);
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx);
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

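// Collects the buffer's highlighted chunks for `range`, coalescing adjacent
// chunks that share the same diagnostic severity.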
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}

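// Test-only helper that converts the anchor ranges returned by
// `enclosing_bracket_ranges` into `Point` ranges for easier assertions.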
impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

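// A minimal Rust language used throughout these tests: the tree-sitter grammar
// plus small indent and bracket queries.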
fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".to_string(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

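// Shorthand for an empty (caret) range at `point`.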
fn empty(point: Point) -> Range<Point> {
    point..point
}