use super::*;
use clock::ReplicaId;
use collections::BTreeMap;
use gpui::{ModelHandle, MutableAppContext};
use rand::prelude::*;
use std::{
    cell::RefCell,
    env,
    iter::FromIterator,
    ops::Range,
    rc::Rc,
    time::{Duration, Instant},
};
use unindent::Unindent as _;
use util::{post_inc, test::Network};

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
fn test_select_language() {
    let registry = LanguageRegistry::new();
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Make".into(),
            path_suffixes: vec!["Makefile".to_string(), "mk".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));

    // matching file extension
    assert_eq!(
        registry.select_language("zed/lib.rs").map(|l| l.name()),
        Some("Rust".into())
    );
    assert_eq!(
        registry.select_language("zed/lib.mk").map(|l| l.name()),
        Some("Make".into())
    );

    // matching filename
    assert_eq!(
        registry.select_language("zed/Makefile").map(|l| l.name()),
        Some("Make".into())
    );

    // a suffix that is not the full file extension or filename does not match
    assert_eq!(registry.select_language("zed/cars").map(|l| l.name()), None);
    assert_eq!(
        registry.select_language("zed/a.cars").map(|l| l.name()),
        None
    );
    assert_eq!(registry.select_language("zed/sumk").map(|l| l.name()), None);
}
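
// A minimal sketch of the same API, relying only on behavior asserted above:
// selection is driven entirely by `path_suffixes`, so the grammar passed to
// `Language::new` plays no role in matching (which is why the "Make" language
// above can reuse `tree_sitter_rust::language()`).
#[gpui::test]
fn test_select_language_by_suffix_only() {
    let registry = LanguageRegistry::new();
    registry.add(Arc::new(Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )));

    // A registered suffix matches regardless of the directory prefix.
    assert_eq!(
        registry
            .select_language("deeply/nested/dir/lib.rs")
            .map(|l| l.name()),
        Some("Rust".into())
    );

    // An unregistered suffix never matches.
    assert_eq!(registry.select_language("zed/lib.py").map(|l| l.name()), None);
}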

#[gpui::test]
fn test_edit_events(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();
    let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
    let buffer_2_events = Rc::new(RefCell::new(Vec::new()));

    let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx));
    let buffer_ops = buffer1.update(cx, |buffer, cx| {
        let buffer_1_events = buffer_1_events.clone();
        cx.subscribe(&buffer1, move |_, _, event, _| {
            buffer_1_events.borrow_mut().push(event.clone())
        })
        .detach();
        let buffer_2_events = buffer_2_events.clone();
        cx.subscribe(&buffer2, move |_, _, event, _| {
            buffer_2_events.borrow_mut().push(event.clone())
        })
        .detach();

        // An edit emits an edited event, followed by a dirtied event,
        // since the buffer was previously in a clean state.
        buffer.edit(Some(2..4), "XYZ", cx);

        // An empty transaction does not emit any events.
        buffer.start_transaction();
        buffer.end_transaction(cx);

        // A transaction containing two edits emits one edited event.
        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit(Some(5..5), "u", cx);
        buffer.edit(Some(6..6), "w", cx);
        buffer.end_transaction_at(now, cx);

        // Undoing a transaction emits one edited event.
        buffer.undo(cx);

        buffer.operations.clone()
    });

    // Incorporating a set of remote ops emits a single edited event,
    // followed by a dirtied event.
    buffer2.update(cx, |buffer, cx| {
        buffer.apply_ops(buffer_ops, cx).unwrap();
    });

    let buffer_1_events = buffer_1_events.borrow();
    assert_eq!(
        *buffer_1_events,
        vec![Event::Edited, Event::Dirtied, Event::Edited, Event::Edited]
    );

    let buffer_2_events = buffer_2_events.borrow();
    assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}
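
// A minimal sketch of the event rule exercised above, assuming only what the
// assertions in `test_edit_events` establish: `Dirtied` fires on the
// clean-to-dirty transition, so an edit to an already-dirty buffer emits
// `Edited` alone.
#[gpui::test]
fn test_edit_events_on_already_dirty_buffer(cx: &mut gpui::MutableAppContext) {
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer = cx.add_model(|cx| Buffer::new(0, "abcdef", cx));
    buffer.update(cx, |buf, cx| {
        let events = events.clone();
        cx.subscribe(&buffer, move |_, _, event, _| {
            events.borrow_mut().push(event.clone())
        })
        .detach();

        // The first edit dirties the buffer...
        buf.edit(Some(0..0), "X", cx);
        // ...so a second edit should only report that the buffer was edited.
        buf.edit(Some(1..1), "Y", cx);
    });

    assert_eq!(
        *events.borrow(),
        vec![Event::Edited, Event::Dirtied, Event::Edited]
    );
}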

#[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
    let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let text = "a\nccc\ndddd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));

    let text = "a\n1\n\nccc\ndd2dd\nffffff\n";
    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}
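
// A minimal sketch built on the invariant asserted above (after `apply_diff`,
// the buffer's text equals the diffed-against text): diffing a buffer against
// its own current contents and applying the result is a no-op.
#[gpui::test]
async fn test_apply_identity_diff(cx: &mut gpui::TestAppContext) {
    let text = "a\nbb\nccc\n";
    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    let diff = buffer.read_with(cx, |b, cx| b.diff(text.into(), cx)).await;
    buffer.update(cx, |b, cx| b.apply_diff(diff, cx));
    cx.read(|cx| assert_eq!(buffer.read(cx).text(), text));
}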

#[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) {
    let text = "fn a() {}";
    let buffer =
        cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx));

    // Wait for the initial text to parse
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buffer, _| {
        buffer.set_sync_parse_timeout(Duration::ZERO)
    });

    // Perform some edits (adding a parameter and a variable reference).
    // Parsing doesn't begin until the transaction is complete.
    buffer.update(cx, |buf, cx| {
        buf.start_transaction();

        let offset = buf.text().find(")").unwrap();
        buf.edit(vec![offset..offset], "b: C", cx);
        assert!(!buf.is_parsing());

        let offset = buf.text().find("}").unwrap();
        buf.edit(vec![offset..offset], " d; ", cx);
        assert!(!buf.is_parsing());

        buf.end_transaction(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d; }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (identifier)))))"
        )
    );

    // Perform a series of edits without waiting for the current parse to complete:
    // * turn identifier into a field expression
    // * turn field expression into a method call
    // * add a turbofish to the method call
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], ".e", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e; }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find(";").unwrap();
        buf.edit(vec![offset..offset], "(f)", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e(f); }");
        assert!(buf.is_parsing());
    });
    buffer.update(cx, |buf, cx| {
        let offset = buf.text().find("(f)").unwrap();
        buf.edit(vec![offset..offset], "::<G>", cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.undo(cx);
        assert_eq!(buf.text(), "fn a() {}");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block)))"
        )
    );

    buffer.update(cx, |buf, cx| {
        buf.redo(cx);
        assert_eq!(buf.text(), "fn a(b: C) { d.e::<G>(f); }");
        assert!(buf.is_parsing());
    });
    buffer
        .condition(&cx, |buffer, _| !buffer.is_parsing())
        .await;
    assert_eq!(
        get_tree_sexp(&buffer, &cx),
        concat!(
            "(source_file (function_item name: (identifier) ",
            "parameters: (parameters (parameter pattern: (identifier) type: (type_identifier))) ",
            "body: (block (expression_statement (call_expression ",
            "function: (generic_function ",
            "function: (field_expression value: (identifier) field: (field_identifier)) ",
            "type_arguments: (type_arguments (type_identifier))) ",
            "arguments: (arguments (identifier)))))))",
        )
    );

    fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {
        buffer.read_with(cx, |buffer, _| {
            buffer.syntax_tree().unwrap().root_node().to_sexp()
        })
    }
}

#[gpui::test]
async fn test_outline(cx: &mut gpui::TestAppContext) {
    let language = Arc::new(
        rust_lang()
            .with_outline_query(
                r#"
                (struct_item
                    "struct" @context
                    name: (_) @name) @item
                (enum_item
                    "enum" @context
                    name: (_) @name) @item
                (enum_variant
                    name: (_) @name) @item
                (field_declaration
                    name: (_) @name) @item
                (impl_item
                    "impl" @context
                    trait: (_) @name
                    "for" @context
                    type: (_) @name) @item
                (function_item
                    "fn" @context
                    name: (_) @name) @item
                (mod_item
                    "mod" @context
                    name: (_) @name) @item
                "#,
            )
            .unwrap(),
    );

    let text = r#"
        struct Person {
            name: String,
            age: usize,
        }

        mod module {
            enum LoginState {
                LoggedOut,
                LoggingOn,
                LoggedIn {
                    person: Person,
                    time: Instant,
                }
            }
        }

        impl Eq for Person {}

        impl Drop for Person {
            fn drop(&mut self) {
                println!("bye");
            }
        }
    "#
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
    let outline = buffer
        .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
        .unwrap();

    assert_eq!(
        outline
            .items
            .iter()
            .map(|item| (item.text.as_str(), item.depth))
            .collect::<Vec<_>>(),
        &[
            ("struct Person", 0),
            ("name", 1),
            ("age", 1),
            ("mod module", 0),
            ("enum LoginState", 1),
            ("LoggedOut", 2),
            ("LoggingOn", 2),
            ("LoggedIn", 2),
            ("person", 3),
            ("time", 3),
            ("impl Eq for Person", 0),
            ("impl Drop for Person", 0),
            ("fn drop", 1),
        ]
    );

    // Without a space in the query, we only match on names
    assert_eq!(
        search(&outline, "oon", &cx).await,
        &[
            ("mod module", vec![]),       // included as the parent of a match
            ("enum LoginState", vec![]),  // included as the parent of a match
            ("LoggingOn", vec![1, 7, 8]), // matches
            ("impl Drop for Person", vec![7, 18, 19]), // matches in two disjoint names
        ]
    );

    assert_eq!(
        search(&outline, "dp p", &cx).await,
        &[
            ("impl Drop for Person", vec![5, 8, 9, 14]),
            ("fn drop", vec![]),
        ]
    );
    assert_eq!(
        search(&outline, "dpn", &cx).await,
        &[("impl Drop for Person", vec![5, 14, 19])]
    );
    assert_eq!(
        search(&outline, "impl ", &cx).await,
        &[
            ("impl Eq for Person", vec![0, 1, 2, 3, 4]),
            ("impl Drop for Person", vec![0, 1, 2, 3, 4]),
            ("fn drop", vec![]),
        ]
    );

    async fn search<'a>(
        outline: &'a Outline<Anchor>,
        query: &str,
        cx: &gpui::TestAppContext,
    ) -> Vec<(&'a str, Vec<usize>)> {
        let matches = cx
            .read(|cx| outline.search(query, cx.background().clone()))
            .await;
        matches
            .into_iter()
            .map(|mat| (outline.items[mat.candidate_id].text.as_str(), mat.positions))
            .collect::<Vec<_>>()
    }
}

#[gpui::test]
fn test_enclosing_bracket_ranges(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        let text = "
            mod x {
                mod y {

                }
            }
        "
        .unindent();
        Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 6)..Point::new(1, 6)),
        Some((
            Point::new(0, 6)..Point::new(0, 7),
            Point::new(4, 0)..Point::new(4, 1)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(1, 10)..Point::new(1, 10)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(3, 5)..Point::new(3, 5)),
        Some((
            Point::new(1, 10)..Point::new(1, 11),
            Point::new(3, 4)..Point::new(3, 5)
        ))
    );
}
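
// A minimal sketch of the negative case, relying only on the helper defined at
// the bottom of this file: when the buffer contains no bracket pair at all,
// `enclosing_bracket_point_ranges` yields `None`.
#[gpui::test]
fn test_enclosing_bracket_ranges_without_brackets(cx: &mut MutableAppContext) {
    let buffer = cx.add_model(|cx| {
        Buffer::new(0, "mod x;\n", cx).with_language(Arc::new(rust_lang()), cx)
    });
    let buffer = buffer.read(cx);
    assert_eq!(
        buffer.enclosing_bracket_point_ranges(Point::new(0, 0)..Point::new(0, 0)),
        None
    );
}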

#[gpui::test]
fn test_edit_with_autoindent(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "fn a() {}";
        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([8..8], "\n\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    \n}");

        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 4)], "b()\n", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n    \n}");

        buffer.edit_with_autoindent([Point::new(2, 4)..Point::new(2, 4)], ".c", cx);
        assert_eq!(buffer.text(), "fn a() {\n    b()\n        .c\n}");

        buffer
    });
}

#[gpui::test]
fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {
            c;
            d;
            }
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        // Lines 2 and 3 don't match the indentation suggestion. When editing these lines,
        // their indentation is not adjusted.
        buffer.edit_with_autoindent([empty(Point::new(1, 1)), empty(Point::new(2, 1))], "()", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c();
            d();
            }
            "
            .unindent()
        );

        // When appending new content after these lines, the indentation is based on the
        // preceding lines' actual indentation.
        buffer.edit_with_autoindent(
            [empty(Point::new(1, 1)), empty(Point::new(2, 1))],
            "\n.f\n.g",
            cx,
        );
        assert_eq!(
            buffer.text(),
            "
            fn a() {
            c
                .f
                .g();
            d
                .f
                .g();
            }
            "
            .unindent()
        );
        buffer
    });
}

#[gpui::test]
fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppContext) {
    cx.add_model(|cx| {
        let text = "
            fn a() {}
        "
        .unindent();

        let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx);

        buffer.edit_with_autoindent([5..5], "\nb", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
                b) {}
            "
            .unindent()
        );

        // The indentation suggestion changed because the `@end` node (a close paren)
        // is now at the beginning of the line.
        buffer.edit_with_autoindent([Point::new(1, 4)..Point::new(1, 5)], "", cx);
        assert_eq!(
            buffer.text(),
            "
            fn a(
            ) {}
            "
            .unindent()
        );

        buffer
    });
}

#[gpui::test]
async fn test_diagnostics(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);
    let mut rust_lang = rust_lang();
    rust_lang.config.language_server = Some(LanguageServerConfig {
        disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
        ..Default::default()
    });

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang), cx)
            .with_language_server(language_server, cx)
    });

    let open_notification = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
    let change_notification_1 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    buffer.update(cx, |buffer, cx| {
        // Receive diagnostics for an earlier version of the buffer.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            is_disk_based: true,
                            message: "undefined variable 'CCC'".to_string(),
                            group_id: 2,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();

        // The diagnostics have moved down since they were created.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );

        // Ensure overlapping diagnostics are highlighted correctly.
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::WARNING,
                            message: "unreachable statement".to_string(),
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(open_notification.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), "    ", cx);
        buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
    });
    let change_notification_2 = fake
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    buffer.update(cx, |buffer, cx| {
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'BB'".to_string(),
                            is_disk_based: true,
                            group_id: 1,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "undefined variable 'A'".to_string(),
                            is_disk_based: true,
                            group_id: 0,
                            is_primary: true,
                            ..Default::default()
                        },
                    },
                ],
                Some(change_notification_2.text_document.version),
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_language_server_has_exited(cx: &mut gpui::TestAppContext) {
    let (language_server, fake) = cx.update(lsp::LanguageServer::fake);

    // Simulate the language server failing to start up.
    drop(fake);

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, "", Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    // Run the buffer's task that retrieves the server's capabilities.
    cx.foreground().advance_clock(Duration::from_millis(1));

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.language_server().is_none());
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    let (language_server, mut fake) = cx.update(lsp::LanguageServer::fake);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| {
        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
            .with_language(Arc::new(rust_lang()), cx)
            .with_language_server(language_server, cx)
    });

    let lsp_document_version = fake
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [Point::new(0, 0)..Point::new(0, 0)],
            "// above first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(2, 0)..Point::new(2, 0)],
            "    // inside first function\n",
            cx,
        );
        buffer.edit(
            [Point::new(6, 4)..Point::new(6, 4)],
            "// inside second function ",
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = buffer
        .update(cx, |buffer, cx| {
            buffer.edits_from_lsp(
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([range], new_text, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    cx.add_model(|cx| {
        let text = concat!(
            "let one = ;\n", //
            "let two = \n",
            "let three = 3;\n",
        );

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![
                    DiagnosticEntry {
                        range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                None,
                cx,
            )
            .unwrap();

        // An empty range is extended forward to include the following character.
        // At the end of a line, an empty range is extended backward to include
        // the preceding character.
        let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
        buffer
    });
}
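
// A minimal sketch of the replacement semantics implied by `test_diagnostics`
// above (each `update_diagnostics` call supplies the buffer's complete
// diagnostic set): passing an empty set clears previously reported entries.
#[gpui::test]
async fn test_clearing_diagnostics(cx: &mut gpui::TestAppContext) {
    cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "let one = ;\n", cx);
        buffer.set_language(Some(Arc::new(rust_lang())), cx);
        buffer
            .update_diagnostics(
                vec![DiagnosticEntry {
                    range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "syntax error".to_string(),
                        ..Default::default()
                    },
                }],
                None,
                cx,
            )
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len())
                .count(),
            1
        );

        // Reporting an empty set replaces, and therefore clears, the previous one.
        buffer
            .update_diagnostics(Vec::<DiagnosticEntry<PointUtf16>>::new(), None, cx)
            .unwrap();
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len())
                .count(),
            0
        );
        buffer
    });
}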

#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
    let mut now = Instant::now();

    let buffer1 = cx.add_model(|cx| {
        let mut buffer = Buffer::new(0, "abc", cx);
        buffer.edit([3..3], "D", cx);

        now += Duration::from_secs(1);
        buffer.start_transaction_at(now);
        buffer.edit([4..4], "E", cx);
        buffer.end_transaction_at(now, cx);
        assert_eq!(buffer.text(), "abcDE");

        buffer.undo(cx);
        assert_eq!(buffer.text(), "abcD");

        buffer.edit([4..4], "F", cx);
        assert_eq!(buffer.text(), "abcDF");
        buffer
    });
    assert_eq!(buffer1.read(cx).text(), "abcDF");

    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), "abcDF");
}
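
// A minimal sketch of the same round trip for a buffer with no edit history,
// using only the `to_proto`/`from_proto` calls exercised above.
#[gpui::test]
fn test_serialization_of_unedited_buffer(cx: &mut gpui::MutableAppContext) {
    let buffer1 = cx.add_model(|cx| Buffer::new(0, "the quick brown fox", cx));
    let message = buffer1.read(cx).to_proto();
    let buffer2 = cx.add_model(|cx| Buffer::from_proto(1, message, None, cx).unwrap());
    assert_eq!(buffer2.read(cx).text(), buffer1.read(cx).text());
}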

#[gpui::test(iterations = 100)]
fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
    let min_peers = env::var("MIN_PEERS")
        .map(|i| i.parse().expect("invalid `MIN_PEERS` variable"))
        .unwrap_or(1);
    let max_peers = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(5);
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let base_text_len = rng.gen_range(0..10);
    let base_text = RandomCharIter::new(&mut rng)
        .take(base_text_len)
        .collect::<String>();
    let mut replica_ids = Vec::new();
    let mut buffers = Vec::new();
    let mut network = Network::new(rng.clone());

    for i in 0..rng.gen_range(min_peers..=max_peers) {
        let buffer = cx.add_model(|cx| {
            let mut buffer = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
            buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
            buffer
        });
        buffers.push(buffer);
        replica_ids.push(i as ReplicaId);
        network.add_peer(i as ReplicaId);
        log::info!("Adding initial peer with replica id {}", i);
    }

    log::info!("initial text: {:?}", base_text);

    let mut now = Instant::now();
    let mut mutation_count = operations;
    let mut next_diagnostic_id = 0;
    let mut active_selections = BTreeMap::default();
    loop {
        let replica_index = rng.gen_range(0..replica_ids.len());
        let replica_id = replica_ids[replica_index];
        let buffer = &mut buffers[replica_index];
        let mut new_buffer = None;
        match rng.gen_range(0..100) {
            0..=29 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.start_transaction_at(now);
                    buffer.randomly_edit(&mut rng, 5, cx);
                    buffer.end_transaction_at(now, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            30..=39 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    let mut selections = Vec::new();
                    for id in 0..rng.gen_range(1..=5) {
                        let range = buffer.random_byte_range(0, &mut rng);
                        selections.push(Selection {
                            id,
                            start: buffer.anchor_before(range.start),
                            end: buffer.anchor_before(range.end),
                            reversed: false,
                            goal: SelectionGoal::None,
                        });
                    }
                    let selections: Arc<[Selection<Anchor>]> = selections.into();
                    log::info!(
                        "peer {} setting active selections: {:?}",
                        replica_id,
                        selections
                    );
                    active_selections.insert(replica_id, selections.clone());
                    buffer.set_active_selections(selections, cx);
                });
                mutation_count -= 1;
            }
            40..=49 if mutation_count != 0 && replica_id == 0 => {
                let entry_count = rng.gen_range(1..=5);
                buffer.update(cx, |buffer, cx| {
                    let diagnostics = (0..entry_count)
                        .map(|_| {
                            let range = buffer.random_byte_range(0, &mut rng);
                            DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    message: post_inc(&mut next_diagnostic_id).to_string(),
                                    ..Default::default()
                                },
                            }
                        })
                        .collect();
                    log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics);
                    buffer.update_diagnostics(diagnostics, None, cx).unwrap();
                });
                mutation_count -= 1;
            }
            50..=59 if replica_ids.len() < max_peers => {
                let old_buffer = buffer.read(cx).to_proto();
                let new_replica_id = replica_ids.len() as ReplicaId;
                log::info!(
                    "Adding new replica {} (replicating from {})",
                    new_replica_id,
                    replica_id
                );
                new_buffer = Some(cx.add_model(|cx| {
                    let mut new_buffer =
                        Buffer::from_proto(new_replica_id, old_buffer, None, cx).unwrap();
                    new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
                    new_buffer
                }));
                replica_ids.push(new_replica_id);
                network.replicate(replica_id, new_replica_id);
            }
            60..=69 if mutation_count != 0 => {
                buffer.update(cx, |buffer, cx| {
                    buffer.randomly_undo_redo(&mut rng, cx);
                    log::info!("buffer {} text: {:?}", buffer.replica_id(), buffer.text());
                });
                mutation_count -= 1;
            }
            _ if network.has_unreceived(replica_id) => {
                let ops = network
                    .receive(replica_id)
                    .into_iter()
                    .map(|op| proto::deserialize_operation(op).unwrap());
                if ops.len() > 0 {
                    log::info!(
                        "peer {} applying {} ops from the network.",
                        replica_id,
                        ops.len()
                    );
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap());
                }
            }
            _ => {}
        }

        buffer.update(cx, |buffer, _| {
            let ops = buffer
                .operations
                .drain(..)
                .map(|op| proto::serialize_operation(&op))
                .collect();
            network.broadcast(buffer.replica_id(), ops);
        });
        now += Duration::from_millis(rng.gen_range(0..=200));
        buffers.extend(new_buffer);

        for buffer in &buffers {
            buffer.read(cx).check_invariants();
        }

        if mutation_count == 0 && network.is_idle() {
            break;
        }
    }

    let first_buffer = buffers[0].read(cx).snapshot();
    for buffer in &buffers[1..] {
        let buffer = buffer.read(cx).snapshot();
        assert_eq!(
            buffer.text(),
            first_buffer.text(),
            "Replica {} text != Replica 0 text",
            buffer.replica_id()
        );
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, usize>(0..buffer.len())
                .collect::<Vec<_>>(),
            first_buffer
                .diagnostics_in_range::<_, usize>(0..first_buffer.len())
                .collect::<Vec<_>>(),
            "Replica {} diagnostics != Replica 0 diagnostics",
            buffer.replica_id()
        );
    }

    for buffer in &buffers {
        let buffer = buffer.read(cx).snapshot();
        let actual_remote_selections = buffer
            .remote_selections_in_range(Anchor::min()..Anchor::max())
            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        let expected_remote_selections = active_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
            .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
            .collect::<Vec<_>>();
        assert_eq!(actual_remote_selections, expected_remote_selections);
    }
}

fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks
            .last()
            .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
        {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic));
        }
    }
    chunks
}

#[test]
fn test_contiguous_ranges() {
    assert_eq!(
        contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::<Vec<_>>(),
        &[1..4, 5..7, 9..13]
    );

    // Respects the `max_len` parameter
    assert_eq!(
        contiguous_ranges(
            [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(),
            3
        )
        .collect::<Vec<_>>(),
        &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32],
    );
}
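
// A minimal sketch of the boundary case implied by the assertions above: values
// with no consecutive neighbors each become a single-element range.
#[test]
fn test_contiguous_ranges_with_gaps_only() {
    assert_eq!(
        contiguous_ranges([1, 3, 5].into_iter(), 100).collect::<Vec<_>>(),
        &[1..2, 3..4, 5..6]
    );
}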

impl Buffer {
    pub fn enclosing_bracket_point_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<Point>, Range<Point>)> {
        self.snapshot()
            .enclosing_bracket_ranges(range)
            .map(|(start, end)| {
                let point_start = start.start.to_point(self)..start.end.to_point(self);
                let point_end = end.start.to_point(self)..end.end.to_point(self);
                (point_start, point_end)
            })
    }
}

fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            language_server: None,
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    )
    .with_indents_query(
        r#"
        (call_expression) @indent
        (field_expression) @indent
        (_ "(" ")" @end) @indent
        (_ "{" "}" @end) @indent
        "#,
    )
    .unwrap()
    .with_brackets_query(
        r#"
        ("{" @open "}" @close)
        "#,
    )
    .unwrap()
}

fn empty(point: Point) -> Range<Point> {
    point..point
}