use crate::{
    point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels,
    Hsla, MetalAtlas, MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch,
    Quad, ScaledPixels, Scene, Shadow, Size, Underline,
};
use cocoa::{
    base::{NO, YES},
    foundation::NSUInteger,
    quartzcore::AutoresizingMask,
};
use collections::HashMap;
use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
use objc::{self, msg_send, sel, sel_impl};
use smallvec::SmallVec;
use std::{ffi::c_void, mem, ptr, sync::Arc};

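// The Metal shader library compiled by the build script and embedded into the binary.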
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

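/// GPU renderer for a `Scene`, backed by a `CAMetalLayer`. Per-frame instance data for
/// every primitive kind is written into the single shared `instances` buffer at 256-byte
/// aligned offsets, and each primitive kind is drawn with its own pipeline state.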
pub(crate) struct MetalRenderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    paths_rasterization_pipeline_state: metal::RenderPipelineState,
    path_sprites_pipeline_state: metal::RenderPipelineState,
    shadows_pipeline_state: metal::RenderPipelineState,
    quads_pipeline_state: metal::RenderPipelineState,
    underlines_pipeline_state: metal::RenderPipelineState,
    monochrome_sprites_pipeline_state: metal::RenderPipelineState,
    polychrome_sprites_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    sprite_atlas: Arc<MetalAtlas>,
}

impl MetalRenderer {
    pub fn new(is_opaque: bool) -> Self {
        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

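        // Pack a point's two f32 coordinates into one u64 laid out like a Metal `float2`
        // on a little-endian host: x occupies the low 32 bits and y the high 32 bits.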
        fn to_float2_bits(point: crate::PointF) -> u64 {
            let mut output = point.y.to_bits() as u64;
            output <<= 32;
            output |= point.x.to_bits() as u64;
            output
        }

        let unit_vertices = [
            to_float2_bits(point(0., 0.)),
            to_float2_bits(point(1., 0.)),
            to_float2_bits(point(0., 1.)),
            to_float2_bits(point(0., 1.)),
            to_float2_bits(point(1., 0.)),
            to_float2_bits(point(1., 1.)),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<u64>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let paths_rasterization_pipeline_state = build_path_rasterization_pipeline_state(
            &device,
            &library,
            "paths_rasterization",
            "path_rasterization_vertex",
            "path_rasterization_fragment",
            MTLPixelFormat::R16Float,
        );
        let path_sprites_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "path_sprites",
            "path_sprite_vertex",
            "path_sprite_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let shadows_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadows",
            "shadow_vertex",
            "shadow_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let quads_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quads",
            "quad_vertex",
            "quad_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let underlines_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underlines",
            "underline_vertex",
            "underline_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let monochrome_sprites_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "monochrome_sprites",
            "monochrome_sprite_vertex",
            "monochrome_sprite_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let polychrome_sprites_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "polychrome_sprites",
            "polychrome_sprite_vertex",
            "polychrome_sprite_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );

        let command_queue = device.new_command_queue();
        let sprite_atlas = Arc::new(MetalAtlas::new(device.clone()));

        Self {
            layer,
            command_queue,
            paths_rasterization_pipeline_state,
            path_sprites_pipeline_state,
            shadows_pipeline_state,
            quads_pipeline_state,
            underlines_pipeline_state,
            monochrome_sprites_pipeline_state,
            polychrome_sprites_pipeline_state,
            unit_vertices,
            instances,
            sprite_atlas,
        }
    }

    pub fn layer(&self) -> &metal::MetalLayerRef {
        &*self.layer
    }

    pub fn sprite_atlas(&self) -> &Arc<MetalAtlas> {
        &self.sprite_atlas
    }

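    /// Renders a frame: paths are first rasterized into atlas tiles in their own render
    /// passes, then every batch in the scene is encoded into a single render pass that
    /// targets the layer's next drawable.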
    pub fn draw(&mut self, scene: &Scene) {
        let layer = self.layer.clone();
        let viewport_size = layer.drawable_size();
        let viewport_size: Size<DevicePixels> = size(
            (viewport_size.width.ceil() as i32).into(),
            (viewport_size.height.ceil() as i32).into(),
        );
        let drawable = if let Some(drawable) = layer.next_drawable() {
            drawable
        } else {
            log::error!(
                "failed to retrieve next drawable, drawable size: {:?}",
                viewport_size
            );
            return;
        };
        let command_queue = self.command_queue.clone();
        let command_buffer = command_queue.new_command_buffer();
        let mut instance_offset = 0;

        let path_tiles = self.rasterize_paths(scene.paths(), &mut instance_offset, command_buffer);

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();

        color_attachment.set_texture(Some(drawable.texture()));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        let alpha = if self.layer.is_opaque() { 1. } else { 0. };
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: i32::from(viewport_size.width) as f64,
            height: i32::from(viewport_size.height) as f64,
            znear: 0.0,
            zfar: 1.0,
        });
        for batch in scene.batches() {
            match batch {
                PrimitiveBatch::Shadows(shadows) => {
                    self.draw_shadows(
                        shadows,
                        &mut instance_offset,
                        viewport_size,
                        command_encoder,
                    );
                }
                PrimitiveBatch::Quads(quads) => {
                    self.draw_quads(quads, &mut instance_offset, viewport_size, command_encoder);
                }
                PrimitiveBatch::Paths(paths) => {
                    self.draw_paths(
                        paths,
                        &path_tiles,
                        &mut instance_offset,
                        viewport_size,
                        command_encoder,
                    );
                }
                PrimitiveBatch::Underlines(underlines) => {
                    self.draw_underlines(
                        underlines,
                        &mut instance_offset,
                        viewport_size,
                        command_encoder,
                    );
                }
                PrimitiveBatch::MonochromeSprites {
                    texture_id,
                    sprites,
                } => {
                    self.draw_monochrome_sprites(
                        texture_id,
                        sprites,
                        &mut instance_offset,
                        viewport_size,
                        command_encoder,
                    );
                }
                PrimitiveBatch::PolychromeSprites {
                    texture_id,
                    sprites,
                } => {
                    self.draw_polychrome_sprites(
                        texture_id,
                        sprites,
                        &mut instance_offset,
                        viewport_size,
                        command_encoder,
                    );
                }
            }
        }

        command_encoder.end_encoding();

        self.instances.did_modify_range(NSRange {
            location: 0,
            length: instance_offset as NSUInteger,
        });

        command_buffer.commit();
        self.sprite_atlas.clear_textures(AtlasTextureKind::Path);
        command_buffer.wait_until_completed();
        drawable.present();
    }

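    /// Rasterizes paths into tiles of the path atlas, grouping vertices by destination
    /// texture so each atlas texture is filled in one render pass. Returns the tile
    /// allocated for each path id.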
    fn rasterize_paths(
        &mut self,
        paths: &[Path<ScaledPixels>],
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> HashMap<PathId, AtlasTile> {
        let mut tiles = HashMap::default();
        let mut vertices_by_texture_id = HashMap::default();
        for path in paths {
            let clipped_bounds = path.bounds.intersect(&path.content_mask.bounds);

            let tile = self
                .sprite_atlas
                .allocate(clipped_bounds.size.map(Into::into), AtlasTextureKind::Path);
            vertices_by_texture_id
                .entry(tile.texture_id)
                .or_insert(Vec::new())
                .extend(path.vertices.iter().map(|vertex| PathVertex {
                    xy_position: vertex.xy_position - path.bounds.origin
                        + tile.bounds.origin.map(Into::into),
                    st_position: vertex.st_position,
                    content_mask: ContentMask {
                        bounds: tile.bounds.map(Into::into),
                    },
                }));
            tiles.insert(path.id, tile);
        }

        for (texture_id, vertices) in vertices_by_texture_id {
            align_offset(offset);
            let next_offset = *offset + vertices.len() * mem::size_of::<PathVertex<ScaledPixels>>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let render_pass_descriptor = metal::RenderPassDescriptor::new();
            let color_attachment = render_pass_descriptor
                .color_attachments()
                .object_at(0)
                .unwrap();

            let texture = self.sprite_atlas.metal_texture(texture_id);
            color_attachment.set_texture(Some(&texture));
            color_attachment.set_load_action(metal::MTLLoadAction::Clear);
            color_attachment.set_store_action(metal::MTLStoreAction::Store);
            color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
            let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
            command_encoder.set_render_pipeline_state(&self.paths_rasterization_pipeline_state);
            command_encoder.set_vertex_buffer(
                PathRasterizationInputIndex::Vertices as u64,
                Some(&self.instances),
                *offset as u64,
            );
            let texture_size = Size {
                width: DevicePixels::from(texture.width()),
                height: DevicePixels::from(texture.height()),
            };
            command_encoder.set_vertex_bytes(
                PathRasterizationInputIndex::AtlasTextureSize as u64,
                mem::size_of_val(&texture_size) as u64,
                &texture_size as *const Size<DevicePixels> as *const _,
            );

            let vertices_bytes_len = mem::size_of::<PathVertex<ScaledPixels>>() * vertices.len();
            let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
            unsafe {
                ptr::copy_nonoverlapping(
                    vertices.as_ptr() as *const u8,
                    buffer_contents,
                    vertices_bytes_len,
                );
            }

            command_encoder.draw_primitives(
                metal::MTLPrimitiveType::Triangle,
                0,
                vertices.len() as u64,
            );
            command_encoder.end_encoding();
            *offset = next_offset;
        }

        tiles
    }

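    /// Draws a batch of shadows. Like the other `draw_*` methods below, it aligns the
    /// instance offset, checks that the shared instance buffer has room, copies the
    /// instance data in, and issues one instanced draw of the six unit-quad vertices.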
    fn draw_shadows(
        &mut self,
        shadows: &[Shadow],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }
        align_offset(offset);

        command_encoder.set_render_pipeline_state(&self.shadows_pipeline_state);
        command_encoder.set_vertex_buffer(
            ShadowInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            ShadowInputIndex::Shadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_buffer(
            ShadowInputIndex::Shadows as u64,
            Some(&self.instances),
            *offset as u64,
        );

        command_encoder.set_vertex_bytes(
            ShadowInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let shadow_bytes_len = mem::size_of::<Shadow>() * shadows.len();
        let next_offset = *offset + shadow_bytes_len;
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
        unsafe {
            ptr::copy_nonoverlapping(
                shadows.as_ptr() as *const u8,
                buffer_contents,
                shadow_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

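    /// Draws a batch of quads using the shared instance-buffer pattern described above.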
    fn draw_quads(
        &mut self,
        quads: &[Quad],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);

        command_encoder.set_render_pipeline_state(&self.quads_pipeline_state);
        command_encoder.set_vertex_buffer(
            QuadInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            QuadInputIndex::Quads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_buffer(
            QuadInputIndex::Quads as u64,
            Some(&self.instances),
            *offset as u64,
        );

        command_encoder.set_vertex_bytes(
            QuadInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let quad_bytes_len = mem::size_of::<Quad>() * quads.len();
        let next_offset = *offset + quad_bytes_len;
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
        unsafe {
            ptr::copy_nonoverlapping(quads.as_ptr() as *const u8, buffer_contents, quad_bytes_len);
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

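    /// Draws previously rasterized paths by sampling their atlas tiles. Consecutive paths
    /// that share an atlas texture are grouped into one `PathSprite` batch per texture.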
    fn draw_paths(
        &mut self,
        paths: &[Path<ScaledPixels>],
        tiles_by_path_id: &HashMap<PathId, AtlasTile>,
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if paths.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.path_sprites_pipeline_state);
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let mut prev_texture_id = None;
        let mut sprites = SmallVec::<[_; 1]>::new();
        let mut paths_and_tiles = paths
            .iter()
            .map(|path| (path, tiles_by_path_id.get(&path.id).unwrap()))
            .peekable();

        loop {
            if let Some((path, tile)) = paths_and_tiles.peek() {
                if prev_texture_id.map_or(true, |texture_id| texture_id == tile.texture_id) {
                    prev_texture_id = Some(tile.texture_id);
                    sprites.push(PathSprite {
                        bounds: Bounds {
                            origin: path.bounds.origin.map(|p| p.floor()),
                            size: tile.bounds.size.map(Into::into),
                        },
                        color: path.color,
                        tile: (*tile).clone(),
                    });
                    paths_and_tiles.next();
                    continue;
                }
            }

            if sprites.is_empty() {
                break;
            } else {
                align_offset(offset);
                let texture_id = prev_texture_id.take().unwrap();
                let texture: metal::Texture = self.sprite_atlas.metal_texture(texture_id);
                let texture_size = size(
                    DevicePixels(texture.width() as i32),
                    DevicePixels(texture.height() as i32),
                );

                command_encoder.set_vertex_buffer(
                    SpriteInputIndex::Sprites as u64,
                    Some(&self.instances),
                    *offset as u64,
                );
                command_encoder.set_vertex_bytes(
                    SpriteInputIndex::AtlasTextureSize as u64,
                    mem::size_of_val(&texture_size) as u64,
                    &texture_size as *const Size<DevicePixels> as *const _,
                );
                command_encoder.set_fragment_buffer(
                    SpriteInputIndex::Sprites as u64,
                    Some(&self.instances),
                    *offset as u64,
                );
                command_encoder
                    .set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));

                let sprite_bytes_len = mem::size_of::<PathSprite>() * sprites.len();
                let next_offset = *offset + sprite_bytes_len;
                assert!(
                    next_offset <= INSTANCE_BUFFER_SIZE,
                    "instance buffer exhausted"
                );

                let buffer_contents =
                    unsafe { (self.instances.contents() as *mut u8).add(*offset) };
                unsafe {
                    ptr::copy_nonoverlapping(
                        sprites.as_ptr() as *const u8,
                        buffer_contents,
                        sprite_bytes_len,
                    );
                }

                command_encoder.draw_primitives_instanced(
                    metal::MTLPrimitiveType::Triangle,
                    0,
                    6,
                    sprites.len() as u64,
                );
                *offset = next_offset;
                sprites.clear();
            }
        }
    }

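    /// Draws a batch of underlines using the shared instance-buffer pattern described above.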
    fn draw_underlines(
        &mut self,
        underlines: &[Underline],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);

        command_encoder.set_render_pipeline_state(&self.underlines_pipeline_state);
        command_encoder.set_vertex_buffer(
            UnderlineInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            UnderlineInputIndex::Underlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_buffer(
            UnderlineInputIndex::Underlines as u64,
            Some(&self.instances),
            *offset as u64,
        );

        command_encoder.set_vertex_bytes(
            UnderlineInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let underline_bytes_len = mem::size_of::<Underline>() * underlines.len();
        let next_offset = *offset + underline_bytes_len;
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
        unsafe {
            ptr::copy_nonoverlapping(
                underlines.as_ptr() as *const u8,
                buffer_contents,
                underline_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }

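    /// Draws a batch of monochrome sprites (e.g. glyph renders), sampled from the given
    /// atlas texture and tinted with a per-sprite color.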
    fn draw_monochrome_sprites(
        &mut self,
        texture_id: AtlasTextureId,
        sprites: &[MonochromeSprite],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if sprites.is_empty() {
            return;
        }
        align_offset(offset);

        let texture = self.sprite_atlas.metal_texture(texture_id);
        let texture_size = size(
            DevicePixels(texture.width() as i32),
            DevicePixels(texture.height() as i32),
        );
        command_encoder.set_render_pipeline_state(&self.monochrome_sprites_pipeline_state);
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::AtlasTextureSize as u64,
            mem::size_of_val(&texture_size) as u64,
            &texture_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_fragment_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));

        let sprite_bytes_len = mem::size_of::<MonochromeSprite>() * sprites.len();
        let next_offset = *offset + sprite_bytes_len;
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
        unsafe {
            ptr::copy_nonoverlapping(
                sprites.as_ptr() as *const u8,
                buffer_contents,
                sprite_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprites.len() as u64,
        );
        *offset = next_offset;
    }

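    /// Draws a batch of polychrome sprites (e.g. image renders), sampled full-color from
    /// the given atlas texture.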
    fn draw_polychrome_sprites(
        &mut self,
        texture_id: AtlasTextureId,
        sprites: &[PolychromeSprite],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if sprites.is_empty() {
            return;
        }
        align_offset(offset);

        let texture = self.sprite_atlas.metal_texture(texture_id);
        let texture_size = size(
            DevicePixels(texture.width() as i32),
            DevicePixels(texture.height() as i32),
        );
        command_encoder.set_render_pipeline_state(&self.polychrome_sprites_pipeline_state);
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::AtlasTextureSize as u64,
            mem::size_of_val(&texture_size) as u64,
            &texture_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_fragment_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));

        let sprite_bytes_len = mem::size_of::<PolychromeSprite>() * sprites.len();
        let next_offset = *offset + sprite_bytes_len;
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
        unsafe {
            ptr::copy_nonoverlapping(
                sprites.as_ptr() as *const u8,
                buffer_contents,
                sprite_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprites.len() as u64,
        );
        *offset = next_offset;
    }
}

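// Builds a pipeline state for drawing primitives into the drawable. Blending composites
// straight-alpha colors over the destination: source RGB is scaled by source alpha and
// destination RGB by one minus source alpha, while alpha accumulates additively.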
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

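// Builds the pipeline state for rasterizing paths into an R16Float atlas texture. Blending
// is purely additive so overlapping triangles accumulate coverage in the render target.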
fn build_path_rasterization_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

// Aligning to multiples of 256 makes Metal happy.
fn align_offset(offset: &mut usize) {
    *offset = ((*offset + 255) / 256) * 256;
}

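// The discriminants below are the buffer/texture argument indices passed to the command
// encoder; they must stay in sync with the corresponding shader functions in
// shaders.metallib.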
#[repr(C)]
enum ShadowInputIndex {
    Vertices = 0,
    Shadows = 1,
    ViewportSize = 2,
}

#[repr(C)]
enum QuadInputIndex {
    Vertices = 0,
    Quads = 1,
    ViewportSize = 2,
}

#[repr(C)]
enum UnderlineInputIndex {
    Vertices = 0,
    Underlines = 1,
    ViewportSize = 2,
}

#[repr(C)]
enum SpriteInputIndex {
    Vertices = 0,
    Sprites = 1,
    ViewportSize = 2,
    AtlasTextureSize = 3,
    AtlasTexture = 4,
}

#[repr(C)]
enum PathRasterizationInputIndex {
    Vertices = 0,
    AtlasTextureSize = 1,
}

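/// Instance data for drawing one rasterized path tile; copied byte-for-byte into the
/// instance buffer, so its layout must match what the path sprite shaders expect.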
#[derive(Clone, Debug, Eq, PartialEq)]
#[repr(C)]
pub struct PathSprite {
    pub bounds: Bounds<ScaledPixels>,
    pub color: Hsla,
    pub tile: AtlasTile,
}