use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow},
};
use cocoa::foundation::NSUInteger;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
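// With the generated instance structs at roughly 32-48 bytes each (the exact
// sizes come from the bindgen'd shader types), 1 MiB holds on the order of
// tens of thousands of instances per frame.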

pub struct Renderer {
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

impl Renderer {
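    /// Builds the shader library, pipeline states, and shared vertex/instance
    /// buffers for the given device.
    ///
    /// A minimal per-frame usage sketch, assuming hypothetical `device`,
    /// `queue`, `fonts`, `scene`, `drawable_size`, and `drawable` values (the
    /// real setup lives in the platform window code):
    ///
    /// ```ignore
    /// let mut renderer = Renderer::new(device, MTLPixelFormat::BGRA8Unorm, 2.0, fonts);
    /// let command_buffer = queue.new_command_buffer();
    /// renderer.render(&scene, drawable_size, command_buffer, drawable.texture());
    /// command_buffer.present_drawable(drawable);
    /// command_buffer.commit();
    /// ```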
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

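        // A unit quad expressed as two triangles; every quad, shadow, sprite,
        // and image draw instances these six vertices and positions/scales
        // them in its vertex shader.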
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768));
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }

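    /// Encodes one frame: paths are first rasterized into offscreen atlas
    /// textures, then each layer's primitives are drawn into `output`. The
    /// running `offset` records how much of the shared instance buffer the
    /// frame consumed, so a single `did_modify_range` call at the end flushes
    /// everything the CPU wrote to the GPU.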
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
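        // `offset` acts as a bump allocator into `self.instances`: each draw
        // batch reserves the next 256-byte-aligned slice of the shared buffer.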
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }

    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
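        // Path vertices are batched per atlas: they accumulate until a path
        // lands in a different atlas, at which point the pending batch is
        // flushed in a render pass targeting the previous atlas texture.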
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (alloc_id, atlas_origin) = self.path_atlases.allocate(size.to_i32());
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *buffer_contents.add(ix) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *buffer_contents.add(ix) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        self.sprite_cache.set_scale_factor(scale_factor);

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            let origin = icon.bounds.origin() * scale_factor;
            let target_size = icon.bounds.size() * scale_factor;
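            // Rasterize the icon at twice its target size; the sprite shader
            // downsamples it, which (presumably) is what keeps edges crisp
            // under linear filtering.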
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

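        // The sprite iterator is shared across layers and ordered by layer id,
        // so consume entries only while they belong to this layer; the
        // iterator is left positioned at the next layer's first sprite.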
        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
}

fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

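/// Rounds `offset` up to the next multiple of 256, e.g. 300 becomes 512 while
/// 256 is left unchanged. Metal wants buffer offsets passed to
/// `set_vertex_buffer` to be 256-byte aligned on macOS, so every instance
/// batch starts on such a boundary.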
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r;
    }
}

fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
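    // Classic "over" compositing for color (SourceAlpha / OneMinusSourceAlpha);
    // alpha itself blends additively so destination coverage is preserved.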
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
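    // Purely additive blending: overlapping path triangles accumulate their
    // winding contributions in the R16Float atlas, and the sprite shader later
    // resolves the accumulated winding into coverage.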
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

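    /// Converts a point into `vector_float2`, which the generated bindings
    /// expose as a plain 64-bit integer: `y`'s bits fill the high 32 bits and
    /// `x`'s the low 32, matching the memory layout of a simd `float2` on a
    /// little-endian target.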
    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            (self.x(), self.y()).to_float2()
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
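        /// Packs RGBA into a `vector_uchar4` with `r` in the lowest byte
        /// (0xAA_BB_GG_RR read as a 32-bit integer), the component order the
        /// shaders expect.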
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}
969}