1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow},
10};
11use cocoa::foundation::NSUInteger;
12use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
13use shaders::ToFloat2 as _;
14use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
15
16const SHADERS_METALLIB: &'static [u8] =
17 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
18const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
19
/// Metal-backed renderer that encodes a `Scene` into a command buffer,
/// drawing quads, shadows, glyph/icon sprites, images, and rasterized paths.
pub struct Renderer {
    /// Cache of rasterized glyph and icon sprites, bucketed by atlas.
    sprite_cache: SpriteCache,
    /// Cache of uploaded image data, bucketed by atlas.
    image_cache: ImageCache,
    /// Atlas allocator for intermediate path rasterization targets
    /// (R8Unorm render targets; see `build_path_atlas_texture_descriptor`).
    path_atlases: AtlasAllocator,
    /// One pipeline state per primitive kind, all sharing the unit-quad
    /// vertex buffer for instanced drawing.
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    /// Pipeline used to rasterize path vertices into the path atlases.
    path_atlas_pipeline_state: metal::RenderPipelineState,
    /// Six vertices covering the unit square (two triangles); instanced
    /// draws position and scale these in the vertex shaders.
    unit_vertices: metal::Buffer,
    /// Shared CPU-writable (managed) buffer holding all per-frame instance
    /// data; `render` reports the written range via `did_modify_range`.
    instances: metal::Buffer,
}
32
/// A rasterized path waiting to be composited from a path atlas into the
/// output, tagged with the layer it belongs to so it is drawn in z-order.
struct PathSprite {
    // Index of the scene layer this path came from.
    layer_id: usize,
    // Which path atlas holds the rasterized coverage data.
    atlas_id: usize,
    // Instance data handed to the sprite shader when compositing.
    shader_data: shaders::GPUISprite,
}
38
39impl Renderer {
    /// Builds a renderer for the given device and output pixel format.
    ///
    /// Compiles the embedded shader library, allocates the shared unit-quad
    /// vertex buffer and the per-frame instance buffer, and creates one
    /// pipeline state per primitive kind plus the path-atlas rasterization
    /// pipeline. Panics if the shader library or any pipeline fails to build.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; every instanced draw
        // scales/translates these in its vertex shader.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Shared managed buffer for all per-frame instance data; the CPU
        // writes into it and `render` flushes the used range to the GPU.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768));
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Path atlases are single-channel, so this pipeline targets R8Unorm
        // rather than the output's pixel format.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }
124
125 pub fn render(
126 &mut self,
127 scene: &Scene,
128 drawable_size: Vector2F,
129 command_buffer: &metal::CommandBufferRef,
130 output: &metal::TextureRef,
131 ) {
132 let mut offset = 0;
133
134 let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
135 self.render_layers(
136 scene,
137 path_sprites,
138 &mut offset,
139 drawable_size,
140 command_buffer,
141 output,
142 );
143 self.instances.did_modify_range(NSRange {
144 location: 0,
145 length: offset as NSUInteger,
146 });
147 self.image_cache.finish_frame();
148 }
149
    /// Rasterizes every path in `scene` into the R8 path atlases, batching
    /// vertices for consecutive paths that land in the same atlas into a
    /// single draw call.
    ///
    /// Returns one `PathSprite` per path (tagged with its layer id) so that
    /// `render_layers` can later composite the rasterized coverage from the
    /// atlases into the output in z-order.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                // Work in device pixels; the size is ceiled so the atlas
                // allocation fully covers the path.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (alloc_id, atlas_origin) = self.path_atlases.allocate(size.to_i32());
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        // Path sprites sample coverage, so the fragment
                        // shader computes winding for them.
                        compute_winding: 1,
                    },
                });

                // When the allocator moved to a different atlas, flush the
                // vertices accumulated for the previous one before starting
                // a new batch.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Re-base the path's vertices onto its atlas slot and clip
                // them to that slot.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch, if any paths were processed.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
212
213 fn render_paths_to_atlas(
214 &mut self,
215 offset: &mut usize,
216 vertices: &[shaders::GPUIPathVertex],
217 atlas_id: usize,
218 command_buffer: &metal::CommandBufferRef,
219 ) {
220 align_offset(offset);
221 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
222 assert!(
223 next_offset <= INSTANCE_BUFFER_SIZE,
224 "instance buffer exhausted"
225 );
226
227 let render_pass_descriptor = metal::RenderPassDescriptor::new();
228 let color_attachment = render_pass_descriptor
229 .color_attachments()
230 .object_at(0)
231 .unwrap();
232 let texture = self.path_atlases.texture(atlas_id).unwrap();
233 color_attachment.set_texture(Some(texture));
234 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
235 color_attachment.set_store_action(metal::MTLStoreAction::Store);
236 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
237
238 let path_atlas_command_encoder =
239 command_buffer.new_render_command_encoder(render_pass_descriptor);
240 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
241 path_atlas_command_encoder.set_vertex_buffer(
242 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
243 Some(&self.instances),
244 *offset as u64,
245 );
246 path_atlas_command_encoder.set_vertex_bytes(
247 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
248 mem::size_of::<shaders::vector_float2>() as u64,
249 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
250 as *const c_void,
251 );
252
253 let buffer_contents = unsafe {
254 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
255 };
256
257 for (ix, vertex) in vertices.iter().enumerate() {
258 unsafe {
259 *buffer_contents.add(ix) = *vertex;
260 }
261 }
262
263 path_atlas_command_encoder.draw_primitives(
264 metal::MTLPrimitiveType::Triangle,
265 0,
266 vertices.len() as u64,
267 );
268 path_atlas_command_encoder.end_encoding();
269 *offset = next_offset;
270 }
271
    /// Encodes the main render pass: clears `output`, then draws every scene
    /// layer in order, applying each layer's scissor rect first.
    ///
    /// Within a layer, primitives are drawn back-to-front in a fixed order:
    /// shadows, quads, path sprites, glyph/icon sprites, images, underlines.
    /// `path_sprites` must be ordered by layer id, as produced by
    /// `render_path_atlases`; each layer consumes its own prefix of them.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Peekable so each layer can consume only its own path sprites.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            // Underlines reuse the quad pipeline.
            self.render_quads(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
352
353 fn clip(
354 &mut self,
355 scene: &Scene,
356 layer: &Layer,
357 drawable_size: Vector2F,
358 command_encoder: &metal::RenderCommandEncoderRef,
359 ) {
360 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
361 vec2f(0., 0.),
362 drawable_size / scene.scale_factor(),
363 )) * scene.scale_factor())
364 .round();
365 command_encoder.set_scissor_rect(metal::MTLScissorRect {
366 x: clip_bounds.origin_x() as NSUInteger,
367 y: clip_bounds.origin_y() as NSUInteger,
368 width: clip_bounds.width() as NSUInteger,
369 height: clip_bounds.height() as NSUInteger,
370 });
371 }
372
373 fn render_shadows(
374 &mut self,
375 shadows: &[Shadow],
376 scale_factor: f32,
377 offset: &mut usize,
378 drawable_size: Vector2F,
379 command_encoder: &metal::RenderCommandEncoderRef,
380 ) {
381 if shadows.is_empty() {
382 return;
383 }
384
385 align_offset(offset);
386 let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
387 assert!(
388 next_offset <= INSTANCE_BUFFER_SIZE,
389 "instance buffer exhausted"
390 );
391
392 command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
393 command_encoder.set_vertex_buffer(
394 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
395 Some(&self.unit_vertices),
396 0,
397 );
398 command_encoder.set_vertex_buffer(
399 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
400 Some(&self.instances),
401 *offset as u64,
402 );
403 command_encoder.set_vertex_bytes(
404 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
405 mem::size_of::<shaders::GPUIUniforms>() as u64,
406 [shaders::GPUIUniforms {
407 viewport_size: drawable_size.to_float2(),
408 }]
409 .as_ptr() as *const c_void,
410 );
411
412 let buffer_contents = unsafe {
413 (self.instances.contents() as *mut u8).offset(*offset as isize)
414 as *mut shaders::GPUIShadow
415 };
416 for (ix, shadow) in shadows.iter().enumerate() {
417 let shape_bounds = shadow.bounds * scale_factor;
418 let shader_shadow = shaders::GPUIShadow {
419 origin: shape_bounds.origin().to_float2(),
420 size: shape_bounds.size().to_float2(),
421 corner_radius: shadow.corner_radius * scale_factor,
422 sigma: shadow.sigma,
423 color: shadow.color.to_uchar4(),
424 };
425 unsafe {
426 *(buffer_contents.offset(ix as isize)) = shader_shadow;
427 }
428 }
429
430 command_encoder.draw_primitives_instanced(
431 metal::MTLPrimitiveType::Triangle,
432 0,
433 6,
434 shadows.len() as u64,
435 );
436 *offset = next_offset;
437 }
438
439 fn render_quads(
440 &mut self,
441 quads: &[Quad],
442 scale_factor: f32,
443 offset: &mut usize,
444 drawable_size: Vector2F,
445 command_encoder: &metal::RenderCommandEncoderRef,
446 ) {
447 if quads.is_empty() {
448 return;
449 }
450 align_offset(offset);
451 let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
452 assert!(
453 next_offset <= INSTANCE_BUFFER_SIZE,
454 "instance buffer exhausted"
455 );
456
457 command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
458 command_encoder.set_vertex_buffer(
459 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
460 Some(&self.unit_vertices),
461 0,
462 );
463 command_encoder.set_vertex_buffer(
464 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
465 Some(&self.instances),
466 *offset as u64,
467 );
468 command_encoder.set_vertex_bytes(
469 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
470 mem::size_of::<shaders::GPUIUniforms>() as u64,
471 [shaders::GPUIUniforms {
472 viewport_size: drawable_size.to_float2(),
473 }]
474 .as_ptr() as *const c_void,
475 );
476
477 let buffer_contents = unsafe {
478 (self.instances.contents() as *mut u8).offset(*offset as isize)
479 as *mut shaders::GPUIQuad
480 };
481 for (ix, quad) in quads.iter().enumerate() {
482 let bounds = quad.bounds * scale_factor;
483 let border_width = quad.border.width * scale_factor;
484 let shader_quad = shaders::GPUIQuad {
485 origin: bounds.origin().round().to_float2(),
486 size: bounds.size().round().to_float2(),
487 background_color: quad
488 .background
489 .unwrap_or(Color::transparent_black())
490 .to_uchar4(),
491 border_top: border_width * (quad.border.top as usize as f32),
492 border_right: border_width * (quad.border.right as usize as f32),
493 border_bottom: border_width * (quad.border.bottom as usize as f32),
494 border_left: border_width * (quad.border.left as usize as f32),
495 border_color: quad.border.color.to_uchar4(),
496 corner_radius: quad.corner_radius * scale_factor,
497 };
498 unsafe {
499 *(buffer_contents.offset(ix as isize)) = shader_quad;
500 }
501 }
502
503 command_encoder.draw_primitives_instanced(
504 metal::MTLPrimitiveType::Triangle,
505 0,
506 6,
507 quads.len() as u64,
508 );
509 *offset = next_offset;
510 }
511
    /// Draws a layer's glyphs and icons as textured sprites.
    ///
    /// First rasterizes/fetches each glyph and icon via the sprite cache and
    /// buckets the resulting instances by atlas; then issues one instanced
    /// draw per atlas, copying that atlas's instances into the shared
    /// instance buffer at an aligned `offset`. No-op when both slices are
    /// empty; panics if the instance buffer would overflow.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            // `render_glyph` returns None when the glyph has no sprite
            // (e.g. nothing to draw); such glyphs are skipped.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scale_factor,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            let origin = icon.bounds.origin() * scale_factor;
            let target_size = icon.bounds.size() * scale_factor;
            // Icons are rasterized at 2x the target size -- presumably for
            // downsampling quality; TODO confirm against the sprite cache.
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One draw call per atlas; each batch gets its own region of the
        // instance buffer and binds its own texture.
        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assertion above guarantees the destination range
            // lies within the instance buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
627
    /// Draws a layer's images from the image-cache atlases.
    ///
    /// Uploads each image via the image cache, buckets the resulting
    /// instances by atlas, then issues one instanced draw per atlas after
    /// copying that atlas's instances into the shared instance buffer at an
    /// aligned `offset`. No-op for an empty slice; panics if the instance
    /// buffer would overflow.
    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // Each side's border collapses to 0 when its flag is
                    // false (bool -> 0.0/1.0 multiplier).
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One draw call per atlas, mirroring `render_sprites`.
        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assertion above guarantees the destination range
            // lies within the instance buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
717
    /// Composites the rasterized path sprites belonging to `layer_id` from
    /// their path atlases into the output.
    ///
    /// Consumes sprites from the shared peekable iterator only while their
    /// `layer_id` matches (the iterator is ordered by layer; see
    /// `render_path_atlases`), writing instance data straight into the
    /// shared instance buffer and flushing one draw per atlas whenever the
    /// atlas changes, plus a final draw for the last batch.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        // `render_path_sprites_for_atlas` expects an aligned offset, so
        // align before the first batch (and again after each flush below).
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop at the first sprite that belongs to a later layer.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw everything accumulated for the
                    // previous atlas (this advances `offset`), then start a
                    // fresh aligned batch.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Write this sprite's instance data into the current batch.
            // NOTE(review): unlike the other render methods, the overflow
            // assertion happens later in `render_path_sprites_for_atlas`,
            // i.e. after these writes -- confirm this is intentional.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Flush the final batch, if this layer had any path sprites.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
784
    /// Issues one instanced sprite draw for `sprite_count` path-sprite
    /// instances already written at `*offset` in the instance buffer,
    /// sampling from the given path atlas.
    ///
    /// The caller must have aligned `offset` (via `align_offset`) before
    /// writing the instances; this method advances `offset` past them.
    /// Panics if the batch would exceed the instance buffer.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
822}
823
824fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
825 let texture_descriptor = metal::TextureDescriptor::new();
826 texture_descriptor.set_width(2048);
827 texture_descriptor.set_height(2048);
828 texture_descriptor.set_pixel_format(MTLPixelFormat::R8Unorm);
829 texture_descriptor
830 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
831 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
832 texture_descriptor
833}
834
/// Rounds `offset` up to the next multiple of 256, the buffer-offset
/// alignment Metal expects for bound vertex buffers.
fn align_offset(offset: &mut usize) {
    *offset = (*offset + 255) / 256 * 256;
}
841
/// Builds a render pipeline for on-screen drawing with alpha blending.
///
/// Looks up the named vertex/fragment functions in `library` and configures
/// the single color attachment for `pixel_format` with
/// src-alpha / one-minus-src-alpha RGB blending (and additive alpha).
/// Panics if a shader function is missing or pipeline creation fails.
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    // RGB: classic "over" blend (srcAlpha / 1 - srcAlpha); alpha: additive.
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}
875
/// Builds the render pipeline used to rasterize paths into the path atlases.
///
/// Identical to `build_pipeline_state` except for the blend configuration:
/// fully additive (One/One for both RGB and alpha), so overlapping path
/// triangles accumulate in the single-channel atlas -- presumably to sum
/// winding contributions (cf. `compute_winding`); confirm against the
/// path_atlas shaders. Panics if a shader function is missing or pipeline
/// creation fails.
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}
909
910mod shaders {
911 #![allow(non_upper_case_globals)]
912 #![allow(non_camel_case_types)]
913 #![allow(non_snake_case)]
914
915 use crate::{
916 color::Color,
917 geometry::vector::{Vector2F, Vector2I},
918 };
919 use std::mem;
920
921 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
922
923 pub trait ToFloat2 {
924 fn to_float2(&self) -> vector_float2;
925 }
926
927 impl ToFloat2 for (f32, f32) {
928 fn to_float2(&self) -> vector_float2 {
929 unsafe {
930 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
931 output <<= 32;
932 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
933 output
934 }
935 }
936 }
937
938 impl ToFloat2 for Vector2F {
939 fn to_float2(&self) -> vector_float2 {
940 unsafe {
941 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
942 output <<= 32;
943 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
944 output
945 }
946 }
947 }
948
949 impl ToFloat2 for Vector2I {
950 fn to_float2(&self) -> vector_float2 {
951 self.to_f32().to_float2()
952 }
953 }
954
955 impl Color {
956 pub fn to_uchar4(&self) -> vector_uchar4 {
957 let mut vec = self.a as vector_uchar4;
958 vec <<= 8;
959 vec |= self.b as vector_uchar4;
960 vec <<= 8;
961 vec |= self.g as vector_uchar4;
962 vec <<= 8;
963 vec |= self.r as vector_uchar4;
964 vec
965 }
966 }
967}