1use super::{
2 atlas::{self, AtlasAllocator},
3 sprite_cache::SpriteCache,
4};
5use crate::{
6 color::Color,
7 geometry::{
8 rect::{RectF, RectI},
9 vector::{vec2f, vec2i, Vector2F},
10 },
11 platform,
12 scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow},
13};
14use cocoa::foundation::NSUInteger;
15use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
16use shaders::ToFloat2 as _;
17use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
18
19const SHADERS_METALLIB: &'static [u8] =
20 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
21const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
22
/// Metal-backed renderer for a `Scene`: rasterizes paths into offscreen
/// atlases first, then composites shadows, quads, path sprites, glyph/icon
/// sprites, images, and underlines into the output drawable.
pub struct Renderer {
    // Glyph/icon rasterization cache backed by one or more atlas textures.
    sprite_cache: SpriteCache,
    // Offscreen R8 atlases that path coverage is rendered into each frame.
    path_atlases: AtlasAllocator,
    // BGRA atlases holding uploaded image pixel data.
    image_atlases: AtlasAllocator,
    // Image id -> atlas allocation from the previous frame; entries that are
    // not reused by the current frame are deallocated at the end of `render`.
    prev_rendered_images: HashMap<usize, (atlas::AllocId, RectI)>,
    // Image id -> atlas allocation made or reused during the current frame.
    curr_rendered_images: HashMap<usize, (atlas::AllocId, RectI)>,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    // Pipeline for the offscreen pass that rasterizes paths into the atlases.
    path_atlas_pipeline_state: metal::RenderPipelineState,
    // Six vertices describing a unit quad, shared by all instanced draws.
    unit_vertices: metal::Buffer,
    // Shared staging buffer for per-instance shader data, refilled each frame.
    instances: metal::Buffer,
}
37
/// A path that has been rasterized into a path atlas and is waiting to be
/// composited into its layer as a textured sprite.
struct PathSprite {
    // Index of the scene layer this sprite belongs to.
    layer_id: usize,
    // Which path-atlas texture holds the rasterized coverage.
    atlas_id: usize,
    // Per-instance data consumed by the sprite shader.
    shader_data: shaders::GPUISprite,
}
43
44impl Renderer {
    /// Builds a renderer targeting the given Metal `device`.
    ///
    /// Loads the shader library embedded at compile time, allocates the shared
    /// unit-quad vertex buffer and the instance staging buffer, and compiles
    /// one render pipeline per primitive kind (quads, shadows, sprites,
    /// images) plus the offscreen path-atlas pipeline.
    ///
    /// # Panics
    /// Panics if the shader library cannot be loaded or any pipeline fails to
    /// compile — both indicate a build-time problem, not a runtime condition.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; instanced draws position and
        // scale this quad per primitive in the vertex shaders.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // CPU-writable (managed) staging buffer shared by all draw calls;
        // `render` reports the written range via `did_modify_range`.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let image_atlases =
            AtlasAllocator::new(device.clone(), build_image_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // The path atlas pass renders into single-channel (R8) textures, so it
        // uses its own pipeline with a fixed pixel format and additive blending.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            path_atlases,
            image_atlases,
            prev_rendered_images: Default::default(),
            curr_rendered_images: Default::default(),
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }
132
133 pub fn render(
134 &mut self,
135 scene: &Scene,
136 drawable_size: Vector2F,
137 command_buffer: &metal::CommandBufferRef,
138 output: &metal::TextureRef,
139 ) {
140 let mut offset = 0;
141
142 mem::swap(
143 &mut self.curr_rendered_images,
144 &mut self.prev_rendered_images,
145 );
146
147 let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
148 self.render_layers(
149 scene,
150 path_sprites,
151 &mut offset,
152 drawable_size,
153 command_buffer,
154 output,
155 );
156 self.instances.did_modify_range(NSRange {
157 location: 0,
158 length: offset as NSUInteger,
159 });
160
161 for (id, _) in self.prev_rendered_images.values() {
162 self.image_atlases.deallocate(*id);
163 }
164 self.prev_rendered_images.clear();
165 }
166
    /// Rasterizes every path in the scene into the offscreen path atlases and
    /// returns one `PathSprite` per path, in layer order, for later
    /// compositing by `render_layers`.
    ///
    /// Vertices are batched per atlas texture: whenever the allocator places a
    /// path in a different atlas than the previous path, the accumulated batch
    /// is flushed via `render_paths_to_atlas`.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        // Path atlas contents are regenerated from scratch every frame.
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                // Work in device pixels; the region size is ceiled (and the
                // sprite origin floored below) so it covers whole pixels.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (alloc_id, atlas_origin) = self.path_atlases.allocate(size.to_i32());
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        // NOTE(review): presumably tells the sprite shader to
                        // interpret the atlas texel as a winding count rather
                        // than plain alpha — confirm against the shader source.
                        compute_winding: 1,
                    },
                });

                // The allocator moved to a different atlas texture: flush the
                // batch accumulated for the previous one.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Emit this path's triangles in atlas space, clipped to the
                // region that was just allocated for it.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch, if any paths were seen at all.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
229
230 fn render_paths_to_atlas(
231 &mut self,
232 offset: &mut usize,
233 vertices: &[shaders::GPUIPathVertex],
234 atlas_id: usize,
235 command_buffer: &metal::CommandBufferRef,
236 ) {
237 align_offset(offset);
238 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
239 assert!(
240 next_offset <= INSTANCE_BUFFER_SIZE,
241 "instance buffer exhausted"
242 );
243
244 let render_pass_descriptor = metal::RenderPassDescriptor::new();
245 let color_attachment = render_pass_descriptor
246 .color_attachments()
247 .object_at(0)
248 .unwrap();
249 let texture = self.path_atlases.texture(atlas_id).unwrap();
250 color_attachment.set_texture(Some(texture));
251 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
252 color_attachment.set_store_action(metal::MTLStoreAction::Store);
253 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
254
255 let path_atlas_command_encoder =
256 command_buffer.new_render_command_encoder(render_pass_descriptor);
257 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
258 path_atlas_command_encoder.set_vertex_buffer(
259 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
260 Some(&self.instances),
261 *offset as u64,
262 );
263 path_atlas_command_encoder.set_vertex_bytes(
264 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
265 mem::size_of::<shaders::vector_float2>() as u64,
266 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
267 as *const c_void,
268 );
269
270 let buffer_contents = unsafe {
271 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
272 };
273
274 for (ix, vertex) in vertices.iter().enumerate() {
275 unsafe {
276 *buffer_contents.add(ix) = *vertex;
277 }
278 }
279
280 path_atlas_command_encoder.draw_primitives(
281 metal::MTLPrimitiveType::Triangle,
282 0,
283 vertices.len() as u64,
284 );
285 path_atlas_command_encoder.end_encoding();
286 *offset = next_offset;
287 }
288
    /// Encodes the main on-screen render pass: clears `output`, sets the
    /// viewport, then walks the scene's layers in order, drawing each layer's
    /// shadows, quads, path sprites, glyph/icon sprites, images, and
    /// underlines under that layer's scissor rect.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Path sprites arrive ordered by layer id, so each layer consumes its
        // own prefix of this iterator inside `render_path_sprites`.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            // Underlines are drawn with the same pipeline as quads, last so
            // they appear on top of the layer's other content.
            self.render_quads(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
369
370 fn clip(
371 &mut self,
372 scene: &Scene,
373 layer: &Layer,
374 drawable_size: Vector2F,
375 command_encoder: &metal::RenderCommandEncoderRef,
376 ) {
377 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
378 vec2f(0., 0.),
379 drawable_size / scene.scale_factor(),
380 )) * scene.scale_factor())
381 .round();
382 command_encoder.set_scissor_rect(metal::MTLScissorRect {
383 x: clip_bounds.origin_x() as NSUInteger,
384 y: clip_bounds.origin_y() as NSUInteger,
385 width: clip_bounds.width() as NSUInteger,
386 height: clip_bounds.height() as NSUInteger,
387 });
388 }
389
    /// Draws a batch of drop shadows as one instanced draw, writing per-shadow
    /// shader data into the instance buffer at a 256-byte-aligned `*offset`
    /// and advancing it past the written bytes. No-op for an empty slice.
    ///
    /// # Panics
    /// Panics if the batch does not fit in the remaining instance buffer space.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        // Small uniform data is passed inline; Metal copies it immediately.
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            // SAFETY: `next_offset <= INSTANCE_BUFFER_SIZE` was asserted above,
            // so every write below stays inside the instance buffer.
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            // Convert geometry to device pixels; sigma is used as-is.
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        // One unit quad (6 vertices) per shadow instance.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
455
    /// Draws a batch of quads (also used for underlines) as one instanced
    /// draw, writing per-quad shader data into the instance buffer at a
    /// 256-byte-aligned `*offset` and advancing it. No-op for an empty slice.
    ///
    /// # Panics
    /// Panics if the batch does not fit in the remaining instance buffer space.
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            // SAFETY: `next_offset <= INSTANCE_BUFFER_SIZE` was asserted above,
            // so every write below stays inside the instance buffer.
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            // Convert to device pixels; edges snap to the pixel grid.
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                // The bool->usize->f32 cast turns each edge-enabled flag into
                // 0.0 or 1.0, zeroing the width of disabled edges.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        // One unit quad (6 vertices) per instance.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }
528
    /// Draws glyph and icon sprites, batched per sprite-atlas texture.
    ///
    /// Glyphs are rasterized on demand through the sprite cache (a glyph the
    /// cache cannot render is silently skipped). Icons are rasterized at twice
    /// their target pixel size — presumably for downsampling quality; confirm
    /// against `SpriteCache::render_icon`. Each atlas batch is uploaded to the
    /// instance buffer and issued as one instanced draw.
    ///
    /// # Panics
    /// Panics if a batch does not fit in the remaining instance buffer space.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        // Group shader instances by the atlas texture they sample from.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scale_factor,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            let origin = icon.bounds.origin() * scale_factor;
            let target_size = icon.bounds.size() * scale_factor;
            // Rasterize at 2x the on-screen size (see doc comment above).
            let source_size = (target_size * 2.).ceil().to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        // State shared by every batch: pipeline, unit quad, viewport, atlas size.
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: `next_offset <= INSTANCE_BUFFER_SIZE` was asserted
            // above, so the copied range stays inside the instance buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            // One unit quad (6 vertices) per sprite instance.
            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
643
    /// Draws images, uploading pixel data into the image atlases and reusing
    /// allocations from earlier frames when possible.
    ///
    /// Cache lookup order per image id: the previous frame's allocation
    /// (removed from `prev_rendered_images` so the end-of-frame cleanup will
    /// not free it), then an allocation already made earlier this frame, then
    /// a fresh upload. Every allocation used is recorded in
    /// `curr_rendered_images`. No-op for an empty slice.
    ///
    /// # Panics
    /// Panics if a batch does not fit in the remaining instance buffer space.
    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        // Group shader instances by the atlas texture they sample from.
        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let (alloc_id, atlas_bounds) = self
                .prev_rendered_images
                .remove(&image.data.id)
                .or_else(|| self.curr_rendered_images.get(&image.data.id).copied())
                .unwrap_or_else(|| {
                    self.image_atlases
                        .upload(image.data.size(), image.data.as_bytes())
                });
            self.curr_rendered_images
                .insert(image.data.id, (alloc_id, atlas_bounds));
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                });
        }

        // State shared by every batch: pipeline, unit quad, viewport size.
        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_atlases.texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: `next_offset <= INSTANCE_BUFFER_SIZE` was asserted
            // above, so the copied range stays inside the instance buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            // One unit quad (6 vertices) per image instance.
            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
734
735 fn render_path_sprites(
736 &mut self,
737 layer_id: usize,
738 sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
739 offset: &mut usize,
740 drawable_size: Vector2F,
741 command_encoder: &metal::RenderCommandEncoderRef,
742 ) {
743 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
744 command_encoder.set_vertex_buffer(
745 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
746 Some(&self.unit_vertices),
747 0,
748 );
749 command_encoder.set_vertex_bytes(
750 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
751 mem::size_of::<shaders::vector_float2>() as u64,
752 [drawable_size.to_float2()].as_ptr() as *const c_void,
753 );
754
755 let mut atlas_id = None;
756 let mut atlas_sprite_count = 0;
757 align_offset(offset);
758
759 while let Some(sprite) = sprites.peek() {
760 if sprite.layer_id != layer_id {
761 break;
762 }
763
764 let sprite = sprites.next().unwrap();
765 if let Some(atlas_id) = atlas_id.as_mut() {
766 if sprite.atlas_id != *atlas_id {
767 self.render_path_sprites_for_atlas(
768 offset,
769 *atlas_id,
770 atlas_sprite_count,
771 command_encoder,
772 );
773
774 *atlas_id = sprite.atlas_id;
775 atlas_sprite_count = 0;
776 align_offset(offset);
777 }
778 } else {
779 atlas_id = Some(sprite.atlas_id);
780 }
781
782 unsafe {
783 let buffer_contents = (self.instances.contents() as *mut u8)
784 .offset(*offset as isize)
785 as *mut shaders::GPUISprite;
786 *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
787 }
788
789 atlas_sprite_count += 1;
790 }
791
792 if let Some(atlas_id) = atlas_id {
793 self.render_path_sprites_for_atlas(
794 offset,
795 atlas_id,
796 atlas_sprite_count,
797 command_encoder,
798 );
799 }
800 }
801
    /// Issues one instanced draw for `sprite_count` path sprites whose shader
    /// data has already been written to the instance buffer at `*offset`,
    /// sampling from path atlas `atlas_id`; advances `*offset` past the
    /// consumed bytes.
    ///
    /// # Panics
    /// Panics if the sprite data would extend past the instance buffer.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // One unit quad (6 vertices) per sprite instance.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
839}
840
/// Texture descriptor for 2048×2048 single-channel (R8) path atlases, usable
/// both as a render target (the path rasterization pass) and for shader reads
/// (when compositing path sprites). Private storage keeps contents GPU-only.
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R8Unorm);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}
851
/// Texture descriptor for 2048×2048 BGRA image atlases.
fn build_image_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
    // NOTE(review): unlike the path-atlas descriptor, usage and storage mode
    // are left at Metal's defaults here — confirm that is intentional (image
    // atlases receive CPU uploads, so managed/shared storage may be required).
    texture_descriptor
}
859
/// Rounds `*offset` up to the next multiple of 256 — Metal requires buffer
/// offsets bound via `set_vertex_buffer` to be 256-byte aligned. A value that
/// is already aligned is left unchanged.
fn align_offset(offset: &mut usize) {
    *offset = (*offset + 255) & !255;
}
866
/// Compiles a render pipeline for on-screen drawing using the named vertex and
/// fragment functions from `library`.
///
/// Blending is enabled: RGB blends source-alpha / one-minus-source-alpha
/// (straight-alpha "over" compositing) while the alpha channel accumulates
/// additively (One/One).
///
/// # Panics
/// Panics if either shader function is missing or the pipeline fails to
/// compile — both indicate a build-time problem.
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}
900
/// Compiles the render pipeline for the offscreen path-atlas pass.
///
/// All blend factors are One with Add, so fragment output accumulates
/// additively in the atlas — presumably how overlapping path triangles build
/// up winding counts; confirm against the path shaders.
///
/// # Panics
/// Panics if either shader function is missing or the pipeline fails to
/// compile — both indicate a build-time problem.
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}
934
935mod shaders {
936 #![allow(non_upper_case_globals)]
937 #![allow(non_camel_case_types)]
938 #![allow(non_snake_case)]
939
940 use crate::{
941 color::Color,
942 geometry::vector::{Vector2F, Vector2I},
943 };
944 use std::mem;
945
946 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
947
948 pub trait ToFloat2 {
949 fn to_float2(&self) -> vector_float2;
950 }
951
952 impl ToFloat2 for (f32, f32) {
953 fn to_float2(&self) -> vector_float2 {
954 unsafe {
955 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
956 output <<= 32;
957 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
958 output
959 }
960 }
961 }
962
963 impl ToFloat2 for Vector2F {
964 fn to_float2(&self) -> vector_float2 {
965 unsafe {
966 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
967 output <<= 32;
968 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
969 output
970 }
971 }
972 }
973
974 impl ToFloat2 for Vector2I {
975 fn to_float2(&self) -> vector_float2 {
976 self.to_f32().to_float2()
977 }
978 }
979
980 impl Color {
981 pub fn to_uchar4(&self) -> vector_uchar4 {
982 let mut vec = self.a as vector_uchar4;
983 vec <<= 8;
984 vec |= self.b as vector_uchar4;
985 vec <<= 8;
986 vec |= self.g as vector_uchar4;
987 vec <<= 8;
988 vec |= self.r as vector_uchar4;
989 vec
990 }
991 }
992}