1use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
2use crate::{
3 color::ColorU,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::Layer,
10 Scene,
11};
12use cocoa::foundation::NSUInteger;
13use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
14use shaders::{ToFloat2 as _, ToUchar4 as _};
15use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
16
17const SHADERS_METALLIB: &'static [u8] =
18 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
19const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
20
/// Metal-backed renderer for a `Scene`: rasterizes paths into atlas textures,
/// then composites quads, shadows, glyph/icon sprites, and path sprites into
/// the output drawable.
pub struct Renderer {
    // Cache of rasterized glyphs and icons, grouped into texture atlases.
    sprite_cache: SpriteCache,
    // Allocator for the offscreen textures that vector paths are rasterized into.
    path_atlases: AtlasAllocator,
    // One pipeline state per draw kind; all created from the embedded shader library.
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    // Six vertices forming two triangles that cover the unit square; instanced
    // draws transform these into screen-space quads.
    unit_vertices: metal::Buffer,
    // Shared managed-storage buffer that per-instance shader data is written into.
    instances: metal::Buffer,
}
31
/// A path that has been rasterized into an atlas and is waiting to be
/// composited into its layer as a textured sprite.
struct PathSprite {
    // Index of the scene layer this path belongs to.
    layer_id: usize,
    // Which path atlas texture holds the rasterized path.
    atlas_id: usize,
    // Per-instance data handed to the sprite shader.
    shader_data: shaders::GPUISprite,
}
37
impl Renderer {
    /// Creates a renderer targeting `pixel_format`, compiling the embedded
    /// shader library and allocating the GPU buffers shared by all draws.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; the vertex shaders scale and
        // translate these per instance.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Managed storage: CPU writes must be flushed via `did_modify_range`
        // (done at the end of `render`).
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        // Path atlases are single-channel; only coverage/winding is rasterized there.
        let path_atlases = build_path_atlas_allocator(MTLPixelFormat::R8Unorm, &device);
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        // Pixel format must match the atlas textures created above.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R8Unorm,
        );
        Self {
            sprite_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        }
    }

    /// Renders `scene` into `output`, encoding all passes onto `command_buffer`.
    ///
    /// `offset` tracks how many bytes of the shared `instances` buffer have
    /// been consumed across all passes this frame.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let mut offset = 0;
        // First rasterize every path into an offscreen atlas...
        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        // ...then composite all layers (shadows, quads, paths, sprites) into `output`.
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // Flush the CPU-written range of the managed buffer to the GPU.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
    }

    /// Allocates atlas space for every path in the scene and encodes render
    /// passes that rasterize the path geometry into the atlas textures.
    ///
    /// Returns one `PathSprite` per path, in layer order, for later
    /// compositing by `render_path_sprites`. Consecutive paths that land in
    /// the same atlas are batched into a single render pass.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().iter().enumerate() {
            for path in layer.paths() {
                // Work in physical pixels.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                // NOTE(review): panics if allocation fails (e.g. a path larger
                // than an atlas texture) — confirm that can't happen upstream.
                let (atlas_id, atlas_origin) = self.path_atlases.allocate(size.to_i32()).unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // When the allocator moves to a different atlas, flush the
                // vertices accumulated for the previous one.
                if let Some(current_atlas_id) = current_atlas_id {
                    if atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(atlas_id);

                // Translate path vertices into atlas space, clipped to the
                // rectangle allocated for this path.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

    /// Encodes one render pass that draws `vertices` into the atlas texture
    /// identified by `atlas_id`, staging the vertex data in the shared
    /// instance buffer at the current (256-byte aligned) `offset`.
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        // The atlas is cleared at the start of the pass, so each atlas gets
        // exactly one pass per frame.
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        // `set_vertex_bytes` copies the data at encode time, so passing a
        // pointer to a temporary array is sound.
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // SAFETY-relevant: writes go to `instances` contents at `offset`;
        // the assert above guarantees the range fits in the buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

    /// Encodes the main render pass that composites all layers into `output`,
    /// drawing shadows, quads, path sprites, and glyph/icon sprites per layer.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        // Path sprites are consumed lazily: each layer pulls only the sprites
        // tagged with its own layer_id (they were produced in layer order).
        let mut path_sprites = path_sprites.into_iter().peekable();

        for (layer_id, layer) in scene.layers().iter().enumerate() {
            // Draw order within a layer: shadows under quads, paths, then text/icons on top.
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
            self.render_quads(scene, layer, offset, drawable_size, command_encoder);
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(scene, layer, offset, drawable_size, command_encoder);
        }

        command_encoder.end_encoding();
    }

    /// Applies the layer's clip bounds as a scissor rect (in physical pixels);
    /// layers without clip bounds get a scissor covering the whole drawable.
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

    /// Draws the layer's shadows as one instanced draw, staging per-shadow
    /// data in the shared instance buffer.
    fn render_shadows(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.shadows().is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Writes are bounded by the assert above.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in layer.shadows().iter().enumerate() {
            // Convert to physical pixels before handing to the shader.
            let shape_bounds = shadow.bounds * scene.scale_factor();
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scene.scale_factor(),
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        // One unit quad (6 vertices) per shadow instance.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.shadows().len() as u64,
        );
        *offset = next_offset;
    }

    /// Draws the layer's quads (backgrounds + borders) as one instanced draw.
    fn render_quads(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.quads().is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Writes are bounded by the assert above.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in layer.quads().iter().enumerate() {
            let bounds = quad.bounds * scene.scale_factor();
            let border_width = quad.border.width * scene.scale_factor();
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                // The bool -> usize -> f32 cast zeroes the width on sides
                // where the border is disabled.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad
                    .border
                    .color
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                corner_radius: quad.corner_radius * scene.scale_factor(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.quads().len() as u64,
        );
        *offset = next_offset;
    }

    /// Draws the layer's glyphs and icons, grouped by sprite atlas so each
    /// atlas texture is bound once and drawn with a single instanced call.
    fn render_sprites(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.glyphs().is_empty() && layer.icons().is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in layer.glyphs() {
            // `render_glyph` may return None (e.g. nothing to draw for this glyph).
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scene.scale_factor(),
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in layer.icons() {
            let sprite = self.sprite_cache.render_icon(
                icon.bounds.size(),
                icon.path.clone(),
                icon.svg.clone(),
                scene.scale_factor(),
            );

            // NOTE(review): unlike glyphs above, the `.floor()` pixel snap is
            // disabled here — confirm icons are intentionally not snapped.
            let origin = (icon.bounds.origin() * scene.scale_factor()); //.floor();
            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Copy this atlas's sprites into the instance buffer in one shot;
            // the assert above guarantees the destination range fits.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

    /// Composites the rasterized path sprites belonging to `layer_id`,
    /// batching runs that share an atlas into single instanced draws.
    ///
    /// Consumes sprites from the shared iterator only while their `layer_id`
    /// matches; sprites were produced in layer order, so the remainder belongs
    /// to later layers.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                // A new atlas starts a new batch: flush the current one first.
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // NOTE(review): this write happens before the capacity assert in
            // `render_path_sprites_for_atlas`, so an overflowing batch would
            // write out of bounds before the assert fires — confirm sizes are
            // bounded upstream.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Flush the final batch for this layer.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    /// Issues one instanced draw for `sprite_count` path sprites already
    /// staged at `offset`, textured from the given path atlas.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
}
685
686fn build_path_atlas_allocator(
687 pixel_format: MTLPixelFormat,
688 device: &metal::Device,
689) -> AtlasAllocator {
690 let texture_descriptor = metal::TextureDescriptor::new();
691 texture_descriptor.set_width(2048);
692 texture_descriptor.set_height(2048);
693 texture_descriptor.set_pixel_format(pixel_format);
694 texture_descriptor
695 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
696 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
697 let path_atlases = AtlasAllocator::new(device.clone(), texture_descriptor);
698 path_atlases
699}
700
/// Rounds `offset` up to the next multiple of 256, the buffer-offset
/// alignment Metal requires; already-aligned offsets are left unchanged.
fn align_offset(offset: &mut usize) {
    // 256 is a power of two, so `& 255` is equivalent to `% 256`.
    let misalignment = *offset & 255;
    if misalignment != 0 {
        *offset += 256 - misalignment;
    }
}
707
708fn build_pipeline_state(
709 device: &metal::DeviceRef,
710 library: &metal::LibraryRef,
711 label: &str,
712 vertex_fn_name: &str,
713 fragment_fn_name: &str,
714 pixel_format: metal::MTLPixelFormat,
715) -> metal::RenderPipelineState {
716 let vertex_fn = library
717 .get_function(vertex_fn_name, None)
718 .expect("error locating vertex function");
719 let fragment_fn = library
720 .get_function(fragment_fn_name, None)
721 .expect("error locating fragment function");
722
723 let descriptor = metal::RenderPipelineDescriptor::new();
724 descriptor.set_label(label);
725 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
726 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
727 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
728 color_attachment.set_pixel_format(pixel_format);
729 color_attachment.set_blending_enabled(true);
730 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
731 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
732 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
733 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
734 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
735 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
736
737 device
738 .new_render_pipeline_state(&descriptor)
739 .expect("could not create render pipeline state")
740}
741
742fn build_path_atlas_pipeline_state(
743 device: &metal::DeviceRef,
744 library: &metal::LibraryRef,
745 label: &str,
746 vertex_fn_name: &str,
747 fragment_fn_name: &str,
748 pixel_format: metal::MTLPixelFormat,
749) -> metal::RenderPipelineState {
750 let vertex_fn = library
751 .get_function(vertex_fn_name, None)
752 .expect("error locating vertex function");
753 let fragment_fn = library
754 .get_function(fragment_fn_name, None)
755 .expect("error locating fragment function");
756
757 let descriptor = metal::RenderPipelineDescriptor::new();
758 descriptor.set_label(label);
759 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
760 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
761 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
762 color_attachment.set_pixel_format(pixel_format);
763 color_attachment.set_blending_enabled(true);
764 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
765 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
766 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
767 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
768 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
769 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
770
771 device
772 .new_render_pipeline_state(&descriptor)
773 .expect("could not create render pipeline state")
774}
775
776mod shaders {
777 #![allow(non_upper_case_globals)]
778 #![allow(non_camel_case_types)]
779 #![allow(non_snake_case)]
780
781 use pathfinder_geometry::vector::Vector2I;
782
783 use crate::{color::ColorU, geometry::vector::Vector2F};
784 use std::mem;
785
786 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
787
788 pub trait ToFloat2 {
789 fn to_float2(&self) -> vector_float2;
790 }
791
792 pub trait ToUchar4 {
793 fn to_uchar4(&self) -> vector_uchar4;
794 }
795
796 impl ToFloat2 for (f32, f32) {
797 fn to_float2(&self) -> vector_float2 {
798 unsafe {
799 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
800 output <<= 32;
801 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
802 output
803 }
804 }
805 }
806
807 impl ToFloat2 for Vector2F {
808 fn to_float2(&self) -> vector_float2 {
809 unsafe {
810 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
811 output <<= 32;
812 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
813 output
814 }
815 }
816 }
817
818 impl ToFloat2 for Vector2I {
819 fn to_float2(&self) -> vector_float2 {
820 self.to_f32().to_float2()
821 }
822 }
823
824 impl ToUchar4 for ColorU {
825 fn to_uchar4(&self) -> vector_uchar4 {
826 let mut vec = self.a as vector_uchar4;
827 vec <<= 8;
828 vec |= self.b as vector_uchar4;
829 vec <<= 8;
830 vec |= self.g as vector_uchar4;
831 vec <<= 8;
832 vec |= self.r as vector_uchar4;
833 vec
834 }
835 }
836}