1use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
2use crate::{
3 color::ColorU,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::Layer,
10 Scene,
11};
12use anyhow::{anyhow, Result};
13use cocoa::foundation::NSUInteger;
14use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
15use shaders::{ToFloat2 as _, ToUchar4 as _};
16use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};
17
18const SHADERS_METALLIB: &'static [u8] =
19 include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
20const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
21
/// Metal-backed renderer for a `Scene`: rasterizes paths into atlas textures,
/// then composites shadows, quads, and sprites into the output drawable.
pub struct Renderer {
    /// Cache of rasterized glyphs, keyed by font id / size / glyph id.
    sprite_cache: SpriteCache,
    /// Allocator handing out regions of atlas textures for path rasterization;
    /// cleared and refilled every frame.
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    /// Six vertices forming a unit quad (two triangles); every instanced draw
    /// stretches these over the instance's bounds.
    unit_vertices: metal::Buffer,
    /// Shared managed buffer into which all per-frame instance data is written.
    instances: metal::Buffer,
}
32
/// A path that has been rasterized into an atlas and is waiting to be
/// composited into its layer as a textured sprite.
struct PathSprite {
    /// Index of the layer (into `Scene::layers`) this sprite belongs to.
    layer_id: usize,
    /// Atlas texture the path was rasterized into.
    atlas_id: usize,
    /// Instance data consumed by the sprite pipeline.
    shader_data: shaders::GPUISprite,
}
38
39impl Renderer {
    /// Creates a renderer targeting the given device and output pixel format.
    ///
    /// Loads the embedded shader library, builds one render pipeline per
    /// primitive kind (quads, shadows, sprites, path atlases), and allocates
    /// the shared vertex and instance buffers.
    ///
    /// # Errors
    /// Returns an error if the shader library or any pipeline state fails to
    /// build.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Result<Self> {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .map_err(|message| anyhow!("error building metal library: {}", message))?;

        // Two triangles covering the unit square; instanced draws scale and
        // translate these in the vertex shaders.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // CPU-writable instance staging buffer; `render` marks the written
        // range as modified each frame.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases = build_path_atlas_allocator(pixel_format, &device);
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        )?;
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        )?;
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        )?;
        // The atlas pipeline uses a different (additive) blend configuration,
        // hence the separate builder.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            pixel_format,
        )?;
        Ok(Self {
            sprite_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            path_atlas_pipeline_state,
            unit_vertices,
            instances,
        })
    }
112
    /// Renders `scene` into `output` via `command_buffer`.
    ///
    /// Encodes the path-atlas rasterization passes first, then the main layer
    /// pass, and finally tells Metal which range of the managed `instances`
    /// buffer was written this frame.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        // Running write cursor into `instances`, shared by all passes.
        let mut offset = 0;
        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // `instances` uses managed storage: the GPU only observes bytes we
        // explicitly flag as modified.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
    }
135
    /// Rasterizes every path in the scene into atlas textures, batching
    /// consecutive paths that land in the same atlas into a single render
    /// pass, and returns one `PathSprite` per path describing where its
    /// rasterization ended up.
    ///
    /// Sprites are produced in layer order so `render_layers` can consume
    /// them with a peekable iterator, one layer at a time.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        // Atlas regions are only valid for one frame.
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().iter().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                // NOTE(review): assumes every path fits in an atlas;
                // `allocate` returning None would panic here — confirm the
                // allocator grows or paths are bounded.
                let (atlas_id, atlas_origin) = self.path_atlases.allocate(size.to_i32()).unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // When this path landed in a different atlas than the pending
                // batch, flush the batch to its atlas before accumulating more.
                if let Some(current_atlas_id) = current_atlas_id {
                    if atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(atlas_id);

                // Path vertices are positioned relative to the atlas region
                // the path was allocated.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch, if any paths were seen at all.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
195
    /// Encodes a render pass drawing `vertices` (a triangle list) into the
    /// atlas texture identified by `atlas_id`.
    ///
    /// The vertex data is copied into the shared `instances` buffer at the
    /// next 256-byte-aligned position, and `*offset` is advanced past it.
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // Write the vertex data into the instance buffer. Encoding order is
        // irrelevant here; the GPU reads the buffer when the command buffer
        // executes, after these writes.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            // SAFETY: the assert above guarantees `*offset + vertices.len() *
            // size_of::<GPUIPathVertex>()` stays within the buffer.
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }
254
    /// Encodes the main render pass: clears `output` and draws every layer's
    /// shadows, quads, path sprites, and glyph sprites in order.
    ///
    /// `path_sprites` must be ordered by `layer_id` (as produced by
    /// `render_path_atlases`) so they can be consumed incrementally.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let mut path_sprites = path_sprites.into_iter().peekable();

        // Draw order within a layer: shadows below quads, quads below paths,
        // paths below glyphs.
        for (layer_id, layer) in scene.layers().iter().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
            self.render_quads(scene, layer, offset, drawable_size, command_encoder);
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_glyph_sprites(scene, layer, offset, drawable_size, command_encoder);
        }

        command_encoder.end_encoding();
    }
302
303 fn clip(
304 &mut self,
305 scene: &Scene,
306 layer: &Layer,
307 drawable_size: Vector2F,
308 command_encoder: &metal::RenderCommandEncoderRef,
309 ) {
310 let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
311 vec2f(0., 0.),
312 drawable_size / scene.scale_factor(),
313 )) * scene.scale_factor())
314 .round();
315 command_encoder.set_scissor_rect(metal::MTLScissorRect {
316 x: clip_bounds.origin_x() as NSUInteger,
317 y: clip_bounds.origin_y() as NSUInteger,
318 width: clip_bounds.width() as NSUInteger,
319 height: clip_bounds.height() as NSUInteger,
320 });
321 }
322
    /// Draws the layer's drop shadows as one instanced draw of the unit quad.
    ///
    /// Instance data is written into the shared `instances` buffer at the
    /// next 256-byte-aligned position; `*offset` is advanced past it.
    fn render_shadows(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.shadows().is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in layer.shadows().iter().enumerate() {
            // Scale geometry to device pixels; sigma is left in its original
            // units as provided by the scene.
            let shape_bounds = shadow.bounds * scene.scale_factor();
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scene.scale_factor(),
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            // SAFETY: the assert above guarantees all `shadows().len()`
            // writes stay within the instance buffer.
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.shadows().len() as u64,
        );
        *offset = next_offset;
    }
388
    /// Draws the layer's quads (background, border, corner radius) as one
    /// instanced draw of the unit quad.
    ///
    /// Instance data is written into the shared `instances` buffer at the
    /// next 256-byte-aligned position; `*offset` is advanced past it.
    fn render_quads(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.quads().is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in layer.quads().iter().enumerate() {
            let bounds = quad.bounds * scene.scale_factor();
            let border_width = quad.border.width * scene.scale_factor();
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                // Per-side border flags (bools) zero out the width on sides
                // that aren't drawn.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad
                    .border
                    .color
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                corner_radius: quad.corner_radius * scene.scale_factor(),
            };
            // SAFETY: the assert above guarantees all `quads().len()` writes
            // stay within the instance buffer.
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.quads().len() as u64,
        );
        *offset = next_offset;
    }
465
    /// Draws the layer's glyphs, grouped by the sprite-cache atlas they were
    /// rasterized into, one instanced draw per atlas.
    ///
    /// Glyphs the sprite cache cannot render (e.g. whitespace) are skipped.
    fn render_glyph_sprites(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.glyphs().is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();
        for glyph in layer.glyphs() {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scene.scale_factor(),
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        // NOTE(review): a single atlas size is bound for all atlases in the
        // loop below — assumes every sprite-cache atlas has the same
        // dimensions; confirm against SpriteCache.
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // SAFETY: the assert above guarantees the copied range stays
            // within the instance buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
555
    /// Composites the pre-rasterized path sprites belonging to `layer_id`,
    /// batching consecutive sprites from the same atlas into one instanced
    /// draw.
    ///
    /// Consumes sprites from the shared iterator only while they belong to
    /// this layer; the remainder is left for subsequent layers.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        // `render_path_sprites_for_atlas` expects `offset` to be aligned by
        // its caller.
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop (without consuming) at the first sprite of a later layer.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw the batch accumulated so far, then
                    // start a new batch at a freshly aligned offset.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // SAFETY: `render_path_sprites_for_atlas` asserts the batch fits
            // in the buffer before `offset` advances past these writes.
            // NOTE(review): writes happen before the bounds assert; an
            // oversized batch would write past the buffer before panicking —
            // confirm INSTANCE_BUFFER_SIZE comfortably exceeds frame usage.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Draw the final batch for this layer, if any sprites were consumed.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
622
    /// Issues one instanced draw for `sprite_count` path sprites whose
    /// instance data has already been written at `*offset`, sampling from the
    /// atlas texture identified by `atlas_id`.
    ///
    /// The caller must have aligned `*offset` before writing the instance
    /// data; this advances `*offset` past it.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        // Path atlases can differ in size, so bind this atlas's dimensions
        // per draw (unlike glyph sprites, which bind one size up front).
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
660}
661
662fn build_path_atlas_allocator(
663 pixel_format: MTLPixelFormat,
664 device: &metal::Device,
665) -> AtlasAllocator {
666 let texture_descriptor = metal::TextureDescriptor::new();
667 texture_descriptor.set_width(2048);
668 texture_descriptor.set_height(2048);
669 texture_descriptor.set_pixel_format(pixel_format);
670 texture_descriptor
671 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
672 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
673 let path_atlases = AtlasAllocator::new(device.clone(), texture_descriptor);
674 path_atlases
675}
676
/// Rounds `offset` up to the next multiple of 256 bytes, leaving
/// already-aligned values untouched. Metal buffer-binding offsets must be
/// 256-byte aligned ("to make Metal happy", per the original comment).
fn align_offset(offset: &mut usize) {
    // Adding 255 and clearing the low eight bits rounds up to a multiple of
    // 256 without branching.
    *offset = (*offset + 255) & !255;
}
683
/// Builds a render pipeline with standard alpha compositing: RGB blended
/// source-over (`SourceAlpha` / `OneMinusSourceAlpha`), alpha accumulated
/// additively (`One` / `One`).
///
/// # Errors
/// Returns an error if either shader function is missing from the library or
/// the pipeline state fails to build.
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> Result<metal::RenderPipelineState> {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .map_err(|message| anyhow!("error locating fragment function: {}", message))?;

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
}
717
/// Builds the render pipeline used when rasterizing paths into atlases.
///
/// Differs from `build_pipeline_state` only in blending: all factors are
/// `One`, i.e. purely additive in every channel — presumably so overlapping
/// triangles accumulate winding contributions (cf. `compute_winding`);
/// confirm against the path_atlas shader.
///
/// # Errors
/// Returns an error if either shader function is missing from the library or
/// the pipeline state fails to build.
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> Result<metal::RenderPipelineState> {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .map_err(|message| anyhow!("error locating fragment function: {}", message))?;

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
}
751
752mod shaders {
753 #![allow(non_upper_case_globals)]
754 #![allow(non_camel_case_types)]
755 #![allow(non_snake_case)]
756
757 use pathfinder_geometry::vector::Vector2I;
758
759 use crate::{color::ColorU, geometry::vector::Vector2F};
760 use std::mem;
761
762 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
763
764 pub trait ToFloat2 {
765 fn to_float2(&self) -> vector_float2;
766 }
767
768 pub trait ToUchar4 {
769 fn to_uchar4(&self) -> vector_uchar4;
770 }
771
772 impl ToFloat2 for (f32, f32) {
773 fn to_float2(&self) -> vector_float2 {
774 unsafe {
775 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
776 output <<= 32;
777 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
778 output
779 }
780 }
781 }
782
783 impl ToFloat2 for Vector2F {
784 fn to_float2(&self) -> vector_float2 {
785 unsafe {
786 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
787 output <<= 32;
788 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
789 output
790 }
791 }
792 }
793
794 impl ToFloat2 for Vector2I {
795 fn to_float2(&self) -> vector_float2 {
796 self.to_f32().to_float2()
797 }
798 }
799
800 impl ToUchar4 for ColorU {
801 fn to_uchar4(&self) -> vector_uchar4 {
802 let mut vec = self.a as vector_uchar4;
803 vec <<= 8;
804 vec |= self.b as vector_uchar4;
805 vec <<= 8;
806 vec |= self.g as vector_uchar4;
807 vec <<= 8;
808 vec |= self.r as vector_uchar4;
809 vec
810 }
811 }
812}