use super::{atlas::AtlasAllocator, sprite_cache::SpriteCache};
use crate::{
    color::ColorU,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F, Vector2I},
    },
    platform,
    scene::Layer,
    Scene,
};
use anyhow::{anyhow, Result};
use cocoa::foundation::NSUInteger;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::{ToFloat2 as _, ToUchar4 as _};
use std::{collections::HashMap, ffi::c_void, mem, sync::Arc};

const SHADERS_METALLIB: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

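/// Metal-backed renderer. Draws a `Scene` into a drawable in two stages: an
/// offscreen stage that rasterizes paths into atlas textures, and a main pass
/// that composites each layer's shadows, quads, path sprites, and glyph
/// sprites. Per-instance data for every draw is streamed through the shared
/// `instances` buffer.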
pub struct Renderer {
    device: metal::Device,
    sprite_cache: SpriteCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    path_stencil_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

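/// Associates a path's sprite with the layer it belongs to and the atlas it was
/// allocated in, so it can be composited when that layer is drawn.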
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    sprite: shaders::GPUISprite,
}

impl Renderer {
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Result<Self> {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .map_err(|message| anyhow!("error building metal library: {}", message))?;

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), fonts);
        let path_atlases = build_path_atlas_allocator(pixel_format, &device);
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        )?;
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        )?;
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        )?;
        let path_stencil_pipeline_state = build_stencil_pipeline_state(
            &device,
            &library,
            "path_winding",
            "path_winding_vertex",
            "path_winding_fragment",
            pixel_format,
        )?;
        Ok(Self {
            device,
            sprite_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            path_stencil_pipeline_state,
            unit_vertices,
            instances,
        })
    }

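    /// Renders a frame: first rasterizes all path stencils into their atlas
    /// textures, then draws every layer into `output`, streaming instance data
    /// into the shared buffer starting at offset zero.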
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let mut offset = 0;
        let stencils = self.render_path_stencils(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            stencils,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
    }

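    /// Stage 1: rasterizes each layer's paths into offscreen atlas textures
    /// using the additive path-winding pipeline, and returns one `PathSprite`
    /// per path so the layer pass can sample the atlases while compositing.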
    fn render_path_stencils(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut stencils = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().iter().enumerate() {
            for path in layer.paths() {
                // Push a PathSprite for later use, when we sample from the atlas while
                // drawing the layers' contents.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();
                let (atlas_id, atlas_origin) =
                    self.path_atlases.allocate(size.to_i32()).unwrap();
                let atlas_origin = atlas_origin.to_f32();
                stencils.push(PathSprite {
                    layer_id,
                    atlas_id,
                    sprite: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // If this path was allocated in a new atlas, flush the pending vertices
                // into the atlas they belong to before starting the next batch.
                if let Some(current_atlas_id) = current_atlas_id {
                    if atlas_id != current_atlas_id {
                        self.render_path_stencils_for_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(atlas_id);

                // Populate the vertices by translating them to their appropriate location in the atlas.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_path_stencils_for_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        stencils
    }

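    /// Encodes one offscreen render pass that draws the accumulated path
    /// vertices into a single atlas texture, copying them into the shared
    /// instance buffer at the current (256-byte aligned) offset.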
    fn render_path_stencils_for_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let winding_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        winding_command_encoder.set_render_pipeline_state(&self.path_stencil_pipeline_state);
        winding_command_encoder.set_vertex_buffer(
            shaders::GPUIPathWindingVertexInputIndex_GPUIPathWindingVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        winding_command_encoder.set_vertex_bytes(
            shaders::GPUIPathWindingVertexInputIndex_GPUIPathWindingVertexInputIndexAtlasSize
                as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        self.instances.did_modify_range(NSRange {
            location: *offset as u64,
            length: (next_offset - *offset) as u64,
        });
        *offset = next_offset;

        winding_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        winding_command_encoder.end_encoding();
    }

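    /// Stage 2: draws every layer into the output texture in order, applying
    /// each layer's clip bounds before rendering its shadows, quads, path
    /// sprites, and glyph sprites.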
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        for (layer_id, layer) in scene.layers().iter().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(scene, layer, offset, drawable_size, command_encoder);
            self.render_quads(scene, layer, offset, drawable_size, command_encoder);
            // TODO: Pass sprites relevant to this layer in a more efficient manner.
            self.render_path_sprites(
                scene,
                layer,
                path_sprites.iter().filter(|s| s.layer_id == layer_id),
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_glyph_sprites(scene, layer, offset, drawable_size, command_encoder);
        }

        command_encoder.end_encoding();
    }

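    /// Restricts subsequent draws to the layer's clip bounds (or the whole
    /// drawable if it has none), scaled to device pixels.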
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

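    /// Draws the layer's shadows as instanced quads using the shadow pipeline.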
    fn render_shadows(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.shadows().is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + layer.shadows().len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in layer.shadows().iter().enumerate() {
            let shape_bounds = shadow.bounds * scene.scale_factor();
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scene.scale_factor(),
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        self.instances.did_modify_range(NSRange {
            location: *offset as u64,
            length: (next_offset - *offset) as u64,
        });
        *offset = next_offset;

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.shadows().len() as u64,
        );
    }

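    /// Draws the layer's quads (background, border, and corner radius) as
    /// instanced quads using the quad pipeline.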
    fn render_quads(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.quads().is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + layer.quads().len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in layer.quads().iter().enumerate() {
            let bounds = quad.bounds * scene.scale_factor();
            let border_width = quad.border.width * scene.scale_factor();
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().to_float2(),
                size: bounds.size().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad
                    .border
                    .color
                    .unwrap_or(ColorU::transparent_black())
                    .to_uchar4(),
                corner_radius: quad.corner_radius * scene.scale_factor(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        self.instances.did_modify_range(NSRange {
            location: *offset as u64,
            length: (next_offset - *offset) as u64,
        });
        *offset = next_offset;

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            layer.quads().len() as u64,
        );
    }

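    /// Rasterizes the layer's glyphs through the sprite cache and draws them as
    /// instanced sprites, batched by the atlas texture they live in.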
    fn render_glyph_sprites(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if layer.glyphs().is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();
        for glyph in layer.glyphs() {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
                scene.scale_factor(),
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scene.scale_factor()).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [self.sprite_cache.atlas_size().to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }
            self.instances.did_modify_range(NSRange {
                location: *offset as u64,
                length: (next_offset - *offset) as u64,
            });
            *offset = next_offset;

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
        }
    }

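    /// Composites the path sprites produced in stage 1 into the output, batched
    /// by the path atlas they were rasterized into.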
    fn render_path_sprites<'a>(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        sprites: impl Iterator<Item = &'a PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let mut sprites = sprites.peekable();
        if sprites.peek().is_none() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();
        for sprite in sprites {
            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(sprite.sprite);
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );

            let texture = self.path_atlases.texture(atlas_id).unwrap();
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }
            self.instances.did_modify_range(NSRange {
                location: *offset as u64,
                length: (next_offset - *offset) as u64,
            });
            *offset = next_offset;

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
        }
    }
}

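/// Builds the allocator for the 2048x2048 path atlas textures, which serve as
/// render targets for the path-winding pass and are later sampled as sprites.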
fn build_path_atlas_allocator(
    pixel_format: MTLPixelFormat,
    device: &metal::Device,
) -> AtlasAllocator {
    let path_stencil_descriptor = metal::TextureDescriptor::new();
    path_stencil_descriptor.set_width(2048);
    path_stencil_descriptor.set_height(2048);
    path_stencil_descriptor.set_pixel_format(pixel_format);
    path_stencil_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    path_stencil_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    AtlasAllocator::new(device.clone(), path_stencil_descriptor)
}

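/// Rounds `offset` up to the next multiple of 256 bytes so that offsets passed
/// to Metal buffer-binding calls stay suitably aligned.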
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy.
    }
}

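/// Builds a render pipeline with conventional source-over alpha blending
/// (source-alpha / one-minus-source-alpha), shared by the quad, shadow, and
/// sprite pipelines.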
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> Result<metal::RenderPipelineState> {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .map_err(|message| anyhow!("error locating fragment function: {}", message))?;

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);

    device
        .new_render_pipeline_state(&descriptor)
        .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
}

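/// Builds a render pipeline with purely additive (one/one) blending, used by
/// the path-winding pipeline so per-triangle contributions accumulate in the
/// path atlases.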
fn build_stencil_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> Result<metal::RenderPipelineState> {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .map_err(|message| anyhow!("error locating vertex function: {}", message))?;
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .map_err(|message| anyhow!("error locating fragment function: {}", message))?;

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .map_err(|message| anyhow!("could not create render pipeline state: {}", message))
}

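/// Shader types generated at build time from the Metal shader headers, plus
/// helpers for packing Rust vector and color types into the generated
/// `vector_float2` / `vector_uchar4` representations.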
mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use pathfinder_geometry::vector::Vector2I;
    use crate::{color::ColorU, geometry::vector::Vector2F};
    use std::mem;

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    pub trait ToUchar4 {
        fn to_uchar4(&self) -> vector_uchar4;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl ToUchar4 for ColorU {
        fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}