1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
10};
11use cocoa::{
12 base::{NO, YES},
13 foundation::NSUInteger,
14 quartzcore::AutoresizingMask,
15};
16use core_foundation::base::TCFType;
17use foreign_types::ForeignTypeRef;
18use log::warn;
19use media::core_video::{self, CVMetalTextureCache};
20use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
21use objc::{self, msg_send, sel, sel_impl};
22use shaders::ToFloat2 as _;
23use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};
24
/// Compiled Metal shader library, embedded at build time (produced by the build script into OUT_DIR).
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
/// Size in bytes of the shared per-instance staging buffer (`Renderer::instances`).
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
27
/// Metal-backed renderer for a single window: owns the `CAMetalLayer` it
/// draws into, one pipeline state per primitive kind, and the caches that
/// hold rasterized glyphs, images, and vector paths.
pub struct Renderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    /// Cache of rasterized glyphs and icons, stored in texture atlases.
    sprite_cache: SpriteCache,
    /// Cache of decoded images and image glyphs, stored in texture atlases.
    image_cache: ImageCache,
    /// Atlases holding paths rasterized for the current frame.
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    /// Six vertices forming two triangles that cover the unit square.
    unit_vertices: metal::Buffer,
    /// Shared staging buffer that every draw call writes its per-instance data into.
    instances: metal::Buffer,
    /// Cache for wrapping `CVImageBuffer` planes as Metal textures (video surfaces).
    cv_texture_cache: core_video::CVMetalTextureCache,
}
45
/// A path rasterized into a path atlas, recorded so the main render pass can
/// composite it into its layer as a sprite.
struct PathSprite {
    /// Index of the scene layer the path belongss to — wait, index of the scene layer that owns this path.
    layer_id: usize,
    /// Which path atlas texture holds the rasterized path.
    atlas_id: usize,
    /// Per-instance data handed to the sprite shader.
    shader_data: shaders::GPUISprite,
}
51
/// An externally produced video frame (a Core Video image buffer) to be drawn
/// within `bounds`.
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}
56
57impl Renderer {
    /// Creates a renderer that draws into a freshly created `CAMetalLayer`.
    ///
    /// Exits the process if no Metal-capable device is available, since the
    /// application cannot render without one.
    pub fn new(is_opaque: bool, fonts: Arc<dyn platform::FontSystem>) -> Self {
        const PIXEL_FORMAT: MTLPixelFormat = MTLPixelFormat::BGRA8Unorm;

        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(PIXEL_FORMAT);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        // These CAMetalLayer properties have no safe wrappers in the `metal`
        // crate, so they are set through raw Objective-C messages.
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; instanced draws scale and
        // translate these into each primitive's bounds.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Shared CPU-writable staging buffer for all per-instance draw data.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        // NOTE(review): caches start at an assumed 1024x768 size and scale
        // factor 1.; both are updated per-frame in `render`.
        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), 1., fonts.clone());
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), 1., fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        // One pipeline per primitive kind, each binding its vertex/fragment
        // entry points from the embedded shader library.
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            PIXEL_FORMAT,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            PIXEL_FORMAT,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            PIXEL_FORMAT,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            PIXEL_FORMAT,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            PIXEL_FORMAT,
        );
        // Path atlases render to single-channel half-float textures, not the
        // BGRA drawable format.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            PIXEL_FORMAT,
        );
        let cv_texture_cache = unsafe { CVMetalTextureCache::new(device.as_ptr()).unwrap() };
        Self {
            layer,
            command_queue: device.new_command_queue(),
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }
184
    /// Returns the `CAMetalLayer` this renderer draws into, for insertion
    /// into a window's view hierarchy.
    pub fn layer(&self) -> &metal::MetalLayerRef {
        &*self.layer
    }
188
189 pub fn render(&mut self, scene: &Scene) {
190 let layer = self.layer.clone();
191 let drawable_size = layer.drawable_size();
192 let drawable = if let Some(drawable) = layer.next_drawable() {
193 drawable
194 } else {
195 log::error!(
196 "failed to retrieve next drawable, drawable size: {:?}",
197 drawable_size
198 );
199 return;
200 };
201 let command_queue = self.command_queue.clone();
202 let command_buffer = command_queue.new_command_buffer();
203
204 self.sprite_cache.set_scale_factor(scene.scale_factor());
205 self.image_cache.set_scale_factor(scene.scale_factor());
206
207 let mut offset = 0;
208
209 let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
210 self.render_layers(
211 scene,
212 path_sprites,
213 &mut offset,
214 vec2f(drawable_size.width as f32, drawable_size.height as f32),
215 command_buffer,
216 drawable.texture(),
217 );
218 self.instances.did_modify_range(NSRange {
219 location: 0,
220 length: offset as NSUInteger,
221 });
222 self.image_cache.finish_frame();
223
224 command_buffer.commit();
225 command_buffer.wait_until_completed();
226 drawable.present();
227 }
228
    /// Rasterizes every path in the scene into offscreen path-atlas textures,
    /// returning one `PathSprite` per path for the main pass to composite.
    ///
    /// Vertices are accumulated per atlas: whenever a path lands in a
    /// different atlas than the previous one, the pending batch is flushed
    /// via `render_paths_to_atlas` before the next batch begins.
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                // Work in device pixels; size is ceiled so the atlas slot
                // fully contains the scaled path.
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        // Path sprites sample coverage computed via winding.
                        compute_winding: 1,
                    },
                });

                // Flush the pending vertex batch if this path was placed in a
                // different atlas than the previous one.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Vertices are repositioned relative to the path's atlas slot
                // and clipped to that slot.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch, if any paths were rasterized at all.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
298
299 fn render_paths_to_atlas(
300 &mut self,
301 offset: &mut usize,
302 vertices: &[shaders::GPUIPathVertex],
303 atlas_id: usize,
304 command_buffer: &metal::CommandBufferRef,
305 ) {
306 align_offset(offset);
307 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
308 assert!(
309 next_offset <= INSTANCE_BUFFER_SIZE,
310 "instance buffer exhausted"
311 );
312
313 let render_pass_descriptor = metal::RenderPassDescriptor::new();
314 let color_attachment = render_pass_descriptor
315 .color_attachments()
316 .object_at(0)
317 .unwrap();
318 let texture = self.path_atlases.texture(atlas_id).unwrap();
319 color_attachment.set_texture(Some(texture));
320 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
321 color_attachment.set_store_action(metal::MTLStoreAction::Store);
322 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
323
324 let path_atlas_command_encoder =
325 command_buffer.new_render_command_encoder(render_pass_descriptor);
326 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
327 path_atlas_command_encoder.set_vertex_buffer(
328 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
329 Some(&self.instances),
330 *offset as u64,
331 );
332 path_atlas_command_encoder.set_vertex_bytes(
333 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
334 mem::size_of::<shaders::vector_float2>() as u64,
335 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
336 as *const c_void,
337 );
338
339 let buffer_contents = unsafe {
340 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
341 };
342
343 for (ix, vertex) in vertices.iter().enumerate() {
344 unsafe {
345 *buffer_contents.add(ix) = *vertex;
346 }
347 }
348
349 path_atlas_command_encoder.draw_primitives(
350 metal::MTLPrimitiveType::Triangle,
351 0,
352 vertices.len() as u64,
353 );
354 path_atlas_command_encoder.end_encoding();
355 *offset = next_offset;
356 }
357
358 fn render_layers(
359 &mut self,
360 scene: &Scene,
361 path_sprites: Vec<PathSprite>,
362 offset: &mut usize,
363 drawable_size: Vector2F,
364 command_buffer: &metal::CommandBufferRef,
365 output: &metal::TextureRef,
366 ) {
367 let render_pass_descriptor = metal::RenderPassDescriptor::new();
368 let color_attachment = render_pass_descriptor
369 .color_attachments()
370 .object_at(0)
371 .unwrap();
372 color_attachment.set_texture(Some(output));
373 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
374 color_attachment.set_store_action(metal::MTLStoreAction::Store);
375 let alpha = if self.layer.is_opaque() { 1. } else { 0. };
376 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
377 let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
378
379 command_encoder.set_viewport(metal::MTLViewport {
380 originX: 0.0,
381 originY: 0.0,
382 width: drawable_size.x() as f64,
383 height: drawable_size.y() as f64,
384 znear: 0.0,
385 zfar: 1.0,
386 });
387
388 let scale_factor = scene.scale_factor();
389 let mut path_sprites = path_sprites.into_iter().peekable();
390 for (layer_id, layer) in scene.layers().enumerate() {
391 self.clip(scene, layer, drawable_size, command_encoder);
392 self.render_shadows(
393 layer.shadows(),
394 scale_factor,
395 offset,
396 drawable_size,
397 command_encoder,
398 );
399 self.render_quads(
400 layer.quads(),
401 scale_factor,
402 offset,
403 drawable_size,
404 command_encoder,
405 );
406 self.render_path_sprites(
407 layer_id,
408 &mut path_sprites,
409 offset,
410 drawable_size,
411 command_encoder,
412 );
413 self.render_underlines(
414 layer.underlines(),
415 scale_factor,
416 offset,
417 drawable_size,
418 command_encoder,
419 );
420 self.render_sprites(
421 layer.glyphs(),
422 layer.icons(),
423 scale_factor,
424 offset,
425 drawable_size,
426 command_encoder,
427 );
428 self.render_images(
429 layer.images(),
430 layer.image_glyphs(),
431 scale_factor,
432 offset,
433 drawable_size,
434 command_encoder,
435 );
436 self.render_surfaces(
437 layer.surfaces(),
438 scale_factor,
439 offset,
440 drawable_size,
441 command_encoder,
442 );
443 }
444
445 command_encoder.end_encoding();
446 }
447
448 fn clip(
449 &mut self,
450 scene: &Scene,
451 layer: &Layer,
452 drawable_size: Vector2F,
453 command_encoder: &metal::RenderCommandEncoderRef,
454 ) {
455 let clip_bounds = (layer
456 .clip_bounds()
457 .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
458 * scene.scale_factor())
459 .round();
460 command_encoder.set_scissor_rect(metal::MTLScissorRect {
461 x: clip_bounds.origin_x() as NSUInteger,
462 y: clip_bounds.origin_y() as NSUInteger,
463 width: clip_bounds.width() as NSUInteger,
464 height: clip_bounds.height() as NSUInteger,
465 });
466 }
467
468 fn render_shadows(
469 &mut self,
470 shadows: &[Shadow],
471 scale_factor: f32,
472 offset: &mut usize,
473 drawable_size: Vector2F,
474 command_encoder: &metal::RenderCommandEncoderRef,
475 ) {
476 if shadows.is_empty() {
477 return;
478 }
479
480 align_offset(offset);
481 let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
482 assert!(
483 next_offset <= INSTANCE_BUFFER_SIZE,
484 "instance buffer exhausted"
485 );
486
487 command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
488 command_encoder.set_vertex_buffer(
489 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
490 Some(&self.unit_vertices),
491 0,
492 );
493 command_encoder.set_vertex_buffer(
494 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
495 Some(&self.instances),
496 *offset as u64,
497 );
498 command_encoder.set_vertex_bytes(
499 shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
500 mem::size_of::<shaders::GPUIUniforms>() as u64,
501 [shaders::GPUIUniforms {
502 viewport_size: drawable_size.to_float2(),
503 }]
504 .as_ptr() as *const c_void,
505 );
506
507 let buffer_contents = unsafe {
508 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
509 };
510 for (ix, shadow) in shadows.iter().enumerate() {
511 let shape_bounds = shadow.bounds * scale_factor;
512 let corner_radii = shadow.corner_radii * scale_factor;
513 let shader_shadow = shaders::GPUIShadow {
514 origin: shape_bounds.origin().to_float2(),
515 size: shape_bounds.size().to_float2(),
516 corner_radius_top_left: corner_radii.top_left,
517 corner_radius_top_right: corner_radii.top_right,
518 corner_radius_bottom_right: corner_radii.bottom_right,
519 corner_radius_bottom_left: corner_radii.bottom_left,
520 sigma: shadow.sigma,
521 color: shadow.color.to_uchar4(),
522 };
523 unsafe {
524 *(buffer_contents.add(ix)) = shader_shadow;
525 }
526 }
527
528 command_encoder.draw_primitives_instanced(
529 metal::MTLPrimitiveType::Triangle,
530 0,
531 6,
532 shadows.len() as u64,
533 );
534 *offset = next_offset;
535 }
536
537 fn render_quads(
538 &mut self,
539 quads: &[Quad],
540 scale_factor: f32,
541 offset: &mut usize,
542 drawable_size: Vector2F,
543 command_encoder: &metal::RenderCommandEncoderRef,
544 ) {
545 if quads.is_empty() {
546 return;
547 }
548 align_offset(offset);
549 let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
550 assert!(
551 next_offset <= INSTANCE_BUFFER_SIZE,
552 "instance buffer exhausted"
553 );
554
555 command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
556 command_encoder.set_vertex_buffer(
557 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
558 Some(&self.unit_vertices),
559 0,
560 );
561 command_encoder.set_vertex_buffer(
562 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
563 Some(&self.instances),
564 *offset as u64,
565 );
566 command_encoder.set_vertex_bytes(
567 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
568 mem::size_of::<shaders::GPUIUniforms>() as u64,
569 [shaders::GPUIUniforms {
570 viewport_size: drawable_size.to_float2(),
571 }]
572 .as_ptr() as *const c_void,
573 );
574
575 let buffer_contents = unsafe {
576 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
577 };
578 for (ix, quad) in quads.iter().enumerate() {
579 let bounds = quad.bounds * scale_factor;
580 let shader_quad = shaders::GPUIQuad {
581 origin: bounds.origin().round().to_float2(),
582 size: bounds.size().round().to_float2(),
583 background_color: quad
584 .background
585 .unwrap_or_else(Color::transparent_black)
586 .to_uchar4(),
587 border_top: quad.border.top * scale_factor,
588 border_right: quad.border.right * scale_factor,
589 border_bottom: quad.border.bottom * scale_factor,
590 border_left: quad.border.left * scale_factor,
591 border_color: quad.border.color.to_uchar4(),
592 corner_radius_top_left: quad.corner_radii.top_left * scale_factor,
593 corner_radius_top_right: quad.corner_radii.top_right * scale_factor,
594 corner_radius_bottom_right: quad.corner_radii.bottom_right * scale_factor,
595 corner_radius_bottom_left: quad.corner_radii.bottom_left * scale_factor,
596 };
597 unsafe {
598 *(buffer_contents.add(ix)) = shader_quad;
599 }
600 }
601
602 command_encoder.draw_primitives_instanced(
603 metal::MTLPrimitiveType::Triangle,
604 0,
605 6,
606 quads.len() as u64,
607 );
608 *offset = next_offset;
609 }
610
611 fn render_sprites(
612 &mut self,
613 glyphs: &[Glyph],
614 icons: &[Icon],
615 scale_factor: f32,
616 offset: &mut usize,
617 drawable_size: Vector2F,
618 command_encoder: &metal::RenderCommandEncoderRef,
619 ) {
620 if glyphs.is_empty() && icons.is_empty() {
621 return;
622 }
623
624 let mut sprites_by_atlas = HashMap::new();
625
626 for glyph in glyphs {
627 if let Some(sprite) = self.sprite_cache.render_glyph(
628 glyph.font_id,
629 glyph.font_size,
630 glyph.id,
631 glyph.origin,
632 ) {
633 // Snap sprite to pixel grid.
634 let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
635
636 sprites_by_atlas
637 .entry(sprite.atlas_id)
638 .or_insert_with(Vec::new)
639 .push(shaders::GPUISprite {
640 origin: origin.to_float2(),
641 target_size: sprite.size.to_float2(),
642 source_size: sprite.size.to_float2(),
643 atlas_origin: sprite.atlas_origin.to_float2(),
644 color: glyph.color.to_uchar4(),
645 compute_winding: 0,
646 });
647 }
648 }
649
650 for icon in icons {
651 // Snap sprite to pixel grid.
652 let origin = (icon.bounds.origin() * scale_factor).floor();
653 let target_size = (icon.bounds.size() * scale_factor).ceil();
654 let source_size = (target_size * 2.).to_i32();
655
656 let sprite =
657 self.sprite_cache
658 .render_icon(source_size, icon.path.clone(), icon.svg.clone());
659 if sprite.is_none() {
660 continue;
661 }
662 let sprite = sprite.unwrap();
663
664 sprites_by_atlas
665 .entry(sprite.atlas_id)
666 .or_insert_with(Vec::new)
667 .push(shaders::GPUISprite {
668 origin: origin.to_float2(),
669 target_size: target_size.to_float2(),
670 source_size: sprite.size.to_float2(),
671 atlas_origin: sprite.atlas_origin.to_float2(),
672 color: icon.color.to_uchar4(),
673 compute_winding: 0,
674 });
675 }
676
677 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
678 command_encoder.set_vertex_buffer(
679 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
680 Some(&self.unit_vertices),
681 0,
682 );
683 command_encoder.set_vertex_bytes(
684 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
685 mem::size_of::<shaders::vector_float2>() as u64,
686 [drawable_size.to_float2()].as_ptr() as *const c_void,
687 );
688
689 for (atlas_id, sprites) in sprites_by_atlas {
690 align_offset(offset);
691 let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
692 assert!(
693 next_offset <= INSTANCE_BUFFER_SIZE,
694 "instance buffer exhausted"
695 );
696
697 let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
698 command_encoder.set_vertex_buffer(
699 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
700 Some(&self.instances),
701 *offset as u64,
702 );
703 command_encoder.set_vertex_bytes(
704 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
705 mem::size_of::<shaders::vector_float2>() as u64,
706 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
707 as *const c_void,
708 );
709
710 command_encoder.set_fragment_texture(
711 shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
712 Some(texture),
713 );
714
715 unsafe {
716 let buffer_contents =
717 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
718 std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
719 }
720
721 command_encoder.draw_primitives_instanced(
722 metal::MTLPrimitiveType::Triangle,
723 0,
724 6,
725 sprites.len() as u64,
726 );
727 *offset = next_offset;
728 }
729 }
730
731 fn render_images(
732 &mut self,
733 images: &[Image],
734 image_glyphs: &[ImageGlyph],
735 scale_factor: f32,
736 offset: &mut usize,
737 drawable_size: Vector2F,
738 command_encoder: &metal::RenderCommandEncoderRef,
739 ) {
740 if images.is_empty() && image_glyphs.is_empty() {
741 return;
742 }
743
744 let mut images_by_atlas = HashMap::new();
745 for image in images {
746 let origin = image.bounds.origin() * scale_factor;
747 let target_size = image.bounds.size() * scale_factor;
748 let corner_radii = image.corner_radii * scale_factor;
749 let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
750 images_by_atlas
751 .entry(alloc_id.atlas_id)
752 .or_insert_with(Vec::new)
753 .push(shaders::GPUIImage {
754 origin: origin.to_float2(),
755 target_size: target_size.to_float2(),
756 source_size: atlas_bounds.size().to_float2(),
757 atlas_origin: atlas_bounds.origin().to_float2(),
758 border_top: image.border.top * scale_factor,
759 border_right: image.border.right * scale_factor,
760 border_bottom: image.border.bottom * scale_factor,
761 border_left: image.border.left * scale_factor,
762 border_color: image.border.color.to_uchar4(),
763 corner_radius_top_left: corner_radii.top_left,
764 corner_radius_top_right: corner_radii.top_right,
765 corner_radius_bottom_right: corner_radii.bottom_right,
766 corner_radius_bottom_left: corner_radii.bottom_left,
767 grayscale: image.grayscale as u8,
768 });
769 }
770
771 for image_glyph in image_glyphs {
772 let origin = (image_glyph.origin * scale_factor).floor();
773 if let Some((alloc_id, atlas_bounds, glyph_origin)) =
774 self.image_cache.render_glyph(image_glyph)
775 {
776 images_by_atlas
777 .entry(alloc_id.atlas_id)
778 .or_insert_with(Vec::new)
779 .push(shaders::GPUIImage {
780 origin: (origin + glyph_origin.to_f32()).to_float2(),
781 target_size: atlas_bounds.size().to_float2(),
782 source_size: atlas_bounds.size().to_float2(),
783 atlas_origin: atlas_bounds.origin().to_float2(),
784 border_top: 0.,
785 border_right: 0.,
786 border_bottom: 0.,
787 border_left: 0.,
788 border_color: Default::default(),
789 corner_radius_top_left: 0.,
790 corner_radius_top_right: 0.,
791 corner_radius_bottom_right: 0.,
792 corner_radius_bottom_left: 0.,
793 grayscale: false as u8,
794 });
795 } else {
796 log::warn!("could not render glyph with id {}", image_glyph.id);
797 }
798 }
799
800 command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
801 command_encoder.set_vertex_buffer(
802 shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
803 Some(&self.unit_vertices),
804 0,
805 );
806 command_encoder.set_vertex_bytes(
807 shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
808 mem::size_of::<shaders::vector_float2>() as u64,
809 [drawable_size.to_float2()].as_ptr() as *const c_void,
810 );
811
812 for (atlas_id, images) in images_by_atlas {
813 align_offset(offset);
814 let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
815 assert!(
816 next_offset <= INSTANCE_BUFFER_SIZE,
817 "instance buffer exhausted"
818 );
819
820 let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
821 command_encoder.set_vertex_buffer(
822 shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
823 Some(&self.instances),
824 *offset as u64,
825 );
826 command_encoder.set_vertex_bytes(
827 shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
828 mem::size_of::<shaders::vector_float2>() as u64,
829 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
830 as *const c_void,
831 );
832 command_encoder.set_fragment_texture(
833 shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
834 Some(texture),
835 );
836
837 unsafe {
838 let buffer_contents =
839 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
840 std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
841 }
842
843 command_encoder.draw_primitives_instanced(
844 metal::MTLPrimitiveType::Triangle,
845 0,
846 6,
847 images.len() as u64,
848 );
849 *offset = next_offset;
850 }
851 }
852
    /// Draws a layer's video surfaces. Each surface's `CVImageBuffer` must be
    /// in bi-planar 4:2:0 YCbCr full-range format; its luma plane (0) and
    /// chroma plane (1) are wrapped as Metal textures via the CV texture
    /// cache and sampled by the surface shader.
    ///
    /// Panics if a surface's pixel format is not
    /// `kCVPixelFormatType_420YpCbCr8BiPlanarFullRange`.
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // Surfaces are drawn one at a time (not instanced) because each one
        // binds its own pair of Y/CbCr textures.
        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            // Plane 0: luma, single channel.
            let y_texture = unsafe {
                self.cv_texture_cache
                    .create_texture_from_image(
                        surface.image_buffer.as_concrete_TypeRef(),
                        ptr::null(),
                        MTLPixelFormat::R8Unorm,
                        surface.image_buffer.plane_width(0),
                        surface.image_buffer.plane_height(0),
                        0,
                    )
                    .unwrap()
            };
            // Plane 1: interleaved Cb/Cr, two channels.
            let cb_cr_texture = unsafe {
                self.cv_texture_cache
                    .create_texture_from_image(
                        surface.image_buffer.as_concrete_TypeRef(),
                        ptr::null(),
                        MTLPixelFormat::RG8Unorm,
                        surface.image_buffer.plane_width(1),
                        surface.image_buffer.plane_height(1),
                        1,
                    )
                    .unwrap()
            };

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }
959
    /// Composites the path sprites belonging to `layer_id`, consuming them
    /// from the front of the shared `sprites` iterator.
    ///
    /// `sprites` is ordered by layer (and grouped by atlas within a layer, as
    /// produced by `render_path_atlases`), so this drains sprites until it
    /// peeks one from a later layer, batching consecutive sprites that share
    /// an atlas into a single instanced draw.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        // Alignment happens here (and on each atlas switch) because
        // `render_path_sprites_for_atlas` assumes `*offset` is already aligned.
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Stop at the first sprite that belongs to a later layer; it will
            // be consumed by that layer's call.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw the staged batch, then start a new
                    // one for the new atlas.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Stage this sprite's instance data after the batch staged so far.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Draw the final staged batch, if any sprite matched this layer.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
1025
    /// Issues one instanced draw call for all path sprites that share the
    /// atlas texture identified by `atlas_id`.
    ///
    /// The caller has already copied `sprite_count` `GPUISprite` instances
    /// into `self.instances` starting at byte `*offset`; this method binds
    /// that range plus the atlas texture, draws, and advances `*offset`
    /// past the consumed range.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        // End of the instance range consumed by this draw; must fit in the
        // fixed-size shared instance buffer.
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        // Bind the sprite instances at the caller-provided byte offset
        // (caller is responsible for 256-byte alignment via align_offset).
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        // Pass the atlas dimensions so the vertex shader can derive
        // normalized texture coordinates. set_vertex_bytes copies the data
        // immediately, so the temporary array is safe here.
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        // Six vertices per instance: a unit quad made of two triangles.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
1063
    /// Encodes a single instanced draw call rendering `underlines` (straight
    /// and squiggly) into the current render pass.
    ///
    /// * `scale_factor` — device pixels per logical unit; origins and sizes
    ///   are scaled and rounded to whole physical pixels.
    /// * `offset` — byte cursor into `self.instances`; aligned, then advanced
    ///   past the instance data written here.
    /// * `drawable_size` — viewport size in physical pixels, forwarded to the
    ///   shader as uniforms.
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        // Metal requires buffer-offset bindings to be 256-byte aligned.
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        // set_vertex_bytes copies the uniform struct immediately, so the
        // temporary array does not need to outlive this call.
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write one GPUIUnderline instance per underline directly into the
        // CPU-visible instance buffer at the aligned offset.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                // Give the squiggle vertical room to oscillate — presumably
                // matched by the fragment shader's wave amplitude (3x the
                // line thickness); TODO confirm against the shader source.
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            // SAFETY: the assert above guarantees the whole
            // `underlines.len()` range fits inside the instance buffer.
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        // Six vertices per instance: a unit quad made of two triangles.
        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
1132}
1133
1134fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
1135 let texture_descriptor = metal::TextureDescriptor::new();
1136 texture_descriptor.set_width(2048);
1137 texture_descriptor.set_height(2048);
1138 texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
1139 texture_descriptor
1140 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
1141 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
1142 texture_descriptor
1143}
1144
/// Rounds `offset` up to the next multiple of 256. Metal requires
/// 256-byte alignment for buffer-offset bindings.
fn align_offset(offset: &mut usize) {
    // Adding 255 then clearing the low 8 bits rounds up to a multiple of
    // 256 while leaving already-aligned offsets untouched.
    *offset = (*offset + 255) & !255;
}
1151
1152fn build_pipeline_state(
1153 device: &metal::DeviceRef,
1154 library: &metal::LibraryRef,
1155 label: &str,
1156 vertex_fn_name: &str,
1157 fragment_fn_name: &str,
1158 pixel_format: metal::MTLPixelFormat,
1159) -> metal::RenderPipelineState {
1160 let vertex_fn = library
1161 .get_function(vertex_fn_name, None)
1162 .expect("error locating vertex function");
1163 let fragment_fn = library
1164 .get_function(fragment_fn_name, None)
1165 .expect("error locating fragment function");
1166
1167 let descriptor = metal::RenderPipelineDescriptor::new();
1168 descriptor.set_label(label);
1169 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1170 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1171 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1172 color_attachment.set_pixel_format(pixel_format);
1173 color_attachment.set_blending_enabled(true);
1174 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1175 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1176 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
1177 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1178 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
1179 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1180
1181 device
1182 .new_render_pipeline_state(&descriptor)
1183 .expect("could not create render pipeline state")
1184}
1185
1186fn build_path_atlas_pipeline_state(
1187 device: &metal::DeviceRef,
1188 library: &metal::LibraryRef,
1189 label: &str,
1190 vertex_fn_name: &str,
1191 fragment_fn_name: &str,
1192 pixel_format: metal::MTLPixelFormat,
1193) -> metal::RenderPipelineState {
1194 let vertex_fn = library
1195 .get_function(vertex_fn_name, None)
1196 .expect("error locating vertex function");
1197 let fragment_fn = library
1198 .get_function(fragment_fn_name, None)
1199 .expect("error locating fragment function");
1200
1201 let descriptor = metal::RenderPipelineDescriptor::new();
1202 descriptor.set_label(label);
1203 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1204 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1205 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1206 color_attachment.set_pixel_format(pixel_format);
1207 color_attachment.set_blending_enabled(true);
1208 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1209 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1210 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
1211 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1212 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
1213 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1214
1215 device
1216 .new_render_pipeline_state(&descriptor)
1217 .expect("could not create render pipeline state")
1218}
1219
1220mod shaders {
1221 #![allow(non_upper_case_globals)]
1222 #![allow(non_camel_case_types)]
1223 #![allow(non_snake_case)]
1224
1225 use crate::{
1226 color::Color,
1227 geometry::vector::{Vector2F, Vector2I},
1228 };
1229 use std::mem;
1230
1231 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
1232
1233 pub trait ToFloat2 {
1234 fn to_float2(&self) -> vector_float2;
1235 }
1236
1237 impl ToFloat2 for (f32, f32) {
1238 fn to_float2(&self) -> vector_float2 {
1239 unsafe {
1240 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
1241 output <<= 32;
1242 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
1243 output
1244 }
1245 }
1246 }
1247
1248 impl ToFloat2 for Vector2F {
1249 fn to_float2(&self) -> vector_float2 {
1250 unsafe {
1251 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
1252 output <<= 32;
1253 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
1254 output
1255 }
1256 }
1257 }
1258
1259 impl ToFloat2 for Vector2I {
1260 fn to_float2(&self) -> vector_float2 {
1261 self.to_f32().to_float2()
1262 }
1263 }
1264
1265 impl Color {
1266 pub fn to_uchar4(&self) -> vector_uchar4 {
1267 let mut vec = self.a as vector_uchar4;
1268 vec <<= 8;
1269 vec |= self.b as vector_uchar4;
1270 vec <<= 8;
1271 vec |= self.g as vector_uchar4;
1272 vec <<= 8;
1273 vec |= self.r as vector_uchar4;
1274 vec
1275 }
1276 }
1277}