use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::{
    base::{NO, YES},
    foundation::NSUInteger,
    quartzcore::AutoresizingMask,
};
use core_foundation::base::TCFType;
use foreign_types::ForeignTypeRef;
use log::warn;
use media::core_video::{self, CVMetalTextureCache};
use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
use objc::{self, msg_send, sel, sel_impl};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

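/// Top-level Metal renderer state: the layer we draw into, one pipeline state
/// per primitive kind, the shared unit-quad vertex buffer, and a single
/// instance buffer that every draw call sub-allocates from during a frame.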
pub struct Renderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    cv_texture_cache: core_video::CVMetalTextureCache,
}

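/// A sprite referencing a rasterized path in one of the path atlas textures,
/// tagged with the layer it belongs to so it is composited in draw order.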
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

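/// A CoreVideo image buffer (e.g. a video frame) to be drawn within `bounds`.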
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}

impl Renderer {
    pub fn new(is_opaque: bool, fonts: Arc<dyn platform::FontSystem>) -> Self {
        const PIXEL_FORMAT: MTLPixelFormat = MTLPixelFormat::BGRA8Unorm;

        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(PIXEL_FORMAT);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

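        // Two triangles covering the unit square; the vertex shaders scale and
        // translate these to position every instanced primitive.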
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), 1., fonts.clone());
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), 1., fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            PIXEL_FORMAT,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            PIXEL_FORMAT,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            PIXEL_FORMAT,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            PIXEL_FORMAT,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            PIXEL_FORMAT,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            PIXEL_FORMAT,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            layer,
            command_queue: device.new_command_queue(),
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }

    pub fn layer(&self) -> &metal::MetalLayerRef {
        &*self.layer
    }

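    /// Renders a frame: rasterizes paths into atlas textures in their own
    /// render passes, draws every layer into the drawable, then commits the
    /// command buffer and presents.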
    pub fn render(&mut self, scene: &Scene) {
        let layer = self.layer.clone();
        let drawable_size = layer.drawable_size();
        let drawable = if let Some(drawable) = layer.next_drawable() {
            drawable
        } else {
            log::error!(
                "failed to retrieve next drawable, drawable size: {:?}",
                drawable_size
            );
            return;
        };
        let command_queue = self.command_queue.clone();
        let command_buffer = command_queue.new_command_buffer();

        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

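        // Byte cursor into `self.instances`; each render method below writes
        // its instance data at this offset and advances it.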
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            vec2f(drawable_size.width as f32, drawable_size.height as f32),
            command_buffer,
            drawable.texture(),
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();

        command_buffer.commit();
        command_buffer.wait_until_completed();
        drawable.present();
    }

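    /// Rasterizes paths into atlas textures, flushing a render pass whenever a
    /// path lands in a different atlas than its predecessor. Returns sprites
    /// used later to composite the rasterized paths into the main pass.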
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

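    /// Encodes a render pass drawing the given path vertices into one atlas
    /// texture; the path atlas pipeline's additive blending accumulates
    /// winding values in the R16Float target.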
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

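    /// Draws all layers into the drawable texture in a single render pass,
    /// scissoring each layer to its clip bounds and encoding its primitives
    /// in paint order.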
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        let alpha = if self.layer.is_opaque() { 1. } else { 0. };
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

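    /// Restricts subsequent draws to the layer's clip bounds (or the whole
    /// drawable when the layer has none), expressed as a scissor rect in
    /// device pixels.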
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer
            .clip_bounds()
            .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
            * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

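    // The render_* methods below share one pattern: reserve a 256-byte-aligned
    // slice of the instance buffer, bind it alongside the unit vertices and
    // viewport uniforms, copy the per-instance shader structs in, and issue a
    // single instanced draw of the two unit-square triangles.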
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

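    /// Draws filled, bordered, and rounded-corner rectangles.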
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

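    /// Draws glyphs and icons, grouped by the atlas texture holding their
    /// rasterized sprites so that each atlas requires only one instanced draw.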
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let Some(sprite) =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone())
            else {
                continue;
            };

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

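    /// Draws images and image glyphs, grouped by the image-cache atlas that
    /// holds their pixels.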
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                    grayscale: image.grayscale as u8,
                });
        }

        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                        grayscale: false as u8,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

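    /// Draws CoreVideo surfaces. Each biplanar YCbCr image buffer is exposed
    /// to Metal as two textures (luma and chroma planes) via the CV texture
    /// cache, and each surface gets its own draw call.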
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            let y_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::R8Unorm,
                    surface.image_buffer.plane_width(0),
                    surface.image_buffer.plane_height(0),
                    0,
                )
                .unwrap();
            let cb_cr_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::RG8Unorm,
                    surface.image_buffer.plane_width(1),
                    surface.image_buffer.plane_height(1),
                    1,
                )
                .unwrap();

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }

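    /// Composites the path sprites belonging to `layer_id` from their atlas
    /// textures into the main pass, batching consecutive sprites that share
    /// an atlas into one instanced draw.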
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

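    /// Draws underlines; squiggly underlines get triple height, presumably to
    /// give the wave room to oscillate in the fragment shader.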
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

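// Describes the 2048x2048 R16Float render-target textures that path winding
// values are accumulated into.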
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

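// Rounds `offset` up to the next multiple of 256, since Metal expects buffer
// offsets bound via set_vertex_buffer to be 256-byte aligned on macOS.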
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy
    }
}

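// Builds a pipeline that composites primitives over existing content:
// RGB blends SourceAlpha / OneMinusSourceAlpha, while alpha accumulates
// additively (One / One).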
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

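// Like build_pipeline_state, but fully additive (One / One on both channels)
// so that overlapping path triangles sum their contributions, which the
// winding-value accumulation in the path atlas relies on.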
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

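// Bindings for the types shared with the Metal shaders; `shaders.rs` is
// generated into OUT_DIR at build time.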
mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };
    use std::mem;

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

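    // `vector_float2` is exposed by the generated bindings as a 64-bit
    // integer, so these impls pack the y bit pattern into the high 32 bits
    // and the x bit pattern into the low 32 bits.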
    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            unsafe {
                let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
                output <<= 32;
                output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
                output
            }
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

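    // Packs the four color channels into a `vector_uchar4`, with r in the
    // lowest byte and a in the highest.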
    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}