1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
10};
11use cocoa::{
12 base::{NO, YES},
13 foundation::NSUInteger,
14 quartzcore::AutoresizingMask,
15};
16use core_foundation::base::TCFType;
17use foreign_types::ForeignTypeRef;
18use log::warn;
19use media::core_video::{self, CVMetalTextureCache};
20use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
21use objc::{self, msg_send, sel, sel_impl};
22use shaders::ToFloat2 as _;
23use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};
24
/// Pre-compiled Metal shader library, generated by the build script into `OUT_DIR`.
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
/// Capacity in bytes of the single shared instance buffer that all draw calls
/// sub-allocate from during a frame.
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
27
/// Metal-backed renderer that draws a `Scene` into a CAMetalLayer.
///
/// All per-frame instance data is written into the single shared `instances`
/// buffer; each draw call sub-allocates a region of it via a running offset.
pub struct Renderer {
    /// The CAMetalLayer this renderer presents into.
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    /// Atlas-backed cache of rasterized glyphs and icons.
    sprite_cache: SpriteCache,
    /// Atlas-backed cache of decoded images and image glyphs.
    image_cache: ImageCache,
    /// Atlases that paths are rasterized into before being composited.
    path_atlases: AtlasAllocator,
    // One pipeline state per primitive kind drawn by `render_layers`.
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    /// Six vertices forming two triangles that cover the unit square;
    /// instanced draws scale/position these in the vertex shaders.
    unit_vertices: metal::Buffer,
    /// Shared CPU/GPU (managed-storage) buffer holding all instance data for a frame.
    instances: metal::Buffer,
    /// Cache for wrapping CoreVideo image buffers as Metal textures.
    cv_texture_cache: core_video::CVMetalTextureCache,
}
45
/// A rasterized path, recorded while filling the path atlases so the
/// corresponding sprite can be composited when its layer is rendered.
struct PathSprite {
    /// Index of the scene layer this path belongs to.
    layer_id: usize,
    /// Id of the path atlas holding the rasterized path.
    atlas_id: usize,
    /// Instance data consumed by the sprite shader.
    shader_data: shaders::GPUISprite,
}
51
/// A video surface drawn from a CoreVideo image buffer.
pub struct Surface {
    /// Where the surface is drawn, in logical (unscaled) coordinates.
    pub bounds: RectF,
    /// The pixel data; must be biplanar full-range YCbCr (asserted in `render_surfaces`).
    pub image_buffer: core_video::CVImageBuffer,
}
56
57impl Renderer {
    /// Creates a renderer backed by the system's default Metal device.
    ///
    /// Configures the CAMetalLayer, builds every render pipeline state from
    /// the embedded shader library, and allocates the shared unit-vertex and
    /// instance buffers plus the sprite/image/path caches. Exits the process
    /// if no compatible GPU is available.
    ///
    /// * `is_opaque` - whether the backing layer should be opaque.
    /// * `fonts` - font system shared with the sprite and image caches.
    pub fn new(is_opaque: bool, fonts: Arc<dyn platform::FontSystem>) -> Self {
        const PIXEL_FORMAT: MTLPixelFormat = MTLPixelFormat::BGRA8Unorm;

        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            // Without a Metal device we cannot render anything at all.
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(PIXEL_FORMAT);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        // These layer properties have no safe bindings in the `metal` crate,
        // so they are set via raw Objective-C messages.
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; all instanced quads are
        // expanded from these in the vertex shaders.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Shared buffer all draw calls sub-allocate per-frame instance data from.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), 1., fonts.clone());
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), 1., fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        // One pipeline per primitive kind; all except the path atlas pipeline
        // render directly into the drawable's BGRA format.
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            PIXEL_FORMAT,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            PIXEL_FORMAT,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            PIXEL_FORMAT,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            PIXEL_FORMAT,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            PIXEL_FORMAT,
        );
        // Paths render into R16Float atlas textures, not the drawable.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            PIXEL_FORMAT,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            layer,
            command_queue: device.new_command_queue(),
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }
184
185 pub fn layer(&self) -> &metal::MetalLayerRef {
186 &*self.layer
187 }
188
189 pub fn render(&mut self, scene: &Scene) {
190 let layer = self.layer.clone();
191 let drawable_size = layer.drawable_size();
192 let drawable = layer.next_drawable().unwrap();
193 let command_queue = self.command_queue.clone();
194 let command_buffer = command_queue.new_command_buffer();
195
196 self.sprite_cache.set_scale_factor(scene.scale_factor());
197 self.image_cache.set_scale_factor(scene.scale_factor());
198
199 let mut offset = 0;
200
201 let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
202 self.render_layers(
203 scene,
204 path_sprites,
205 &mut offset,
206 vec2f(drawable_size.width as f32, drawable_size.height as f32),
207 command_buffer,
208 drawable.texture(),
209 );
210 self.instances.did_modify_range(NSRange {
211 location: 0,
212 length: offset as NSUInteger,
213 });
214 self.image_cache.finish_frame();
215
216 command_buffer.commit();
217 command_buffer.wait_until_completed();
218 drawable.present();
219 }
220
    /// Rasterizes every path in the scene into the path atlas textures,
    /// returning one `PathSprite` per path so `render_layers` can later
    /// composite the rasterized results into the frame.
    ///
    /// Vertices for consecutive paths that land in the same atlas are
    /// accumulated and flushed as a single render pass whenever the target
    /// atlas changes (and once more at the end).
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        // Atlas allocations are rebuilt from scratch every frame.
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // Flush the pending batch when this path was allocated in a
                // different atlas than the vertices accumulated so far.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                // Translate path vertices from scene coordinates into the
                // atlas region allocated above, clipped to that region.
                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        // Flush the final batch.
        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }
290
291 fn render_paths_to_atlas(
292 &mut self,
293 offset: &mut usize,
294 vertices: &[shaders::GPUIPathVertex],
295 atlas_id: usize,
296 command_buffer: &metal::CommandBufferRef,
297 ) {
298 align_offset(offset);
299 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
300 assert!(
301 next_offset <= INSTANCE_BUFFER_SIZE,
302 "instance buffer exhausted"
303 );
304
305 let render_pass_descriptor = metal::RenderPassDescriptor::new();
306 let color_attachment = render_pass_descriptor
307 .color_attachments()
308 .object_at(0)
309 .unwrap();
310 let texture = self.path_atlases.texture(atlas_id).unwrap();
311 color_attachment.set_texture(Some(texture));
312 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
313 color_attachment.set_store_action(metal::MTLStoreAction::Store);
314 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
315
316 let path_atlas_command_encoder =
317 command_buffer.new_render_command_encoder(render_pass_descriptor);
318 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
319 path_atlas_command_encoder.set_vertex_buffer(
320 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
321 Some(&self.instances),
322 *offset as u64,
323 );
324 path_atlas_command_encoder.set_vertex_bytes(
325 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
326 mem::size_of::<shaders::vector_float2>() as u64,
327 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
328 as *const c_void,
329 );
330
331 let buffer_contents = unsafe {
332 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
333 };
334
335 for (ix, vertex) in vertices.iter().enumerate() {
336 unsafe {
337 *buffer_contents.add(ix) = *vertex;
338 }
339 }
340
341 path_atlas_command_encoder.draw_primitives(
342 metal::MTLPrimitiveType::Triangle,
343 0,
344 vertices.len() as u64,
345 );
346 path_atlas_command_encoder.end_encoding();
347 *offset = next_offset;
348 }
349
    /// Draws every layer of the scene into `output` within a single render
    /// pass.
    ///
    /// Within each layer, primitive kinds are drawn in a fixed order that
    /// establishes z-ordering: shadows, quads, path sprites, underlines,
    /// glyph/icon sprites, images, then surfaces. Path sprites were
    /// rasterized earlier by `render_path_atlases` and are consumed here in
    /// layer order.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        // Opaque layers clear to opaque black; transparent layers clear to
        // fully transparent so the content behind the window shows through.
        let alpha = if self.layer.is_opaque() { 1. } else { 0. };
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            // Scissor each layer to its clip bounds before drawing anything.
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
439
440 fn clip(
441 &mut self,
442 scene: &Scene,
443 layer: &Layer,
444 drawable_size: Vector2F,
445 command_encoder: &metal::RenderCommandEncoderRef,
446 ) {
447 let clip_bounds = (layer
448 .clip_bounds()
449 .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
450 * scene.scale_factor())
451 .round();
452 command_encoder.set_scissor_rect(metal::MTLScissorRect {
453 x: clip_bounds.origin_x() as NSUInteger,
454 y: clip_bounds.origin_y() as NSUInteger,
455 width: clip_bounds.width() as NSUInteger,
456 height: clip_bounds.height() as NSUInteger,
457 });
458 }
459
    /// Draws all drop shadows for one layer with a single instanced draw call
    /// (six unit-quad vertices per shadow).
    ///
    /// Shadow instances are written into the shared instance buffer at
    /// `offset`, which is advanced past the bytes consumed.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write one GPUIShadow instance per shadow, scaled to device pixels.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
524
    /// Draws all quads for one layer with a single instanced draw call.
    ///
    /// Quad instances are written into the shared instance buffer at
    /// `offset`, which is advanced past the bytes consumed.
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                // Each border side's bool is used as a 0/1 multiplier, so
                // disabled sides get a zero width.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }
596
597 fn render_sprites(
598 &mut self,
599 glyphs: &[Glyph],
600 icons: &[Icon],
601 scale_factor: f32,
602 offset: &mut usize,
603 drawable_size: Vector2F,
604 command_encoder: &metal::RenderCommandEncoderRef,
605 ) {
606 if glyphs.is_empty() && icons.is_empty() {
607 return;
608 }
609
610 let mut sprites_by_atlas = HashMap::new();
611
612 for glyph in glyphs {
613 if let Some(sprite) = self.sprite_cache.render_glyph(
614 glyph.font_id,
615 glyph.font_size,
616 glyph.id,
617 glyph.origin,
618 ) {
619 // Snap sprite to pixel grid.
620 let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
621 sprites_by_atlas
622 .entry(sprite.atlas_id)
623 .or_insert_with(Vec::new)
624 .push(shaders::GPUISprite {
625 origin: origin.to_float2(),
626 target_size: sprite.size.to_float2(),
627 source_size: sprite.size.to_float2(),
628 atlas_origin: sprite.atlas_origin.to_float2(),
629 color: glyph.color.to_uchar4(),
630 compute_winding: 0,
631 });
632 }
633 }
634
635 for icon in icons {
636 // Snap sprite to pixel grid.
637 let origin = (icon.bounds.origin() * scale_factor).floor();
638 let target_size = (icon.bounds.size() * scale_factor).ceil();
639 let source_size = (target_size * 2.).to_i32();
640
641 let sprite =
642 self.sprite_cache
643 .render_icon(source_size, icon.path.clone(), icon.svg.clone());
644 if sprite.is_none() {
645 continue;
646 }
647 let sprite = sprite.unwrap();
648
649 sprites_by_atlas
650 .entry(sprite.atlas_id)
651 .or_insert_with(Vec::new)
652 .push(shaders::GPUISprite {
653 origin: origin.to_float2(),
654 target_size: target_size.to_float2(),
655 source_size: sprite.size.to_float2(),
656 atlas_origin: sprite.atlas_origin.to_float2(),
657 color: icon.color.to_uchar4(),
658 compute_winding: 0,
659 });
660 }
661
662 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
663 command_encoder.set_vertex_buffer(
664 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
665 Some(&self.unit_vertices),
666 0,
667 );
668 command_encoder.set_vertex_bytes(
669 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
670 mem::size_of::<shaders::vector_float2>() as u64,
671 [drawable_size.to_float2()].as_ptr() as *const c_void,
672 );
673
674 for (atlas_id, sprites) in sprites_by_atlas {
675 align_offset(offset);
676 let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
677 assert!(
678 next_offset <= INSTANCE_BUFFER_SIZE,
679 "instance buffer exhausted"
680 );
681
682 let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
683 command_encoder.set_vertex_buffer(
684 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
685 Some(&self.instances),
686 *offset as u64,
687 );
688 command_encoder.set_vertex_bytes(
689 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
690 mem::size_of::<shaders::vector_float2>() as u64,
691 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
692 as *const c_void,
693 );
694
695 command_encoder.set_fragment_texture(
696 shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
697 Some(texture),
698 );
699
700 unsafe {
701 let buffer_contents =
702 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
703 std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
704 }
705
706 command_encoder.draw_primitives_instanced(
707 metal::MTLPrimitiveType::Triangle,
708 0,
709 6,
710 sprites.len() as u64,
711 );
712 *offset = next_offset;
713 }
714 }
715
    /// Draws images and image glyphs for one layer, batched by the atlas
    /// texture that holds their pixels (one instanced draw call per atlas).
    ///
    /// Image data is uploaded/cached through the image cache; instances are
    /// written into the shared instance buffer at `offset`, which is advanced
    /// past the bytes consumed.
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            // The cache uploads the image (or reuses a prior upload) and
            // returns where it lives inside an atlas.
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // Border bools act as 0/1 multipliers per side.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                    grayscale: image.grayscale as u8,
                });
        }

        // Image glyphs are drawn at their rasterized size with no border,
        // rounding or grayscale.
        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                        grayscale: false as u8,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Bulk-copy this atlas's instances into the shared buffer.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
832
    /// Draws video surfaces for one layer, one draw call per surface.
    ///
    /// Each surface's CoreVideo buffer must be biplanar full-range YCbCr
    /// (asserted below); the Y plane is bound as an R8 texture and the
    /// interleaved CbCr plane as an RG8 texture, both created through the
    /// CVMetalTextureCache without copying pixel data.
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            // Only this pixel format is supported by the surface shader.
            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            // Plane 0: luminance (Y), one byte per pixel.
            let y_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::R8Unorm,
                    surface.image_buffer.plane_width(0),
                    surface.image_buffer.plane_height(0),
                    0,
                )
                .unwrap();
            // Plane 1: interleaved chroma (CbCr), two bytes per sample.
            let cb_cr_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::RG8Unorm,
                    surface.image_buffer.plane_width(1),
                    surface.image_buffer.plane_height(1),
                    1,
                )
                .unwrap();

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            // Write the single surface instance into the shared buffer.
            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }
937
    /// Composites previously rasterized path sprites belonging to `layer_id`,
    /// consuming them from the front of the shared iterator.
    ///
    /// Sprites were produced by `render_path_atlases` in layer order, so the
    /// iterator is peeked and this function stops as soon as it sees a sprite
    /// from a later layer. Consecutive sprites sharing an atlas are written
    /// into the shared instance buffer and drawn with one instanced call per
    /// atlas; the buffer offset is only advanced when a batch is flushed by
    /// `render_path_sprites_for_atlas`.
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            // Leave sprites from later layers for subsequent calls.
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    // Atlas changed: draw what has accumulated, then start a
                    // new batch at a freshly aligned offset.
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            // Append this sprite's instance data to the current batch.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        // Flush the final batch, if any sprites were consumed.
        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }
1003
    /// Issues one instanced draw for `sprite_count` path sprites whose
    /// instance data the caller has already written at `*offset` in the
    /// shared buffer, sampling from the path atlas `atlas_id`; advances
    /// `offset` past the consumed bytes.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
1041
    /// Draws all underlines for one layer with a single instanced draw call.
    ///
    /// Underline instances are written into the shared instance buffer at
    /// `offset`, which is advanced past the bytes consumed.
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            // Squiggly underlines get triple the quad height so the wave has
            // room to oscillate; the shader still uses `thickness` for the
            // stroke itself.
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
1110}
1111
1112fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
1113 let texture_descriptor = metal::TextureDescriptor::new();
1114 texture_descriptor.set_width(2048);
1115 texture_descriptor.set_height(2048);
1116 texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
1117 texture_descriptor
1118 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
1119 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
1120 texture_descriptor
1121}
1122
/// Rounds `offset` up to the next multiple of 256; Metal requires 256-byte
/// alignment for buffer-bind offsets.
fn align_offset(offset: &mut usize) {
    *offset = (*offset + 255) / 256 * 256;
}
1129
1130fn build_pipeline_state(
1131 device: &metal::DeviceRef,
1132 library: &metal::LibraryRef,
1133 label: &str,
1134 vertex_fn_name: &str,
1135 fragment_fn_name: &str,
1136 pixel_format: metal::MTLPixelFormat,
1137) -> metal::RenderPipelineState {
1138 let vertex_fn = library
1139 .get_function(vertex_fn_name, None)
1140 .expect("error locating vertex function");
1141 let fragment_fn = library
1142 .get_function(fragment_fn_name, None)
1143 .expect("error locating fragment function");
1144
1145 let descriptor = metal::RenderPipelineDescriptor::new();
1146 descriptor.set_label(label);
1147 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1148 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1149 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1150 color_attachment.set_pixel_format(pixel_format);
1151 color_attachment.set_blending_enabled(true);
1152 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1153 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1154 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
1155 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1156 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
1157 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1158
1159 device
1160 .new_render_pipeline_state(&descriptor)
1161 .expect("could not create render pipeline state")
1162}
1163
1164fn build_path_atlas_pipeline_state(
1165 device: &metal::DeviceRef,
1166 library: &metal::LibraryRef,
1167 label: &str,
1168 vertex_fn_name: &str,
1169 fragment_fn_name: &str,
1170 pixel_format: metal::MTLPixelFormat,
1171) -> metal::RenderPipelineState {
1172 let vertex_fn = library
1173 .get_function(vertex_fn_name, None)
1174 .expect("error locating vertex function");
1175 let fragment_fn = library
1176 .get_function(fragment_fn_name, None)
1177 .expect("error locating fragment function");
1178
1179 let descriptor = metal::RenderPipelineDescriptor::new();
1180 descriptor.set_label(label);
1181 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1182 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1183 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1184 color_attachment.set_pixel_format(pixel_format);
1185 color_attachment.set_blending_enabled(true);
1186 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1187 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1188 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
1189 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1190 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
1191 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1192
1193 device
1194 .new_render_pipeline_state(&descriptor)
1195 .expect("could not create render pipeline state")
1196}
1197
1198mod shaders {
1199 #![allow(non_upper_case_globals)]
1200 #![allow(non_camel_case_types)]
1201 #![allow(non_snake_case)]
1202
1203 use crate::{
1204 color::Color,
1205 geometry::vector::{Vector2F, Vector2I},
1206 };
1207 use std::mem;
1208
1209 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
1210
1211 pub trait ToFloat2 {
1212 fn to_float2(&self) -> vector_float2;
1213 }
1214
1215 impl ToFloat2 for (f32, f32) {
1216 fn to_float2(&self) -> vector_float2 {
1217 unsafe {
1218 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
1219 output <<= 32;
1220 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
1221 output
1222 }
1223 }
1224 }
1225
1226 impl ToFloat2 for Vector2F {
1227 fn to_float2(&self) -> vector_float2 {
1228 unsafe {
1229 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
1230 output <<= 32;
1231 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
1232 output
1233 }
1234 }
1235 }
1236
1237 impl ToFloat2 for Vector2I {
1238 fn to_float2(&self) -> vector_float2 {
1239 self.to_f32().to_float2()
1240 }
1241 }
1242
1243 impl Color {
1244 pub fn to_uchar4(&self) -> vector_uchar4 {
1245 let mut vec = self.a as vector_uchar4;
1246 vec <<= 8;
1247 vec |= self.b as vector_uchar4;
1248 vec <<= 8;
1249 vec |= self.g as vector_uchar4;
1250 vec <<= 8;
1251 vec |= self.r as vector_uchar4;
1252 vec
1253 }
1254 }
1255}