use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::{
    base::{NO, YES},
    foundation::{NSRect, NSUInteger},
    quartzcore::AutoresizingMask,
};
use core_foundation::base::TCFType;
use foreign_types::ForeignTypeRef;
use log::warn;
use media::core_video::{self, CVMetalTextureCache};
use metal::{CGFloat, CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
use objc::{self, msg_send, sel, sel_impl};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

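/// Renders a `Scene` into a Metal layer.
///
/// All per-frame instance data (quads, shadows, sprites, underlines, etc.) is
/// staged in a single shared `instances` buffer at 256-byte-aligned offsets.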
pub struct Renderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    cv_texture_cache: core_video::CVMetalTextureCache,
}

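/// A path that has been rasterized into an atlas texture and is later
/// composited into its layer as a sprite.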
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

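/// A video surface backed by a CoreVideo image buffer (biplanar YCbCr).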
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}

impl Renderer {
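    /// Creates a renderer that draws to a `MetalLayer` on the system-default
    /// Metal device, building all pipeline states from the embedded
    /// `shaders.metallib`. Exits the process if no compatible device exists.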
    pub fn new(fonts: Arc<dyn platform::FontSystem>) -> Self {
        const PIXEL_FORMAT: MTLPixelFormat = MTLPixelFormat::BGRA8Unorm;

        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(PIXEL_FORMAT);
        layer.set_presents_with_transaction(true);
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), 1., fonts.clone());
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), 1., fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            PIXEL_FORMAT,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            PIXEL_FORMAT,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            PIXEL_FORMAT,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            PIXEL_FORMAT,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            PIXEL_FORMAT,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            PIXEL_FORMAT,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            layer,
            command_queue: device.new_command_queue(),
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }

    pub fn layer(&self) -> &metal::MetalLayerRef {
        &*self.layer
    }

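    /// Renders one frame: rasterizes the scene's paths into atlases, draws all
    /// layers into the next drawable, then commits and waits for the command
    /// buffer before presenting, since the layer presents with transaction.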
    pub fn render(&mut self, scene: &Scene) {
        let layer = self.layer.clone();
        let drawable = layer.next_drawable().unwrap();
        let command_queue = self.command_queue.clone();
        let command_buffer = command_queue.new_command_buffer();

        let frame: NSRect = unsafe { msg_send![self.layer(), frame] };
        let scale_factor: CGFloat = unsafe { msg_send![self.layer(), contentsScale] };
        let drawable_size =
            vec2f(frame.size.width as f32, frame.size.height as f32) * scale_factor as f32;

        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            drawable.texture(),
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();

        command_buffer.commit();
        command_buffer.wait_until_completed();
        drawable.present();
    }

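    /// Rasterizes every path in the scene into a path atlas, batching runs of
    /// paths that share an atlas into one draw. Returns a `PathSprite` per
    /// path so the atlas contents can be composited into layers later.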
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

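    /// Encodes a render pass that draws the given path vertices into a single
    /// atlas texture, staging the vertex data in the shared instance buffer at
    /// the current aligned offset.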
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

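    /// Draws every layer into the output texture in one render pass, scissored
    /// to each layer's clip bounds, drawing shadows, quads, path sprites,
    /// underlines, glyph/icon sprites, images, and surfaces in that order.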
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

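    /// Restricts subsequent draws to the layer's clip bounds, or to the entire
    /// drawable when the layer has no clip bounds.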
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer
            .clip_bounds()
            .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
            * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

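    /// Stages one `GPUIShadow` instance per shadow in the instance buffer, then
    /// draws all of them instanced over the unit-square vertices.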
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

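    /// Stages one `GPUIQuad` instance per quad in the instance buffer, then
    /// draws all of them instanced over the unit-square vertices.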
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

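    /// Draws glyphs and icons, grouped by the atlas that holds their rasterized
    /// sprites so each atlas texture is bound only once.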
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

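    /// Draws images and image-based glyphs (rendered via the image cache),
    /// grouped by atlas in the same way as `render_sprites`.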
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

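    /// Draws video surfaces by wrapping the luminance and chrominance planes of
    /// each `CVImageBuffer` in Metal textures via the CoreVideo texture cache.
    /// Only biplanar full-range 4:2:0 YCbCr buffers are supported.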
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            let y_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::R8Unorm,
                    surface.image_buffer.plane_width(0),
                    surface.image_buffer.plane_height(0),
                    0,
                )
                .unwrap();
            let cb_cr_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::RG8Unorm,
                    surface.image_buffer.plane_width(1),
                    surface.image_buffer.plane_height(1),
                    1,
                )
                .unwrap();

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }

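    /// Composites the path sprites belonging to the given layer, consuming them
    /// from the iterator and flushing a draw call whenever the atlas changes.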
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

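    /// Issues one instanced draw for the path sprites staged for a single
    /// atlas, then advances the instance-buffer offset past them.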
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

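    /// Stages one `GPUIUnderline` instance per underline and draws them
    /// instanced; squiggly underlines are given triple height for the wave.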
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

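/// Describes the 2048x2048 single-channel (R16Float) render targets used as
/// path atlases.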
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

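/// Rounds `offset` up to the next multiple of 256 so that buffer-offset
/// bindings satisfy Metal's alignment requirements on macOS.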
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy.
    }
}

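/// Builds a pipeline state for on-screen drawing, configured for conventional
/// source-over alpha blending against the drawable's pixel format.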
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

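/// Builds the pipeline state used to rasterize paths into an atlas. Blending
/// is purely additive (One/One), so overlapping path geometry accumulates
/// winding/coverage values in the R16Float target.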
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

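/// Shader types generated at build time from the Metal sources, plus helpers
/// for packing Rust-side values into the shaders' vector types.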
mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            // Pack the y bits into the high half and the x bits into the low half.
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.y().to_bits() as vector_float2;
            output <<= 32;
            output |= self.x().to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

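    /// Packs a `Color` into a `vector_uchar4`, with `r` in the least
    /// significant byte and `a` in the most significant byte.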
    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}
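
// A minimal sanity check for the offset-alignment helper above; the specific
// offsets are illustrative, not taken from real frames.
#[cfg(test)]
mod tests {
    use super::align_offset;

    #[test]
    fn align_offset_rounds_up_to_multiples_of_256() {
        let mut offset = 0;
        align_offset(&mut offset);
        assert_eq!(offset, 0); // zero is already aligned

        let mut offset = 1;
        align_offset(&mut offset);
        assert_eq!(offset, 256); // rounds up to the next 256-byte boundary

        let mut offset = 256;
        align_offset(&mut offset);
        assert_eq!(offset, 256); // exact multiples are unchanged

        let mut offset = 257;
        align_offset(&mut offset);
        assert_eq!(offset, 512);
    }
}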