1use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
2use crate::{
3 color::Color,
4 geometry::{
5 rect::RectF,
6 vector::{vec2f, vec2i, Vector2F},
7 },
8 platform,
9 scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
10};
11use cocoa::foundation::NSUInteger;
12use core_foundation::base::TCFType;
13use foreign_types::ForeignTypeRef;
14use log::warn;
15use media::core_video::{self, CVMetalTextureCache};
16use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
17use shaders::ToFloat2 as _;
18use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};
19
/// Precompiled Metal shader library, embedded at build time from OUT_DIR.
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
/// Size in bytes of the shared per-frame instance staging buffer (8 MiB).
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.
22
/// Metal-backed renderer that draws a `Scene` into a drawable texture.
///
/// All per-frame instance data (quads, shadows, sprites, …) is staged in a
/// single shared `instances` buffer at 256-byte-aligned offsets.
pub struct Renderer {
    // Rasterized glyphs and icons cached in texture atlases.
    sprite_cache: SpriteCache,
    // Rendered images cached in texture atlases.
    image_cache: ImageCache,
    // Allocates cells in the offscreen textures used for path rasterization.
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    // Six vertices forming two triangles that cover the unit square; every
    // instanced primitive is scaled/positioned from these in the shaders.
    unit_vertices: metal::Buffer,
    // Shared staging buffer holding all per-frame instance data.
    instances: metal::Buffer,
    // Wraps `Surface` CVImageBuffers as Metal textures without copying.
    cv_texture_cache: core_video::CVMetalTextureCache,
}
37
/// A path that has been rasterized into a path atlas, waiting to be
/// composited into its layer during `render_layers`.
struct PathSprite {
    // Index of the scene layer the path belongs to.
    layer_id: usize,
    // Which path atlas texture holds the rasterized path.
    atlas_id: usize,
    // Sprite instance handed to the sprite shader when compositing.
    shader_data: shaders::GPUISprite,
}
43
/// An externally produced image (a Core Video `CVImageBuffer`) to be drawn
/// at `bounds` in scene coordinates.
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}
48
49impl Renderer {
    /// Creates a renderer for the given Metal device.
    ///
    /// Compiles the embedded shader library, allocates the unit-quad vertex
    /// buffer and the shared instance staging buffer, builds one render
    /// pipeline state per primitive kind, and initializes the sprite/image
    /// caches and the path atlas allocator.
    ///
    /// `pixel_format` must match the drawable this renderer will target;
    /// `scale_factor` seeds the caches and may be updated each frame from the
    /// scene inside `render`.
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; all instanced primitives
        // are scaled and positioned from these six vertices in the shaders.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        // Managed-storage staging buffer; CPU writes are flushed to the GPU
        // via did_modify_range at the end of each frame.
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(
            device.clone(),
            vec2i(1024, 768),
            scale_factor,
            fonts.clone(),
        );
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        // Paths are rasterized into single-channel R16Float atlases rather
        // than the drawable's pixel format.
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }
151
    /// Renders one frame: rasterizes all paths into offscreen atlases, draws
    /// every layer into `output`, then notifies Metal of the range of the
    /// managed instance buffer written by the CPU this frame.
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        // Keep the glyph/image caches in sync with the scene's DPI scale.
        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        // Running write cursor into the shared instance buffer.
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // The instance buffer uses managed storage, so flag everything we
        // wrote so Metal synchronizes it to the GPU before execution.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }
179
180 fn render_path_atlases(
181 &mut self,
182 scene: &Scene,
183 offset: &mut usize,
184 command_buffer: &metal::CommandBufferRef,
185 ) -> Vec<PathSprite> {
186 self.path_atlases.clear();
187 let mut sprites = Vec::new();
188 let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
189 let mut current_atlas_id = None;
190 for (layer_id, layer) in scene.layers().enumerate() {
191 for path in layer.paths() {
192 let origin = path.bounds.origin() * scene.scale_factor();
193 let size = (path.bounds.size() * scene.scale_factor()).ceil();
194
195 let path_allocation = self.path_atlases.allocate(size.to_i32());
196 if path_allocation.is_none() {
197 // Path size was likely zero.
198 warn!("could not allocate path texture of size {:?}", size);
199 continue;
200 }
201 let (alloc_id, atlas_origin) = path_allocation.unwrap();
202 let atlas_origin = atlas_origin.to_f32();
203 sprites.push(PathSprite {
204 layer_id,
205 atlas_id: alloc_id.atlas_id,
206 shader_data: shaders::GPUISprite {
207 origin: origin.floor().to_float2(),
208 target_size: size.to_float2(),
209 source_size: size.to_float2(),
210 atlas_origin: atlas_origin.to_float2(),
211 color: path.color.to_uchar4(),
212 compute_winding: 1,
213 },
214 });
215
216 if let Some(current_atlas_id) = current_atlas_id {
217 if alloc_id.atlas_id != current_atlas_id {
218 self.render_paths_to_atlas(
219 offset,
220 &vertices,
221 current_atlas_id,
222 command_buffer,
223 );
224 vertices.clear();
225 }
226 }
227
228 current_atlas_id = Some(alloc_id.atlas_id);
229
230 for vertex in &path.vertices {
231 let xy_position =
232 (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
233 vertices.push(shaders::GPUIPathVertex {
234 xy_position: (atlas_origin + xy_position).to_float2(),
235 st_position: vertex.st_position.to_float2(),
236 clip_rect_origin: atlas_origin.to_float2(),
237 clip_rect_size: size.to_float2(),
238 });
239 }
240 }
241 }
242
243 if let Some(atlas_id) = current_atlas_id {
244 self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
245 }
246
247 sprites
248 }
249
250 fn render_paths_to_atlas(
251 &mut self,
252 offset: &mut usize,
253 vertices: &[shaders::GPUIPathVertex],
254 atlas_id: usize,
255 command_buffer: &metal::CommandBufferRef,
256 ) {
257 align_offset(offset);
258 let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
259 assert!(
260 next_offset <= INSTANCE_BUFFER_SIZE,
261 "instance buffer exhausted"
262 );
263
264 let render_pass_descriptor = metal::RenderPassDescriptor::new();
265 let color_attachment = render_pass_descriptor
266 .color_attachments()
267 .object_at(0)
268 .unwrap();
269 let texture = self.path_atlases.texture(atlas_id).unwrap();
270 color_attachment.set_texture(Some(texture));
271 color_attachment.set_load_action(metal::MTLLoadAction::Clear);
272 color_attachment.set_store_action(metal::MTLStoreAction::Store);
273 color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
274
275 let path_atlas_command_encoder =
276 command_buffer.new_render_command_encoder(render_pass_descriptor);
277 path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
278 path_atlas_command_encoder.set_vertex_buffer(
279 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
280 Some(&self.instances),
281 *offset as u64,
282 );
283 path_atlas_command_encoder.set_vertex_bytes(
284 shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
285 mem::size_of::<shaders::vector_float2>() as u64,
286 [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
287 as *const c_void,
288 );
289
290 let buffer_contents = unsafe {
291 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
292 };
293
294 for (ix, vertex) in vertices.iter().enumerate() {
295 unsafe {
296 *buffer_contents.add(ix) = *vertex;
297 }
298 }
299
300 path_atlas_command_encoder.draw_primitives(
301 metal::MTLPrimitiveType::Triangle,
302 0,
303 vertices.len() as u64,
304 );
305 path_atlas_command_encoder.end_encoding();
306 *offset = next_offset;
307 }
308
    /// Encodes a single render pass that draws every layer of the scene into
    /// `output`, back to front. Within each layer, primitives are drawn in a
    /// fixed order: shadows, quads, path sprites, underlines, glyph/icon
    /// sprites, images, then surfaces.
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        // Clear to opaque black before drawing the frame.
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        // Path sprites arrive ordered by layer id; each layer consumes its
        // own prefix of this iterator inside render_path_sprites.
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            // Restrict all subsequent draws to the layer's clip bounds.
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }
397
398 fn clip(
399 &mut self,
400 scene: &Scene,
401 layer: &Layer,
402 drawable_size: Vector2F,
403 command_encoder: &metal::RenderCommandEncoderRef,
404 ) {
405 let clip_bounds = (layer
406 .clip_bounds()
407 .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
408 * scene.scale_factor())
409 .round();
410 command_encoder.set_scissor_rect(metal::MTLScissorRect {
411 x: clip_bounds.origin_x() as NSUInteger,
412 y: clip_bounds.origin_y() as NSUInteger,
413 width: clip_bounds.width() as NSUInteger,
414 height: clip_bounds.height() as NSUInteger,
415 });
416 }
417
    /// Draws drop shadows with one instanced draw call, staging one
    /// `GPUIShadow` per shadow in the shared instance buffer.
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instance data directly into the mapped instance buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            // Convert layout coordinates to device pixels.
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                // NOTE(review): sigma is passed through unscaled, unlike the
                // other dimensions — confirm this is intentional.
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }
482
483 fn render_quads(
484 &mut self,
485 quads: &[Quad],
486 scale_factor: f32,
487 offset: &mut usize,
488 drawable_size: Vector2F,
489 command_encoder: &metal::RenderCommandEncoderRef,
490 ) {
491 if quads.is_empty() {
492 return;
493 }
494 align_offset(offset);
495 let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
496 assert!(
497 next_offset <= INSTANCE_BUFFER_SIZE,
498 "instance buffer exhausted"
499 );
500
501 command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
502 command_encoder.set_vertex_buffer(
503 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
504 Some(&self.unit_vertices),
505 0,
506 );
507 command_encoder.set_vertex_buffer(
508 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
509 Some(&self.instances),
510 *offset as u64,
511 );
512 command_encoder.set_vertex_bytes(
513 shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
514 mem::size_of::<shaders::GPUIUniforms>() as u64,
515 [shaders::GPUIUniforms {
516 viewport_size: drawable_size.to_float2(),
517 }]
518 .as_ptr() as *const c_void,
519 );
520
521 let buffer_contents = unsafe {
522 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
523 };
524 for (ix, quad) in quads.iter().enumerate() {
525 let bounds = quad.bounds * scale_factor;
526 let border_width = quad.border.width * scale_factor;
527 let shader_quad = shaders::GPUIQuad {
528 origin: bounds.origin().round().to_float2(),
529 size: bounds.size().round().to_float2(),
530 background_color: quad
531 .background
532 .unwrap_or_else(Color::transparent_black)
533 .to_uchar4(),
534 border_top: border_width * (quad.border.top as usize as f32),
535 border_right: border_width * (quad.border.right as usize as f32),
536 border_bottom: border_width * (quad.border.bottom as usize as f32),
537 border_left: border_width * (quad.border.left as usize as f32),
538 border_color: quad.border.color.to_uchar4(),
539 corner_radius: quad.corner_radius * scale_factor,
540 };
541 unsafe {
542 *(buffer_contents.add(ix)) = shader_quad;
543 }
544 }
545
546 command_encoder.draw_primitives_instanced(
547 metal::MTLPrimitiveType::Triangle,
548 0,
549 6,
550 quads.len() as u64,
551 );
552 *offset = next_offset;
553 }
554
    /// Draws glyphs and icons as textured sprites. Sprites are grouped by the
    /// atlas texture that holds their rasterized image, and each group is
    /// drawn with one instanced draw call.
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        // atlas_id -> sprite instances that sample from that atlas.
        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            // Glyphs that fail to rasterize are silently skipped.
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            // Rasterize at 2x the target size — presumably for supersampled
            // quality when scaled down; TODO confirm against SpriteCache.
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, sprites) in sprites_by_atlas {
            // Metal requires 256-byte-aligned buffer offsets.
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Bulk-copy the batch into the mapped instance buffer.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }
673
    /// Draws images and emoji/image glyphs. Instances are grouped by the
    /// image-cache atlas that holds their pixels, and each group is drawn
    /// with one instanced draw call.
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        // atlas_id -> image instances that sample from that atlas.
        let mut images_by_atlas = HashMap::new();
        for image in images {
            // Convert layout coordinates to device pixels.
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    // Each side's width is zeroed out when that side is disabled.
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            // Snap glyph origin to the pixel grid.
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                // Image glyphs are drawn 1:1 from the atlas with no border or
                // corner rounding.
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        // One instanced draw per atlas texture.
        for (atlas_id, images) in images_by_atlas {
            // Metal requires 256-byte-aligned buffer offsets.
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            // Bulk-copy the batch into the mapped instance buffer.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }
788
    /// Draws `Surface`s (Core Video image buffers) using the image pipeline.
    /// Each surface becomes its own draw call with a texture created from the
    /// CVImageBuffer via the CV texture cache (zero-copy where possible).
    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        // Surfaces reuse the image pipeline; the whole texture is sampled, so
        // atlas_origin is zero and the "atlas size" equals the surface size.
        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;
            // Only 32BGRA image buffers are supported; anything else is
            // treated as a programmer error and panics.
            let pixel_format = if surface.image_buffer.pixel_format_type()
                == core_video::kCVPixelFormatType_32BGRA
            {
                MTLPixelFormat::BGRA8Unorm
            } else {
                panic!("unsupported pixel format")
            };

            let texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    pixel_format,
                    source_size.x() as usize,
                    source_size.y() as usize,
                    0,
                )
                .unwrap();

            // Metal requires 256-byte-aligned buffer offsets.
            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture.as_texture_ref()),
            );

            // Stage a single GPUIImage instance for this surface.
            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUIImage {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                        atlas_origin: Default::default(),
                        border_top: Default::default(),
                        border_right: Default::default(),
                        border_bottom: Default::default(),
                        border_left: Default::default(),
                        border_color: Default::default(),
                        corner_radius: Default::default(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }
886
887 fn render_path_sprites(
888 &mut self,
889 layer_id: usize,
890 sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
891 offset: &mut usize,
892 drawable_size: Vector2F,
893 command_encoder: &metal::RenderCommandEncoderRef,
894 ) {
895 command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
896 command_encoder.set_vertex_buffer(
897 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
898 Some(&self.unit_vertices),
899 0,
900 );
901 command_encoder.set_vertex_bytes(
902 shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
903 mem::size_of::<shaders::vector_float2>() as u64,
904 [drawable_size.to_float2()].as_ptr() as *const c_void,
905 );
906
907 let mut atlas_id = None;
908 let mut atlas_sprite_count = 0;
909 align_offset(offset);
910
911 while let Some(sprite) = sprites.peek() {
912 if sprite.layer_id != layer_id {
913 break;
914 }
915
916 let sprite = sprites.next().unwrap();
917 if let Some(atlas_id) = atlas_id.as_mut() {
918 if sprite.atlas_id != *atlas_id {
919 self.render_path_sprites_for_atlas(
920 offset,
921 *atlas_id,
922 atlas_sprite_count,
923 command_encoder,
924 );
925
926 *atlas_id = sprite.atlas_id;
927 atlas_sprite_count = 0;
928 align_offset(offset);
929 }
930 } else {
931 atlas_id = Some(sprite.atlas_id);
932 }
933
934 unsafe {
935 let buffer_contents =
936 (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
937 *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
938 }
939
940 atlas_sprite_count += 1;
941 }
942
943 if let Some(atlas_id) = atlas_id {
944 self.render_path_sprites_for_atlas(
945 offset,
946 atlas_id,
947 atlas_sprite_count,
948 command_encoder,
949 );
950 }
951 }
952
    /// Issues one instanced draw for `sprite_count` path sprites sampling
    /// from the path atlas `atlas_id`.
    ///
    /// The caller must already have written the sprite instances into the
    /// instance buffer starting at `offset`; this method only binds the
    /// buffers/texture, draws, and advances `offset` past the batch.
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }
990
    /// Draws underlines with one instanced draw call, staging one
    /// `GPUIUnderline` per underline in the shared instance buffer.
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        // Metal requires 256-byte-aligned buffer offsets.
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        // Write instance data directly into the mapped instance buffer.
        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            // Squiggly underlines need vertical room for the wave, so the
            // quad is three thicknesses tall.
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
1059}
1060
1061fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
1062 let texture_descriptor = metal::TextureDescriptor::new();
1063 texture_descriptor.set_width(2048);
1064 texture_descriptor.set_height(2048);
1065 texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
1066 texture_descriptor
1067 .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
1068 texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
1069 texture_descriptor
1070}
1071
/// Rounds `offset` up to the next multiple of 256, as Metal expects for
/// buffer binding offsets. Offsets already on a 256-byte boundary are
/// left unchanged.
fn align_offset(offset: &mut usize) {
    // Add 255 then clear the low 8 bits: equivalent to the classic
    // "if remainder > 0, bump to the next multiple" formulation.
    *offset = (*offset + 255) & !255;
}
1078
1079fn build_pipeline_state(
1080 device: &metal::DeviceRef,
1081 library: &metal::LibraryRef,
1082 label: &str,
1083 vertex_fn_name: &str,
1084 fragment_fn_name: &str,
1085 pixel_format: metal::MTLPixelFormat,
1086) -> metal::RenderPipelineState {
1087 let vertex_fn = library
1088 .get_function(vertex_fn_name, None)
1089 .expect("error locating vertex function");
1090 let fragment_fn = library
1091 .get_function(fragment_fn_name, None)
1092 .expect("error locating fragment function");
1093
1094 let descriptor = metal::RenderPipelineDescriptor::new();
1095 descriptor.set_label(label);
1096 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1097 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1098 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1099 color_attachment.set_pixel_format(pixel_format);
1100 color_attachment.set_blending_enabled(true);
1101 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1102 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1103 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
1104 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1105 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
1106 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1107
1108 device
1109 .new_render_pipeline_state(&descriptor)
1110 .expect("could not create render pipeline state")
1111}
1112
1113fn build_path_atlas_pipeline_state(
1114 device: &metal::DeviceRef,
1115 library: &metal::LibraryRef,
1116 label: &str,
1117 vertex_fn_name: &str,
1118 fragment_fn_name: &str,
1119 pixel_format: metal::MTLPixelFormat,
1120) -> metal::RenderPipelineState {
1121 let vertex_fn = library
1122 .get_function(vertex_fn_name, None)
1123 .expect("error locating vertex function");
1124 let fragment_fn = library
1125 .get_function(fragment_fn_name, None)
1126 .expect("error locating fragment function");
1127
1128 let descriptor = metal::RenderPipelineDescriptor::new();
1129 descriptor.set_label(label);
1130 descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
1131 descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
1132 let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
1133 color_attachment.set_pixel_format(pixel_format);
1134 color_attachment.set_blending_enabled(true);
1135 color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
1136 color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
1137 color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
1138 color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
1139 color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
1140 color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);
1141
1142 device
1143 .new_render_pipeline_state(&descriptor)
1144 .expect("could not create render pipeline state")
1145}
1146
1147mod shaders {
1148 #![allow(non_upper_case_globals)]
1149 #![allow(non_camel_case_types)]
1150 #![allow(non_snake_case)]
1151
1152 use crate::{
1153 color::Color,
1154 geometry::vector::{Vector2F, Vector2I},
1155 };
1156 use std::mem;
1157
1158 include!(concat!(env!("OUT_DIR"), "/shaders.rs"));
1159
1160 pub trait ToFloat2 {
1161 fn to_float2(&self) -> vector_float2;
1162 }
1163
1164 impl ToFloat2 for (f32, f32) {
1165 fn to_float2(&self) -> vector_float2 {
1166 unsafe {
1167 let mut output = mem::transmute::<_, u32>(self.1.to_bits()) as vector_float2;
1168 output <<= 32;
1169 output |= mem::transmute::<_, u32>(self.0.to_bits()) as vector_float2;
1170 output
1171 }
1172 }
1173 }
1174
1175 impl ToFloat2 for Vector2F {
1176 fn to_float2(&self) -> vector_float2 {
1177 unsafe {
1178 let mut output = mem::transmute::<_, u32>(self.y().to_bits()) as vector_float2;
1179 output <<= 32;
1180 output |= mem::transmute::<_, u32>(self.x().to_bits()) as vector_float2;
1181 output
1182 }
1183 }
1184 }
1185
1186 impl ToFloat2 for Vector2I {
1187 fn to_float2(&self) -> vector_float2 {
1188 self.to_f32().to_float2()
1189 }
1190 }
1191
1192 impl Color {
1193 pub fn to_uchar4(&self) -> vector_uchar4 {
1194 let mut vec = self.a as vector_uchar4;
1195 vec <<= 8;
1196 vec |= self.b as vector_uchar4;
1197 vec <<= 8;
1198 vec |= self.g as vector_uchar4;
1199 vec <<= 8;
1200 vec |= self.r as vector_uchar4;
1201 vec
1202 }
1203 }
1204}