use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::foundation::NSUInteger;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

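/// Renders a `Scene` with Metal: one pipeline per primitive kind (quads,
/// shadows, glyph/icon sprites, images, paths, underlines) plus a shared
/// `instances` buffer that every draw call within a frame bump-allocates from.
///
/// A minimal construction sketch (assumptions: `MyFontSystem` is a
/// hypothetical `platform::FontSystem` implementation, and the pixel format
/// and scale factor would normally come from the window's layer):
///
/// ```ignore
/// let device = metal::Device::system_default().expect("no Metal device");
/// let fonts: Arc<dyn platform::FontSystem> = Arc::new(MyFontSystem::new());
/// let mut renderer = Renderer::new(device, MTLPixelFormat::BGRA8Unorm, 2.0, fonts);
/// ```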
pub struct Renderer {
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

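/// A path that has been rasterized into one of the path atlases, remembering
/// which layer it belongs to so it can be composited in the correct order.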
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

impl Renderer {
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

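        // A unit square expressed as two triangles; every instanced draw
        // positions and scales these six vertices in its vertex shader.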
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768));
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
        }
    }

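    /// Renders a frame in two phases: first every path is rasterized into an
    /// offscreen atlas texture, then each layer's primitives (including the
    /// cached path sprites) are composited into `output`. `offset` tracks how
    /// much of the shared instance buffer the frame has consumed; since the
    /// buffer is managed, the whole written range is flushed to the GPU once
    /// via `did_modify_range`.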
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }

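    /// Rasterizes every path in the scene into a path atlas, batching the
    /// vertices of consecutive paths that share an atlas into a single render
    /// pass. Returns one `PathSprite` per path, in layer order, to be
    /// composited later by `render_path_sprites`.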
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

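    /// Copies the batched path vertices into the instance buffer and encodes
    /// a render pass that draws them into the given atlas texture using the
    /// path atlas pipeline.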
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

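    /// Encodes the main render pass: clears `output`, then draws each layer's
    /// primitives in order, scissored to the layer's clip bounds.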
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

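    /// Restricts subsequent draws to the layer's clip bounds (or the whole
    /// drawable when the layer has none) via a scissor rect in device pixels.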
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

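    /// Draws the layer's shadows. Like the other `render_*` methods below, it
    /// follows a common pattern: align the instance-buffer offset, bind the
    /// unit-quad vertices and the instance data at that offset, write one
    /// shader struct per primitive directly into the buffer, then issue a
    /// single instanced draw of six vertices.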
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

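    /// Draws glyphs and icons. Sprites are grouped by the atlas texture that
    /// holds their rasterized pixels so each atlas is bound only once, and
    /// origins are floored to the pixel grid to keep text crisp. Icons are
    /// rasterized at twice their target size.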
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        self.sprite_cache.set_scale_factor(scale_factor);

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

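    /// Draws images, grouped by the image-cache atlas they were uploaded to.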
    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

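    /// Composites the path sprites rasterized earlier in the frame, consuming
    /// from the peekable iterator only the sprites that belong to `layer_id`
    /// and batching consecutive sprites that share an atlas into one draw.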
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8)
                    .offset(*offset as isize)
                    as *mut shaders::GPUISprite;
                *buffer_contents.offset(atlas_sprite_count as isize) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

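    /// Draws underlines; squiggly underlines get triple the thickness so the
    /// fragment shader has room to draw the wave.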
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).offset(*offset as isize)
                as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.offset(ix as isize)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

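// Paths are rasterized into single-channel R16Float atlases: the textures are
// drawn to during the path pass and sampled when the path sprites are
// composited, hence RenderTarget | ShaderRead usage and private storage.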
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

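/// Rounds the instance-buffer offset up to a multiple of 256 bytes, the
/// conservative buffer-offset alignment Metal requires on macOS for offsets
/// passed to `set_vertex_buffer`.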
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy.
    }
}

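// Builds a pipeline that blends straight-alpha source colors over the
// destination (source-alpha / one-minus-source-alpha for RGB) while
// accumulating coverage additively in the alpha channel.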
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

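// The path atlas pipeline blends purely additively (one/one factors), so each
// fragment's signed winding contribution accumulates in the R16Float atlas;
// the sprite shader later resolves the accumulated winding to coverage.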
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

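    /// Bit-packs a 2D point into the integer-typed `vector_float2` that
    /// bindgen generates for the simd type shared with the Metal shaders:
    /// x occupies the low 32 bits and y the high 32 bits.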
    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.y().to_bits() as vector_float2;
            output <<= 32;
            output |= self.x().to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
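        /// Packs the color into the shaders' `vector_uchar4` layout, with `r`
        /// in the lowest byte and `a` in the highest.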
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}