use std::ffi::c_void;
use std::ptr::NonNull;

use mtl_foundation::{Referencing, UInteger};
use mtl_sys::{msg_send_0, msg_send_1, sel};

use crate::enums::{PixelFormat, ShaderValidation};
use crate::types::Size;

use super::{PipelineBufferDescriptorArray, RenderPipelineColorAttachmentDescriptorArray};

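/// Wraps the Metal `MTLMeshRenderPipelineDescriptor` class, which configures an
/// object/mesh/fragment render pipeline before it is compiled into a pipeline
/// state.
///
/// The wrapper owns a single reference to the underlying Objective-C object:
/// `Drop` sends `release` and `Clone` sends `copy`.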
pub struct MeshRenderPipelineDescriptor(pub(crate) NonNull<c_void>);

impl MeshRenderPipelineDescriptor {
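    /// Sends `alloc` to the `MTLMeshRenderPipelineDescriptor` class.
    ///
    /// Returns `None` if the class is not registered with the Objective-C
    /// runtime or if `alloc` returns a null pointer.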
    pub fn alloc() -> Option<Self> {
        unsafe {
            let cls = mtl_sys::Class::get("MTLMeshRenderPipelineDescriptor")?;
            let ptr: *mut c_void = msg_send_0(cls.as_ptr(), sel!(alloc));
            Self::from_raw(ptr)
        }
    }

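    /// Sends `init` to the receiver and wraps the returned object, or returns
    /// `None` if `init` yields a null pointer.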
    pub fn init(&self) -> Option<Self> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(init));
            Self::from_raw(ptr)
        }
    }

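    /// Allocates and initializes a fresh descriptor in one step.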
    pub fn new() -> Option<Self> {
        // `init` consumes the +1 reference produced by `alloc`, so the
        // temporary wrapper must not send an extra `release` when dropped.
        let allocated = std::mem::ManuallyDrop::new(Self::alloc()?);
        allocated.init()
    }

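    /// Wraps a raw `MTLMeshRenderPipelineDescriptor` pointer, returning `None`
    /// if the pointer is null.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `MTLMeshRenderPipelineDescriptor`, and the
    /// caller must hand over one reference, which the wrapper balances with
    /// the `release` sent on `Drop`.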
    #[inline]
    pub unsafe fn from_raw(ptr: *mut c_void) -> Option<Self> {
        NonNull::new(ptr).map(Self)
    }

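    /// Returns the underlying Objective-C object as a raw pointer without
    /// touching its reference count.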
    #[inline]
    pub fn as_raw(&self) -> *mut c_void {
        self.0.as_ptr()
    }

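    /// Copies the descriptor's `label` into a Rust `String`, or returns `None`
    /// if no label is set.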
    pub fn label(&self) -> Option<String> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(label));
            if ptr.is_null() {
                return None;
            }
            let utf8_ptr: *const std::ffi::c_char = msg_send_0(ptr as *const c_void, sel!(UTF8String));
            if utf8_ptr.is_null() {
                return None;
            }
            let c_str = std::ffi::CStr::from_ptr(utf8_ptr);
            Some(c_str.to_string_lossy().into_owned())
        }
    }

    pub fn set_label(&self, label: &str) {
        if let Some(ns_label) = mtl_foundation::String::from_str(label) {
            unsafe {
                msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setLabel:), ns_label.as_ptr());
            }
        }
    }

    pub fn object_function(&self) -> Option<crate::Function> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(objectFunction));
            if ptr.is_null() {
                return None;
            }
            msg_send_0::<*mut c_void>(ptr as *const c_void, sel!(retain));
            crate::Function::from_raw(ptr)
        }
    }

    pub fn set_object_function(&self, function: Option<&crate::Function>) {
        let ptr = function.map(|f| f.as_ptr()).unwrap_or(std::ptr::null());
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setObjectFunction:), ptr);
        }
    }

    pub fn object_buffers(&self) -> Option<PipelineBufferDescriptorArray> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(objectBuffers));
            PipelineBufferDescriptorArray::from_raw(ptr)
        }
    }

    pub fn object_linked_functions(&self) -> Option<crate::LinkedFunctions> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(objectLinkedFunctions));
            if ptr.is_null() {
                return None;
            }
            msg_send_0::<*mut c_void>(ptr as *const c_void, sel!(retain));
            crate::LinkedFunctions::from_raw(ptr)
        }
    }

    pub fn set_object_linked_functions(&self, functions: Option<&crate::LinkedFunctions>) {
        let ptr = functions.map(|f| f.as_ptr()).unwrap_or(std::ptr::null());
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setObjectLinkedFunctions:), ptr);
        }
    }

    pub fn mesh_function(&self) -> Option<crate::Function> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(meshFunction));
            if ptr.is_null() {
                return None;
            }
            msg_send_0::<*mut c_void>(ptr as *const c_void, sel!(retain));
            crate::Function::from_raw(ptr)
        }
    }

    pub fn set_mesh_function(&self, function: Option<&crate::Function>) {
        let ptr = function.map(|f| f.as_ptr()).unwrap_or(std::ptr::null());
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setMeshFunction:), ptr);
        }
    }

    pub fn mesh_buffers(&self) -> Option<PipelineBufferDescriptorArray> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(meshBuffers));
            PipelineBufferDescriptorArray::from_raw(ptr)
        }
    }

    pub fn mesh_linked_functions(&self) -> Option<crate::LinkedFunctions> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(meshLinkedFunctions));
            if ptr.is_null() {
                return None;
            }
            msg_send_0::<*mut c_void>(ptr as *const c_void, sel!(retain));
            crate::LinkedFunctions::from_raw(ptr)
        }
    }

    pub fn set_mesh_linked_functions(&self, functions: Option<&crate::LinkedFunctions>) {
        let ptr = functions.map(|f| f.as_ptr()).unwrap_or(std::ptr::null());
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setMeshLinkedFunctions:), ptr);
        }
    }

    pub fn fragment_function(&self) -> Option<crate::Function> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(fragmentFunction));
            if ptr.is_null() {
                return None;
            }
            msg_send_0::<*mut c_void>(ptr as *const c_void, sel!(retain));
            crate::Function::from_raw(ptr)
        }
    }

    pub fn set_fragment_function(&self, function: Option<&crate::Function>) {
        let ptr = function.map(|f| f.as_ptr()).unwrap_or(std::ptr::null());
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setFragmentFunction:), ptr);
        }
    }

    pub fn fragment_buffers(&self) -> Option<PipelineBufferDescriptorArray> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(fragmentBuffers));
            PipelineBufferDescriptorArray::from_raw(ptr)
        }
    }

    pub fn fragment_linked_functions(&self) -> Option<crate::LinkedFunctions> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(fragmentLinkedFunctions));
            if ptr.is_null() {
                return None;
            }
            msg_send_0::<*mut c_void>(ptr as *const c_void, sel!(retain));
            crate::LinkedFunctions::from_raw(ptr)
        }
    }

    pub fn set_fragment_linked_functions(&self, functions: Option<&crate::LinkedFunctions>) {
        let ptr = functions.map(|f| f.as_ptr()).unwrap_or(std::ptr::null());
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setFragmentLinkedFunctions:), ptr);
        }
    }

    pub fn color_attachments(&self) -> Option<RenderPipelineColorAttachmentDescriptorArray> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(colorAttachments));
            RenderPipelineColorAttachmentDescriptorArray::from_raw(ptr)
        }
    }

    #[inline]
    pub fn depth_attachment_pixel_format(&self) -> PixelFormat {
        unsafe { msg_send_0(self.as_ptr(), sel!(depthAttachmentPixelFormat)) }
    }

    #[inline]
    pub fn set_depth_attachment_pixel_format(&self, format: PixelFormat) {
        unsafe {
            msg_send_1::<(), PixelFormat>(
                self.as_ptr(),
                sel!(setDepthAttachmentPixelFormat:),
                format,
            );
        }
    }

    #[inline]
    pub fn stencil_attachment_pixel_format(&self) -> PixelFormat {
        unsafe { msg_send_0(self.as_ptr(), sel!(stencilAttachmentPixelFormat)) }
    }

    #[inline]
    pub fn set_stencil_attachment_pixel_format(&self, format: PixelFormat) {
        unsafe {
            msg_send_1::<(), PixelFormat>(
                self.as_ptr(),
                sel!(setStencilAttachmentPixelFormat:),
                format,
            );
        }
    }

    #[inline]
    pub fn raster_sample_count(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(rasterSampleCount)) }
    }

    #[inline]
    pub fn set_raster_sample_count(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(self.as_ptr(), sel!(setRasterSampleCount:), count);
        }
    }

    #[inline]
    pub fn is_alpha_to_coverage_enabled(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(isAlphaToCoverageEnabled)) }
    }

    #[inline]
    pub fn set_alpha_to_coverage_enabled(&self, enabled: bool) {
        unsafe {
            msg_send_1::<(), bool>(self.as_ptr(), sel!(setAlphaToCoverageEnabled:), enabled);
        }
    }

    #[inline]
    pub fn is_alpha_to_one_enabled(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(isAlphaToOneEnabled)) }
    }

    #[inline]
    pub fn set_alpha_to_one_enabled(&self, enabled: bool) {
        unsafe {
            msg_send_1::<(), bool>(self.as_ptr(), sel!(setAlphaToOneEnabled:), enabled);
        }
    }

    #[inline]
    pub fn is_rasterization_enabled(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(isRasterizationEnabled)) }
    }

    #[inline]
    pub fn set_rasterization_enabled(&self, enabled: bool) {
        unsafe {
            msg_send_1::<(), bool>(self.as_ptr(), sel!(setRasterizationEnabled:), enabled);
        }
    }

    #[inline]
    pub fn max_total_threadgroups_per_mesh_grid(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxTotalThreadgroupsPerMeshGrid)) }
    }

    #[inline]
    pub fn set_max_total_threadgroups_per_mesh_grid(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(
                self.as_ptr(),
                sel!(setMaxTotalThreadgroupsPerMeshGrid:),
                count,
            );
        }
    }

    #[inline]
    pub fn max_total_threads_per_object_threadgroup(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxTotalThreadsPerObjectThreadgroup)) }
    }

    #[inline]
    pub fn set_max_total_threads_per_object_threadgroup(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(
                self.as_ptr(),
                sel!(setMaxTotalThreadsPerObjectThreadgroup:),
                count,
            );
        }
    }

    #[inline]
    pub fn max_total_threads_per_mesh_threadgroup(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxTotalThreadsPerMeshThreadgroup)) }
    }

    #[inline]
    pub fn set_max_total_threads_per_mesh_threadgroup(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(
                self.as_ptr(),
                sel!(setMaxTotalThreadsPerMeshThreadgroup:),
                count,
            );
        }
    }

    #[inline]
    pub fn object_threadgroup_size_is_multiple_of_thread_execution_width(&self) -> bool {
        unsafe {
            msg_send_0(
                self.as_ptr(),
                sel!(objectThreadgroupSizeIsMultipleOfThreadExecutionWidth),
            )
        }
    }

    #[inline]
    pub fn set_object_threadgroup_size_is_multiple_of_thread_execution_width(
        &self,
        is_multiple: bool,
    ) {
        unsafe {
            msg_send_1::<(), bool>(
                self.as_ptr(),
                sel!(setObjectThreadgroupSizeIsMultipleOfThreadExecutionWidth:),
                is_multiple,
            );
        }
    }

    #[inline]
    pub fn mesh_threadgroup_size_is_multiple_of_thread_execution_width(&self) -> bool {
        unsafe {
            msg_send_0(
                self.as_ptr(),
                sel!(meshThreadgroupSizeIsMultipleOfThreadExecutionWidth),
            )
        }
    }

    #[inline]
    pub fn set_mesh_threadgroup_size_is_multiple_of_thread_execution_width(
        &self,
        is_multiple: bool,
    ) {
        unsafe {
            msg_send_1::<(), bool>(
                self.as_ptr(),
                sel!(setMeshThreadgroupSizeIsMultipleOfThreadExecutionWidth:),
                is_multiple,
            );
        }
    }

    #[inline]
    pub fn required_threads_per_object_threadgroup(&self) -> Size {
        unsafe { msg_send_0(self.as_ptr(), sel!(requiredThreadsPerObjectThreadgroup)) }
    }

    #[inline]
    pub fn set_required_threads_per_object_threadgroup(&self, size: Size) {
        unsafe {
            msg_send_1::<(), Size>(
                self.as_ptr(),
                sel!(setRequiredThreadsPerObjectThreadgroup:),
                size,
            );
        }
    }

    #[inline]
    pub fn required_threads_per_mesh_threadgroup(&self) -> Size {
        unsafe { msg_send_0(self.as_ptr(), sel!(requiredThreadsPerMeshThreadgroup)) }
    }

    #[inline]
    pub fn set_required_threads_per_mesh_threadgroup(&self, size: Size) {
        unsafe {
            msg_send_1::<(), Size>(
                self.as_ptr(),
                sel!(setRequiredThreadsPerMeshThreadgroup:),
                size,
            );
        }
    }

    #[inline]
    pub fn payload_memory_length(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(payloadMemoryLength)) }
    }

    #[inline]
    pub fn set_payload_memory_length(&self, length: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(self.as_ptr(), sel!(setPayloadMemoryLength:), length);
        }
    }

    #[inline]
    pub fn max_vertex_amplification_count(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxVertexAmplificationCount)) }
    }

    #[inline]
    pub fn set_max_vertex_amplification_count(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(self.as_ptr(), sel!(setMaxVertexAmplificationCount:), count);
        }
    }

    #[inline]
    pub fn support_indirect_command_buffers(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(supportIndirectCommandBuffers)) }
    }

    #[inline]
    pub fn set_support_indirect_command_buffers(&self, support: bool) {
        unsafe {
            msg_send_1::<(), bool>(
                self.as_ptr(),
                sel!(setSupportIndirectCommandBuffers:),
                support,
            );
        }
    }

    #[inline]
    pub fn shader_validation(&self) -> ShaderValidation {
        unsafe { msg_send_0(self.as_ptr(), sel!(shaderValidation)) }
    }

    #[inline]
    pub fn set_shader_validation(&self, validation: ShaderValidation) {
        unsafe {
            msg_send_1::<(), ShaderValidation>(
                self.as_ptr(),
                sel!(setShaderValidation:),
                validation,
            );
        }
    }

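    /// Returns the raw, unretained pointer backing the `binaryArchives`
    /// property; the result may be null.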
    pub fn binary_archives_raw(&self) -> *mut c_void {
        unsafe { msg_send_0(self.as_ptr(), sel!(binaryArchives)) }
    }

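    /// Sets the `binaryArchives` property from a raw pointer.
    ///
    /// # Safety
    ///
    /// `archives` must be null or point to a valid `NSArray` of
    /// `MTLBinaryArchive` objects.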
    pub unsafe fn set_binary_archives_raw(&self, archives: *const c_void) {
        unsafe {
            msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setBinaryArchives:), archives);
        }
    }

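    /// Sends `reset` to the descriptor, restoring every property to its
    /// default value.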
    #[inline]
    pub fn reset(&self) {
        unsafe {
            msg_send_0::<()>(self.as_ptr(), sel!(reset));
        }
    }
}

impl Default for MeshRenderPipelineDescriptor {
    fn default() -> Self {
        Self::new().expect("failed to create MeshRenderPipelineDescriptor")
    }
}

impl Clone for MeshRenderPipelineDescriptor {
    fn clone(&self) -> Self {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(copy));
            Self::from_raw(ptr).expect("copy returned null")
        }
    }
}

impl Drop for MeshRenderPipelineDescriptor {
    fn drop(&mut self) {
        unsafe {
            msg_send_0::<()>(self.as_ptr(), sel!(release));
        }
    }
}

impl Referencing for MeshRenderPipelineDescriptor {
    #[inline]
    fn as_ptr(&self) -> *const c_void {
        self.0.as_ptr()
    }
}

unsafe impl Send for MeshRenderPipelineDescriptor {}
unsafe impl Sync for MeshRenderPipelineDescriptor {}

impl std::fmt::Debug for MeshRenderPipelineDescriptor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MeshRenderPipelineDescriptor")
            .field("label", &self.label())
            .field("raster_sample_count", &self.raster_sample_count())
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_mesh_render_pipeline_descriptor_size() {
        assert_eq!(
            std::mem::size_of::<MeshRenderPipelineDescriptor>(),
            std::mem::size_of::<*mut c_void>()
        );
    }
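
    // A minimal round-trip sketch over the accessors defined above. It assumes
    // a Metal-capable Objective-C runtime (macOS/iOS), so it is `#[ignore]`d
    // and must be run explicitly, e.g. with `cargo test -- --ignored`, on such
    // a host. The concrete values are arbitrary examples, not requirements.
    #[test]
    #[ignore]
    fn test_property_round_trip() {
        let desc = MeshRenderPipelineDescriptor::new()
            .expect("MTLMeshRenderPipelineDescriptor should be available");

        desc.set_label("mesh pipeline");
        assert_eq!(desc.label().as_deref(), Some("mesh pipeline"));

        desc.set_raster_sample_count(4);
        assert_eq!(desc.raster_sample_count(), 4);

        desc.set_payload_memory_length(16 * 1024);
        assert_eq!(desc.payload_memory_length(), 16 * 1024);

        desc.set_rasterization_enabled(false);
        assert!(!desc.is_rasterization_enabled());

        // Restore the default property values before the descriptor is reused.
        desc.reset();
    }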
}