use std::ffi::c_void;
use std::ptr::NonNull;

use mtl_foundation::{Referencing, UInteger};
use mtl_sys::{msg_send_0, msg_send_1, sel};

use super::enums::{
    AlphaToCoverageState, AlphaToOneState, IndirectCommandBufferSupportState,
    LogicalToPhysicalColorAttachmentMappingState,
};
use super::{
    FunctionDescriptor, PipelineOptions, RenderPipelineColorAttachmentDescriptorArray,
    StaticLinkingDescriptor,
};
use crate::Size;

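/// Describes a mesh render pipeline state, wrapping the Objective-C
/// `MTL4MeshRenderPipelineDescriptor` class.
///
/// A minimal usage sketch (marked `ignore` because it assumes a Metal-capable
/// runtime and a public re-export path for this type, which may differ):
///
/// ```ignore
/// let desc = MeshRenderPipelineDescriptor::new().expect("class not available");
/// desc.set_label("mesh pipeline");
/// desc.set_raster_sample_count(4);
/// assert_eq!(desc.raster_sample_count(), 4);
/// ```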
#[repr(transparent)]
pub struct MeshRenderPipelineDescriptor(NonNull<c_void>);

impl MeshRenderPipelineDescriptor {
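    /// Wraps a raw `MTL4MeshRenderPipelineDescriptor` pointer, returning `None`
    /// if `ptr` is null.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `MTL4MeshRenderPipelineDescriptor` instance.
    /// Ownership of one retain count is transferred to the wrapper, which
    /// releases it on drop.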
    #[inline]
    pub unsafe fn from_raw(ptr: *mut c_void) -> Option<Self> {
        NonNull::new(ptr).map(Self)
    }

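    /// Returns the underlying Objective-C object pointer without transferring
    /// ownership.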
    #[inline]
    pub fn as_raw(&self) -> *mut c_void {
        self.0.as_ptr()
    }

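    /// Allocates and initializes a new `MTL4MeshRenderPipelineDescriptor`.
    ///
    /// Returns `None` if the class is not available at runtime or allocation
    /// fails.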
    pub fn new() -> Option<Self> {
        unsafe {
            let class = mtl_sys::Class::get("MTL4MeshRenderPipelineDescriptor")?;
            let ptr: *mut c_void = msg_send_0(class.as_ptr(), sel!(alloc));
            if ptr.is_null() {
                return None;
            }
            let ptr: *mut c_void = msg_send_0(ptr, sel!(init));
            Self::from_raw(ptr)
        }
    }

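    /// Returns the descriptor's debug label, if one has been set.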
    pub fn label(&self) -> Option<String> {
        unsafe {
            let ns_string: *mut c_void = msg_send_0(self.as_ptr(), sel!(label));
            if ns_string.is_null() {
                return None;
            }
            let c_str: *const i8 = msg_send_0(ns_string, sel!(UTF8String));
            if c_str.is_null() {
                return None;
            }
            Some(
                std::ffi::CStr::from_ptr(c_str)
                    .to_string_lossy()
                    .into_owned(),
            )
        }
    }

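    /// Sets a debug label for the descriptor.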
    pub fn set_label(&self, label: &str) {
        if let Some(ns_label) = mtl_foundation::String::from_str(label) {
            unsafe {
                let _: () = msg_send_1(self.as_ptr(), sel!(setLabel:), ns_label.as_ptr());
            }
        }
    }

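    /// Returns the pipeline options object associated with this descriptor.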
    pub fn options(&self) -> Option<PipelineOptions> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(options));
            PipelineOptions::from_raw(ptr)
        }
    }

    pub fn set_options(&self, options: &PipelineOptions) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setOptions:), options.as_ptr());
        }
    }

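    /// Returns the function descriptor for the object stage of the mesh
    /// pipeline, if one has been set.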
    pub fn object_function_descriptor(&self) -> Option<FunctionDescriptor> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(objectFunctionDescriptor));
            FunctionDescriptor::from_raw(ptr)
        }
    }

    pub fn set_object_function_descriptor(&self, descriptor: &FunctionDescriptor) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setObjectFunctionDescriptor:),
                descriptor.as_ptr(),
            );
        }
    }

    pub fn object_static_linking_descriptor(&self) -> Option<StaticLinkingDescriptor> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(objectStaticLinkingDescriptor));
            StaticLinkingDescriptor::from_raw(ptr)
        }
    }

    pub fn set_object_static_linking_descriptor(&self, descriptor: &StaticLinkingDescriptor) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setObjectStaticLinkingDescriptor:),
                descriptor.as_ptr(),
            );
        }
    }

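    /// Returns the function descriptor for the mesh stage of the pipeline, if
    /// one has been set.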
    pub fn mesh_function_descriptor(&self) -> Option<FunctionDescriptor> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(meshFunctionDescriptor));
            FunctionDescriptor::from_raw(ptr)
        }
    }

    pub fn set_mesh_function_descriptor(&self, descriptor: &FunctionDescriptor) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setMeshFunctionDescriptor:),
                descriptor.as_ptr(),
            );
        }
    }

    pub fn mesh_static_linking_descriptor(&self) -> Option<StaticLinkingDescriptor> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(meshStaticLinkingDescriptor));
            StaticLinkingDescriptor::from_raw(ptr)
        }
    }

    pub fn set_mesh_static_linking_descriptor(&self, descriptor: &StaticLinkingDescriptor) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setMeshStaticLinkingDescriptor:),
                descriptor.as_ptr(),
            );
        }
    }

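    /// Returns the function descriptor for the fragment stage of the pipeline,
    /// if one has been set.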
    pub fn fragment_function_descriptor(&self) -> Option<FunctionDescriptor> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(fragmentFunctionDescriptor));
            FunctionDescriptor::from_raw(ptr)
        }
    }

    pub fn set_fragment_function_descriptor(&self, descriptor: &FunctionDescriptor) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setFragmentFunctionDescriptor:),
                descriptor.as_ptr(),
            );
        }
    }

    pub fn fragment_static_linking_descriptor(&self) -> Option<StaticLinkingDescriptor> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(fragmentStaticLinkingDescriptor));
            StaticLinkingDescriptor::from_raw(ptr)
        }
    }

    pub fn set_fragment_static_linking_descriptor(&self, descriptor: &StaticLinkingDescriptor) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setFragmentStaticLinkingDescriptor:),
                descriptor.as_ptr(),
            );
        }
    }

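    /// Returns the array of color attachment descriptors for this pipeline.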
    pub fn color_attachments(&self) -> Option<RenderPipelineColorAttachmentDescriptorArray> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(colorAttachments));
            RenderPipelineColorAttachmentDescriptorArray::from_raw(ptr)
        }
    }

    pub fn color_attachment_mapping_state(&self) -> LogicalToPhysicalColorAttachmentMappingState {
        unsafe { msg_send_0(self.as_ptr(), sel!(colorAttachmentMappingState)) }
    }

    pub fn set_color_attachment_mapping_state(
        &self,
        state: LogicalToPhysicalColorAttachmentMappingState,
    ) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setColorAttachmentMappingState:), state);
        }
    }

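    /// Returns whether rasterization is enabled for this pipeline.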
    pub fn is_rasterization_enabled(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(isRasterizationEnabled)) }
    }

    pub fn set_rasterization_enabled(&self, enabled: bool) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setRasterizationEnabled:), enabled);
        }
    }

    pub fn raster_sample_count(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(rasterSampleCount)) }
    }

    pub fn set_raster_sample_count(&self, count: UInteger) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setRasterSampleCount:), count);
        }
    }

    pub fn alpha_to_coverage_state(&self) -> AlphaToCoverageState {
        unsafe { msg_send_0(self.as_ptr(), sel!(alphaToCoverageState)) }
    }

    pub fn set_alpha_to_coverage_state(&self, state: AlphaToCoverageState) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setAlphaToCoverageState:), state);
        }
    }

    pub fn alpha_to_one_state(&self) -> AlphaToOneState {
        unsafe { msg_send_0(self.as_ptr(), sel!(alphaToOneState)) }
    }

    pub fn set_alpha_to_one_state(&self, state: AlphaToOneState) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setAlphaToOneState:), state);
        }
    }

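    /// Returns the maximum number of threads per threadgroup for the object
    /// stage.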
    pub fn max_total_threads_per_object_threadgroup(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxTotalThreadsPerObjectThreadgroup)) }
    }

    pub fn set_max_total_threads_per_object_threadgroup(&self, max: UInteger) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setMaxTotalThreadsPerObjectThreadgroup:),
                max,
            );
        }
    }

    pub fn required_threads_per_object_threadgroup(&self) -> Size {
        unsafe { msg_send_0(self.as_ptr(), sel!(requiredThreadsPerObjectThreadgroup)) }
    }

    pub fn set_required_threads_per_object_threadgroup(&self, size: Size) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setRequiredThreadsPerObjectThreadgroup:),
                size,
            );
        }
    }

    pub fn object_threadgroup_size_is_multiple_of_thread_execution_width(&self) -> bool {
        unsafe {
            msg_send_0(
                self.as_ptr(),
                sel!(objectThreadgroupSizeIsMultipleOfThreadExecutionWidth),
            )
        }
    }

    pub fn set_object_threadgroup_size_is_multiple_of_thread_execution_width(&self, value: bool) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setObjectThreadgroupSizeIsMultipleOfThreadExecutionWidth:),
                value,
            );
        }
    }

    pub fn max_total_threads_per_mesh_threadgroup(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxTotalThreadsPerMeshThreadgroup)) }
    }

    pub fn set_max_total_threads_per_mesh_threadgroup(&self, max: UInteger) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setMaxTotalThreadsPerMeshThreadgroup:),
                max,
            );
        }
    }

    pub fn required_threads_per_mesh_threadgroup(&self) -> Size {
        unsafe { msg_send_0(self.as_ptr(), sel!(requiredThreadsPerMeshThreadgroup)) }
    }

    pub fn set_required_threads_per_mesh_threadgroup(&self, size: Size) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setRequiredThreadsPerMeshThreadgroup:),
                size,
            );
        }
    }

    pub fn mesh_threadgroup_size_is_multiple_of_thread_execution_width(&self) -> bool {
        unsafe {
            msg_send_0(
                self.as_ptr(),
                sel!(meshThreadgroupSizeIsMultipleOfThreadExecutionWidth),
            )
        }
    }

    pub fn set_mesh_threadgroup_size_is_multiple_of_thread_execution_width(&self, value: bool) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setMeshThreadgroupSizeIsMultipleOfThreadExecutionWidth:),
                value,
            );
        }
    }

    pub fn max_total_threadgroups_per_mesh_grid(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxTotalThreadgroupsPerMeshGrid)) }
    }

    pub fn set_max_total_threadgroups_per_mesh_grid(&self, max: UInteger) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setMaxTotalThreadgroupsPerMeshGrid:),
                max,
            );
        }
    }

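    /// Returns the size, in bytes, of the payload the object stage passes to
    /// the mesh stage.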
    pub fn payload_memory_length(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(payloadMemoryLength)) }
    }

    pub fn set_payload_memory_length(&self, length: UInteger) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setPayloadMemoryLength:), length);
        }
    }

    pub fn max_vertex_amplification_count(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxVertexAmplificationCount)) }
    }

    pub fn set_max_vertex_amplification_count(&self, count: UInteger) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setMaxVertexAmplificationCount:), count);
        }
    }

    pub fn support_object_binary_linking(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(supportObjectBinaryLinking)) }
    }

    pub fn set_support_object_binary_linking(&self, support: bool) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setSupportObjectBinaryLinking:), support);
        }
    }

    pub fn support_mesh_binary_linking(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(supportMeshBinaryLinking)) }
    }

    pub fn set_support_mesh_binary_linking(&self, support: bool) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(setSupportMeshBinaryLinking:), support);
        }
    }

    pub fn support_fragment_binary_linking(&self) -> bool {
        unsafe { msg_send_0(self.as_ptr(), sel!(supportFragmentBinaryLinking)) }
    }

    pub fn set_support_fragment_binary_linking(&self, support: bool) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setSupportFragmentBinaryLinking:),
                support,
            );
        }
    }

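    /// Returns whether the pipeline may be used with indirect command buffers.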
    pub fn support_indirect_command_buffers(&self) -> IndirectCommandBufferSupportState {
        unsafe { msg_send_0(self.as_ptr(), sel!(supportIndirectCommandBuffers)) }
    }

    pub fn set_support_indirect_command_buffers(&self, state: IndirectCommandBufferSupportState) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setSupportIndirectCommandBuffers:),
                state,
            );
        }
    }

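    /// Resets the descriptor to its default values.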
    pub fn reset(&self) {
        unsafe {
            let _: () = msg_send_0(self.as_ptr(), sel!(reset));
        }
    }
}

impl Clone for MeshRenderPipelineDescriptor {
    fn clone(&self) -> Self {
        unsafe {
            mtl_sys::msg_send_0::<*mut c_void>(self.as_ptr(), mtl_sys::sel!(retain));
        }
        Self(self.0)
    }
}

impl Drop for MeshRenderPipelineDescriptor {
    fn drop(&mut self) {
        unsafe {
            mtl_sys::msg_send_0::<()>(self.as_ptr(), mtl_sys::sel!(release));
        }
    }
}

impl Referencing for MeshRenderPipelineDescriptor {
    #[inline]
    fn as_ptr(&self) -> *const c_void {
        self.0.as_ptr()
    }
}

unsafe impl Send for MeshRenderPipelineDescriptor {}
unsafe impl Sync for MeshRenderPipelineDescriptor {}

impl std::fmt::Debug for MeshRenderPipelineDescriptor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MeshRenderPipelineDescriptor")
            .field("label", &self.label())
            .field("raster_sample_count", &self.raster_sample_count())
            .field("is_rasterization_enabled", &self.is_rasterization_enabled())
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_mesh_render_pipeline_descriptor_size() {
        assert_eq!(
            std::mem::size_of::<MeshRenderPipelineDescriptor>(),
            std::mem::size_of::<*mut c_void>()
        );
    }
}