use std::ffi::c_void;
use std::ptr::NonNull;

use mtl_foundation::{Referencing, UInteger};
use mtl_sys::{msg_send_0, msg_send_1, msg_send_2, msg_send_3, msg_send_4, msg_send_5, sel};

use super::enums::VisibilityOptions;
use crate::{ComputePipelineState, Device, Size};

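/// Encodes compute work into a command buffer via the underlying Objective-C
/// encoder object; this interface also carries blit-style copies and
/// acceleration-structure operations.
///
/// A minimal sketch of an encode pass; how the encoder and resources are
/// obtained is outside this file, and the `Size` field layout shown mirrors
/// `MTLSize` but is an assumption:
///
/// ```ignore
/// encoder.set_compute_pipeline_state(&pipeline);
/// encoder.set_buffer(buffer_ptr, 0, 0);
/// let groups = (element_count + 63) / 64; // ceil(element_count / 64)
/// encoder.dispatch_threadgroups(
///     Size { width: groups, height: 1, depth: 1 }, // field names assumed
///     Size { width: 64, height: 1, depth: 1 },
/// );
/// encoder.end_encoding();
/// ```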
#[repr(transparent)]
pub struct ComputeCommandEncoder(NonNull<c_void>);

impl ComputeCommandEncoder {
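    /// Wraps a raw compute command encoder pointer, returning `None` if it is null.
    ///
    /// # Safety
    ///
    /// `ptr` must be null or point to a valid encoder object. The wrapper
    /// assumes ownership of one retain count, which `Drop` releases.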
    #[inline]
    pub unsafe fn from_raw(ptr: *mut c_void) -> Option<Self> {
        NonNull::new(ptr).map(Self)
    }

    #[inline]
    pub fn as_raw(&self) -> *mut c_void {
        self.0.as_ptr()
    }

    pub fn device(&self) -> Option<Device> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(device));
            Device::from_raw(ptr)
        }
    }

    pub fn label(&self) -> Option<String> {
        unsafe {
            let ns_string: *mut c_void = msg_send_0(self.as_ptr(), sel!(label));
            if ns_string.is_null() {
                return None;
            }
            // Use `c_char` rather than `i8` so the cast is portable across
            // targets where `char` is unsigned.
            let c_str: *const std::ffi::c_char = msg_send_0(ns_string, sel!(UTF8String));
            if c_str.is_null() {
                return None;
            }
            Some(
                std::ffi::CStr::from_ptr(c_str)
                    .to_string_lossy()
                    .into_owned(),
            )
        }
    }

    pub fn set_label(&self, label: &str) {
        if let Some(ns_label) = mtl_foundation::String::from_str(label) {
            unsafe {
                let _: () = msg_send_1(self.as_ptr(), sel!(setLabel:), ns_label.as_ptr());
            }
        }
    }

    pub fn set_compute_pipeline_state(&self, pipeline: &ComputePipelineState) {
        unsafe {
            let _: () = msg_send_1(
                self.as_ptr(),
                sel!(setComputePipelineState:),
                pipeline.as_ptr(),
            );
        }
    }

    pub fn set_argument_table(&self, table: *const c_void, index: UInteger) {
        unsafe {
            let _: () = msg_send_2(self.as_ptr(), sel!(setArgumentTable:atIndex:), table, index);
        }
    }

    pub fn set_buffer(&self, buffer: *const c_void, offset: UInteger, index: UInteger) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(setBuffer:offset:atIndex:),
                buffer,
                offset,
                index,
            );
        }
    }

    pub fn set_buffers(
        &self,
        buffers: *const *const c_void,
        offsets: *const UInteger,
        range_location: UInteger,
        range_length: UInteger,
    ) {
        unsafe {
            // Pass an NSRange-compatible struct rather than a Rust tuple,
            // whose layout is unspecified across the FFI boundary.
            let range = mtl_foundation::Range::new(range_location, range_length);
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(setBuffers:offsets:withRange:),
                buffers,
                offsets,
                range,
            );
        }
    }

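    /// Copies `length` bytes inline into the command stream. Metal's guidance
    /// is to prefer this over a dedicated buffer only for small, transient
    /// data (on the order of a few kilobytes).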
    pub fn set_bytes(&self, bytes: *const c_void, length: UInteger, index: UInteger) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(setBytes:length:atIndex:),
                bytes,
                length,
                index,
            );
        }
    }

    pub fn set_texture(&self, texture: *const c_void, index: UInteger) {
        unsafe {
            let _: () = msg_send_2(self.as_ptr(), sel!(setTexture:atIndex:), texture, index);
        }
    }

    pub fn set_textures(
        &self,
        textures: *const *const c_void,
        range_location: UInteger,
        range_length: UInteger,
    ) {
        unsafe {
            // As in `set_buffers`, use an NSRange-compatible struct, not a tuple.
            let range = mtl_foundation::Range::new(range_location, range_length);
            let _: () = msg_send_2(self.as_ptr(), sel!(setTextures:withRange:), textures, range);
        }
    }

    pub fn set_sampler_state(&self, sampler: *const c_void, index: UInteger) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(setSamplerState:atIndex:),
                sampler,
                index,
            );
        }
    }

    pub fn set_sampler_state_with_lod(
        &self,
        sampler: *const c_void,
        lod_min_clamp: f32,
        lod_max_clamp: f32,
        index: UInteger,
    ) {
        unsafe {
            let _: () = msg_send_4(
                self.as_ptr(),
                sel!(setSamplerState:lodMinClamp:lodMaxClamp:atIndex:),
                sampler,
                lod_min_clamp,
                lod_max_clamp,
                index,
            );
        }
    }

    pub fn set_threadgroup_memory_length(&self, length: UInteger, index: UInteger) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(setThreadgroupMemoryLength:atIndex:),
                length,
                index,
            );
        }
    }

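    /// Dispatches a grid of `threadgroups_per_grid` threadgroups, each of
    /// `threads_per_threadgroup` threads. When the total work size is not a
    /// multiple of the threadgroup size, callers typically round up with a
    /// ceiling division, as in this sketch (`Size` field names assumed):
    ///
    /// ```ignore
    /// let per_group = 256;
    /// let groups = (total_threads + per_group - 1) / per_group;
    /// encoder.dispatch_threadgroups(
    ///     Size { width: groups, height: 1, depth: 1 },
    ///     Size { width: per_group, height: 1, depth: 1 },
    /// );
    /// ```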
    pub fn dispatch_threadgroups(
        &self,
        threadgroups_per_grid: Size,
        threads_per_threadgroup: Size,
    ) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(dispatchThreadgroups:threadsPerThreadgroup:),
                threadgroups_per_grid,
                threads_per_threadgroup,
            );
        }
    }

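    /// Dispatches an exact grid of `threads_per_grid` threads. Unlike
    /// `dispatch_threadgroups`, the grid need not be a multiple of the
    /// threadgroup size; this relies on the device supporting non-uniform
    /// threadgroup sizes.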
    pub fn dispatch_threads(&self, threads_per_grid: Size, threads_per_threadgroup: Size) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(dispatchThreads:threadsPerThreadgroup:),
                threads_per_grid,
                threads_per_threadgroup,
            );
        }
    }

    pub fn dispatch_threadgroups_indirect(
        &self,
        indirect_buffer: *const c_void,
        indirect_buffer_offset: UInteger,
        threads_per_threadgroup: Size,
    ) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(dispatchThreadgroupsWithIndirectBuffer:indirectBufferOffset:threadsPerThreadgroup:),
                indirect_buffer,
                indirect_buffer_offset,
                threads_per_threadgroup,
            );
        }
    }

    pub fn dispatch_threads_indirect(
        &self,
        indirect_buffer: *const c_void,
        indirect_buffer_offset: UInteger,
        threads_per_threadgroup: Size,
    ) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(dispatchThreadsWithIndirectBuffer:indirectBufferOffset:threadsPerThreadgroup:),
                indirect_buffer,
                indirect_buffer_offset,
                threads_per_threadgroup,
            );
        }
    }

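    /// Inserts a barrier so that work encoded after this call observes the
    /// results of work encoded before it on this encoder.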
    pub fn barrier(&self) {
        unsafe {
            let _: () = msg_send_0(self.as_ptr(), sel!(barrier));
        }
    }

    pub fn barrier_buffer(&self, buffer: *const c_void, visibility: VisibilityOptions) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(barrierWithBuffer:visibilityOptions:),
                buffer,
                visibility.0,
            );
        }
    }

    pub fn barrier_texture(&self, texture: *const c_void, visibility: VisibilityOptions) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(barrierWithTexture:visibilityOptions:),
                texture,
                visibility.0,
            );
        }
    }

    pub fn update_fence(&self, fence: *const c_void) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(updateFence:), fence);
        }
    }

    pub fn wait_for_fence(&self, fence: *const c_void) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(waitForFence:), fence);
        }
    }

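    /// Marks a resource as resident for this pass. This is needed for
    /// resources the GPU reaches indirectly (for example through argument
    /// tables) rather than through an explicit `set_*` binding.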
    pub fn use_resource(&self, resource: *const c_void, usage: UInteger) {
        unsafe {
            let _: () = msg_send_2(self.as_ptr(), sel!(useResource:usage:), resource, usage);
        }
    }

    pub fn use_resources(&self, resources: *const *const c_void, count: UInteger, usage: UInteger) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(useResources:count:usage:),
                resources,
                count,
                usage,
            );
        }
    }

    pub fn use_heap(&self, heap: *const c_void, usage: UInteger) {
        unsafe {
            let _: () = msg_send_2(self.as_ptr(), sel!(useHeap:usage:), heap, usage);
        }
    }

    pub fn use_heaps(&self, heaps: *const *const c_void, count: UInteger, usage: UInteger) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(useHeaps:count:usage:),
                heaps,
                count,
                usage,
            );
        }
    }

    pub fn push_debug_group(&self, name: &str) {
        if let Some(ns_name) = mtl_foundation::String::from_str(name) {
            unsafe {
                let _: () = msg_send_1(self.as_ptr(), sel!(pushDebugGroup:), ns_name.as_ptr());
            }
        }
    }

    pub fn pop_debug_group(&self) {
        unsafe {
            let _: () = msg_send_0(self.as_ptr(), sel!(popDebugGroup));
        }
    }

    pub fn insert_debug_signpost(&self, name: &str) {
        if let Some(ns_name) = mtl_foundation::String::from_str(name) {
            unsafe {
                let _: () = msg_send_1(self.as_ptr(), sel!(insertDebugSignpost:), ns_name.as_ptr());
            }
        }
    }

    pub fn end_encoding(&self) {
        unsafe {
            let _: () = msg_send_0(self.as_ptr(), sel!(endEncoding));
        }
    }

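    /// Copies `size` bytes from `source_buffer` at `source_offset` into
    /// `destination_buffer` at `destination_offset`. A minimal sketch,
    /// assuming both raw buffer pointers were obtained elsewhere:
    ///
    /// ```ignore
    /// // Copy the first 1 KiB of `src` to the start of `dst`.
    /// encoder.copy_from_buffer_to_buffer(src, 0, dst, 0, 1024);
    /// ```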
    pub fn copy_from_buffer_to_buffer(
        &self,
        source_buffer: *const c_void,
        source_offset: UInteger,
        destination_buffer: *const c_void,
        destination_offset: UInteger,
        size: UInteger,
    ) {
        unsafe {
            let _: () = msg_send_5(
                self.as_ptr(),
                sel!(copyFromBuffer:sourceOffset:toBuffer:destinationOffset:size:),
                source_buffer,
                source_offset,
                destination_buffer,
                destination_offset,
                size,
            );
        }
    }

    pub fn fill_buffer(
        &self,
        buffer: *const c_void,
        range_location: UInteger,
        range_length: UInteger,
        value: u8,
    ) {
        unsafe {
            // As in `set_buffers`, use an NSRange-compatible struct, not a tuple.
            let range = mtl_foundation::Range::new(range_location, range_length);
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(fillBuffer:range:value:),
                buffer,
                range,
                value,
            );
        }
    }

    pub fn generate_mipmaps(&self, texture: *const c_void) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(generateMipmapsForTexture:), texture);
        }
    }

    pub fn optimize_contents_for_cpu_access(&self, texture: *const c_void) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(optimizeContentsForCPUAccess:), texture);
        }
    }

    pub fn optimize_contents_for_cpu_access_slice_level(
        &self,
        texture: *const c_void,
        slice: UInteger,
        level: UInteger,
    ) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(optimizeContentsForCPUAccess:slice:level:),
                texture,
                slice,
                level,
            );
        }
    }

    pub fn optimize_contents_for_gpu_access(&self, texture: *const c_void) {
        unsafe {
            let _: () = msg_send_1(self.as_ptr(), sel!(optimizeContentsForGPUAccess:), texture);
        }
    }

    pub fn optimize_contents_for_gpu_access_slice_level(
        &self,
        texture: *const c_void,
        slice: UInteger,
        level: UInteger,
    ) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(optimizeContentsForGPUAccess:slice:level:),
                texture,
                slice,
                level,
            );
        }
    }

    pub fn write_timestamp(
        &self,
        granularity: super::TimestampGranularity,
        counter_heap: *const c_void,
        index: UInteger,
    ) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(writeTimestampWithGranularity:intoHeap:atIndex:),
                granularity.0,
                counter_heap,
                index,
            );
        }
    }

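    /// Builds an acceleration structure described by `descriptor` into
    /// `acceleration_structure`. The scratch buffer must be at least the
    /// build-scratch size the device reports for this descriptor.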
    pub fn build_acceleration_structure(
        &self,
        acceleration_structure: &crate::AccelerationStructure,
        descriptor: &super::AccelerationStructureDescriptor,
        scratch_buffer: super::BufferRange,
    ) {
        unsafe {
            let _: () = msg_send_3(
                self.as_ptr(),
                sel!(buildAccelerationStructure:descriptor:scratchBuffer:),
                acceleration_structure.as_ptr(),
                descriptor.as_ptr(),
                scratch_buffer,
            );
        }
    }

    pub fn copy_acceleration_structure(
        &self,
        source: &crate::AccelerationStructure,
        destination: &crate::AccelerationStructure,
    ) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(copyAccelerationStructure:toAccelerationStructure:),
                source.as_ptr(),
                destination.as_ptr(),
            );
        }
    }

    pub fn copy_and_compact_acceleration_structure(
        &self,
        source: &crate::AccelerationStructure,
        destination: &crate::AccelerationStructure,
    ) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(copyAndCompactAccelerationStructure:toAccelerationStructure:),
                source.as_ptr(),
                destination.as_ptr(),
            );
        }
    }

    pub fn refit_acceleration_structure(
        &self,
        source: &crate::AccelerationStructure,
        descriptor: &super::AccelerationStructureDescriptor,
        destination: &crate::AccelerationStructure,
        scratch_buffer: super::BufferRange,
    ) {
        unsafe {
            let _: () = msg_send_4(
                self.as_ptr(),
                sel!(refitAccelerationStructure:descriptor:destination:scratchBuffer:),
                source.as_ptr(),
                descriptor.as_ptr(),
                destination.as_ptr(),
                scratch_buffer,
            );
        }
    }

    pub fn refit_acceleration_structure_with_options(
        &self,
        source: &crate::AccelerationStructure,
        descriptor: &super::AccelerationStructureDescriptor,
        destination: &crate::AccelerationStructure,
        scratch_buffer: super::BufferRange,
        options: crate::AccelerationStructureRefitOptions,
    ) {
        unsafe {
            let _: () = msg_send_5(
                self.as_ptr(),
                sel!(refitAccelerationStructure:descriptor:destination:scratchBuffer:options:),
                source.as_ptr(),
                descriptor.as_ptr(),
                destination.as_ptr(),
                scratch_buffer,
                options,
            );
        }
    }

    pub fn write_compacted_acceleration_structure_size(
        &self,
        acceleration_structure: &crate::AccelerationStructure,
        buffer: super::BufferRange,
    ) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(writeCompactedAccelerationStructureSize:toBuffer:),
                acceleration_structure.as_ptr(),
                buffer,
            );
        }
    }

    pub fn copy_from_tensor(
        &self,
        source_tensor: &crate::Tensor,
        source_origin: &crate::TensorExtents,
        source_dimensions: &crate::TensorExtents,
        destination_tensor: &crate::Tensor,
        destination_origin: &crate::TensorExtents,
        destination_dimensions: &crate::TensorExtents,
    ) {
        unsafe {
            mtl_sys::msg_send_6::<
                (),
                *const c_void,
                *const c_void,
                *const c_void,
                *const c_void,
                *const c_void,
                *const c_void,
            >(
                self.as_ptr(),
                sel!(copyFromTensor:sourceOrigin:sourceDimensions:toTensor:destinationOrigin:destinationDimensions:),
                source_tensor.as_ptr(),
                source_origin.as_ptr(),
                source_dimensions.as_ptr(),
                destination_tensor.as_ptr(),
                destination_origin.as_ptr(),
                destination_dimensions.as_ptr(),
            );
        }
    }

    pub fn copy_from_texture_to_texture(
        &self,
        source_texture: &crate::Texture,
        destination_texture: &crate::Texture,
    ) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(copyFromTexture:toTexture:),
                source_texture.as_ptr(),
                destination_texture.as_ptr(),
            );
        }
    }

    pub fn copy_from_texture_with_slices(
        &self,
        source_texture: &crate::Texture,
        source_slice: UInteger,
        source_level: UInteger,
        destination_texture: &crate::Texture,
        destination_slice: UInteger,
        destination_level: UInteger,
        slice_count: UInteger,
        level_count: UInteger,
    ) {
        unsafe {
            mtl_sys::msg_send_8::<
                (),
                *const c_void,
                UInteger,
                UInteger,
                *const c_void,
                UInteger,
                UInteger,
                UInteger,
                UInteger,
            >(
                self.as_ptr(),
                sel!(copyFromTexture:sourceSlice:sourceLevel:toTexture:destinationSlice:destinationLevel:sliceCount:levelCount:),
                source_texture.as_ptr(),
                source_slice,
                source_level,
                destination_texture.as_ptr(),
                destination_slice,
                destination_level,
                slice_count,
                level_count,
            );
        }
    }

    pub fn copy_from_texture_with_origin(
        &self,
        source_texture: &crate::Texture,
        source_slice: UInteger,
        source_level: UInteger,
        source_origin: crate::Origin,
        source_size: Size,
        destination_texture: &crate::Texture,
        destination_slice: UInteger,
        destination_level: UInteger,
        destination_origin: crate::Origin,
    ) {
        unsafe {
            mtl_sys::msg_send_9::<
                (),
                *const c_void,
                UInteger,
                UInteger,
                crate::Origin,
                Size,
                *const c_void,
                UInteger,
                UInteger,
                crate::Origin,
            >(
                self.as_ptr(),
                sel!(copyFromTexture:sourceSlice:sourceLevel:sourceOrigin:sourceSize:toTexture:destinationSlice:destinationLevel:destinationOrigin:),
                source_texture.as_ptr(),
                source_slice,
                source_level,
                source_origin,
                source_size,
                destination_texture.as_ptr(),
                destination_slice,
                destination_level,
                destination_origin,
            );
        }
    }

    pub fn copy_from_texture_to_buffer(
        &self,
        source_texture: &crate::Texture,
        source_slice: UInteger,
        source_level: UInteger,
        source_origin: crate::Origin,
        source_size: Size,
        destination_buffer: &crate::Buffer,
        destination_offset: UInteger,
        destination_bytes_per_row: UInteger,
        destination_bytes_per_image: UInteger,
    ) {
        unsafe {
            mtl_sys::msg_send_9::<
                (),
                *const c_void,
                UInteger,
                UInteger,
                crate::Origin,
                Size,
                *const c_void,
                UInteger,
                UInteger,
                UInteger,
            >(
                self.as_ptr(),
                sel!(copyFromTexture:sourceSlice:sourceLevel:sourceOrigin:sourceSize:toBuffer:destinationOffset:destinationBytesPerRow:destinationBytesPerImage:),
                source_texture.as_ptr(),
                source_slice,
                source_level,
                source_origin,
                source_size,
                destination_buffer.as_ptr(),
                destination_offset,
                destination_bytes_per_row,
                destination_bytes_per_image,
            );
        }
    }

    pub fn copy_from_texture_to_buffer_with_options(
        &self,
        source_texture: &crate::Texture,
        source_slice: UInteger,
        source_level: UInteger,
        source_origin: crate::Origin,
        source_size: Size,
        destination_buffer: &crate::Buffer,
        destination_offset: UInteger,
        destination_bytes_per_row: UInteger,
        destination_bytes_per_image: UInteger,
        options: crate::BlitOption,
    ) {
        unsafe {
            mtl_sys::msg_send_10::<
                (),
                *const c_void,
                UInteger,
                UInteger,
                crate::Origin,
                Size,
                *const c_void,
                UInteger,
                UInteger,
                UInteger,
                crate::BlitOption,
            >(
                self.as_ptr(),
                sel!(copyFromTexture:sourceSlice:sourceLevel:sourceOrigin:sourceSize:toBuffer:destinationOffset:destinationBytesPerRow:destinationBytesPerImage:options:),
                source_texture.as_ptr(),
                source_slice,
                source_level,
                source_origin,
                source_size,
                destination_buffer.as_ptr(),
                destination_offset,
                destination_bytes_per_row,
                destination_bytes_per_image,
                options,
            );
        }
    }

    pub fn copy_indirect_command_buffer(
        &self,
        source: &crate::IndirectCommandBuffer,
        source_range_location: UInteger,
        source_range_length: UInteger,
        destination: &crate::IndirectCommandBuffer,
        destination_index: UInteger,
    ) {
        unsafe {
            let range = mtl_foundation::Range::new(source_range_location, source_range_length);
            let _: () = msg_send_4(
                self.as_ptr(),
                sel!(copyIndirectCommandBuffer:sourceRange:destination:destinationIndex:),
                source.as_ptr(),
                range,
                destination.as_ptr(),
                destination_index,
            );
        }
    }

    pub fn optimize_indirect_command_buffer(
        &self,
        indirect_command_buffer: &crate::IndirectCommandBuffer,
        range_location: UInteger,
        range_length: UInteger,
    ) {
        unsafe {
            let range = mtl_foundation::Range::new(range_location, range_length);
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(optimizeIndirectCommandBuffer:withRange:),
                indirect_command_buffer.as_ptr(),
                range,
            );
        }
    }

    pub fn reset_commands_in_buffer(
        &self,
        buffer: &crate::IndirectCommandBuffer,
        range_location: UInteger,
        range_length: UInteger,
    ) {
        unsafe {
            let range = mtl_foundation::Range::new(range_location, range_length);
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(resetCommandsInBuffer:withRange:),
                buffer.as_ptr(),
                range,
            );
        }
    }

    pub fn execute_commands_in_buffer(
        &self,
        indirect_command_buffer: &crate::IndirectCommandBuffer,
        range_location: UInteger,
        range_length: UInteger,
    ) {
        unsafe {
            let range = mtl_foundation::Range::new(range_location, range_length);
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(executeCommandsInBuffer:withRange:),
                indirect_command_buffer.as_ptr(),
                range,
            );
        }
    }

    pub fn execute_commands_in_buffer_indirect(
        &self,
        indirect_command_buffer: &crate::IndirectCommandBuffer,
        indirect_range_buffer: u64,
    ) {
        unsafe {
            let _: () = msg_send_2(
                self.as_ptr(),
                sel!(executeCommandsInBuffer:indirectBuffer:),
                indirect_command_buffer.as_ptr(),
                indirect_range_buffer,
            );
        }
    }

    pub fn stages(&self) -> crate::Stages {
        unsafe { msg_send_0(self.as_ptr(), sel!(stages)) }
    }
}

impl Clone for ComputeCommandEncoder {
    fn clone(&self) -> Self {
        unsafe {
            mtl_sys::msg_send_0::<*mut c_void>(self.as_ptr(), mtl_sys::sel!(retain));
        }
        Self(self.0)
    }
}

impl Drop for ComputeCommandEncoder {
    fn drop(&mut self) {
        unsafe {
            mtl_sys::msg_send_0::<()>(self.as_ptr(), mtl_sys::sel!(release));
        }
    }
}

impl Referencing for ComputeCommandEncoder {
    #[inline]
    fn as_ptr(&self) -> *const c_void {
        self.0.as_ptr()
    }
}

unsafe impl Send for ComputeCommandEncoder {}
unsafe impl Sync for ComputeCommandEncoder {}

impl std::fmt::Debug for ComputeCommandEncoder {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ComputeCommandEncoder")
            .field("label", &self.label())
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_compute_command_encoder_size() {
        assert_eq!(
            std::mem::size_of::<ComputeCommandEncoder>(),
            std::mem::size_of::<*mut c_void>()
        );
    }
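
    // `from_raw` is documented to reject null; this exercises that path
    // without needing a Metal device.
    #[test]
    fn test_from_raw_null_is_none() {
        assert!(unsafe { ComputeCommandEncoder::from_raw(std::ptr::null_mut()) }.is_none());
    }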
}