// mtl_gpu/io/command_queue_descriptor.rs

use std::ffi::c_void;
use std::ptr::NonNull;

use mtl_foundation::{Referencing, UInteger};
use mtl_sys::{msg_send_0, msg_send_1, sel};

use crate::enums::{IOCommandQueueType, IOPriority};
/// Owning wrapper around an Objective-C `MTLIOCommandQueueDescriptor` object.
///
/// `#[repr(transparent)]` guarantees this struct has exactly the layout of the
/// wrapped `NonNull<c_void>` pointer, so it can be passed across the FFI
/// boundary wherever a raw object pointer is expected. The `NonNull` niche
/// also makes `Option<IOCommandQueueDescriptor>` pointer-sized.
///
/// Ownership: the wrapper sends `release` in `Drop` (see the `Drop` impl
/// below), so each instance must hold exactly one retain count.
#[repr(transparent)]
pub struct IOCommandQueueDescriptor(pub(crate) NonNull<c_void>);
16
impl IOCommandQueueDescriptor {
    /// Allocates and initializes a new `MTLIOCommandQueueDescriptor`.
    ///
    /// Returns `None` if the Objective-C class cannot be looked up, or if
    /// either `alloc` or `init` yields a null pointer.
    pub fn new() -> Option<Self> {
        unsafe {
            let class = mtl_sys::Class::get("MTLIOCommandQueueDescriptor")?;
            let ptr: *mut c_void = msg_send_0(class.as_ptr(), sel!(alloc));
            if ptr.is_null() {
                return None;
            }
            // `init` consumes the reference produced by `alloc` and may return
            // a different (or null) pointer, so the result is re-checked via
            // `from_raw` rather than reusing the `alloc` pointer.
            let ptr: *mut c_void = msg_send_0(ptr, sel!(init));
            Self::from_raw(ptr)
        }
    }

    /// Wraps a raw descriptor pointer; returns `None` when `ptr` is null.
    ///
    /// # Safety
    ///
    /// `ptr` must either be null or point to a valid
    /// `MTLIOCommandQueueDescriptor` object, and the caller must transfer
    /// ownership of one retain count to the returned value, because `Drop`
    /// sends `release`.
    #[inline]
    pub unsafe fn from_raw(ptr: *mut c_void) -> Option<Self> {
        NonNull::new(ptr).map(Self)
    }

    /// Returns the underlying Objective-C object pointer without transferring
    /// ownership. The pointer is valid for as long as `self` is alive.
    #[inline]
    pub fn as_raw(&self) -> *mut c_void {
        self.0.as_ptr()
    }

    /// Reads the descriptor's `maxCommandBufferCount` property.
    #[inline]
    pub fn max_command_buffer_count(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxCommandBufferCount)) }
    }

    /// Writes the descriptor's `maxCommandBufferCount` property.
    #[inline]
    pub fn set_max_command_buffer_count(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(self.as_ptr(), sel!(setMaxCommandBufferCount:), count);
        }
    }

    /// Reads the descriptor's `maxCommandsInFlight` property.
    #[inline]
    pub fn max_commands_in_flight(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxCommandsInFlight)) }
    }

    /// Writes the descriptor's `maxCommandsInFlight` property.
    #[inline]
    pub fn set_max_commands_in_flight(&self, count: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(self.as_ptr(), sel!(setMaxCommandsInFlight:), count);
        }
    }

    /// Reads the descriptor's `priority` property.
    #[inline]
    pub fn priority(&self) -> IOPriority {
        unsafe { msg_send_0(self.as_ptr(), sel!(priority)) }
    }

    /// Writes the descriptor's `priority` property.
    #[inline]
    pub fn set_priority(&self, priority: IOPriority) {
        unsafe {
            msg_send_1::<(), IOPriority>(self.as_ptr(), sel!(setPriority:), priority);
        }
    }

    /// Reads the descriptor's queue type.
    ///
    /// The Objective-C property is named `type`, which is a Rust keyword;
    /// hence the `queue_type` name on the Rust side.
    #[inline]
    pub fn queue_type(&self) -> IOCommandQueueType {
        unsafe { msg_send_0(self.as_ptr(), sel!(type)) }
    }

    /// Writes the descriptor's queue type (the `type` property).
    #[inline]
    pub fn set_queue_type(&self, queue_type: IOCommandQueueType) {
        unsafe {
            msg_send_1::<(), IOCommandQueueType>(self.as_ptr(), sel!(setType:), queue_type);
        }
    }

    /// Reads the `scratchBufferAllocator` property as a raw object pointer.
    ///
    /// No ownership is transferred; the pointer may be null. NOTE(review):
    /// presumably this is an object conforming to the Metal scratch-buffer
    /// allocator protocol — confirm against the `mtl_sys` bindings.
    #[inline]
    pub fn scratch_buffer_allocator_ptr(&self) -> *const c_void {
        unsafe { msg_send_0(self.as_ptr(), sel!(scratchBufferAllocator)) }
    }

    /// Writes the `scratchBufferAllocator` property from a raw object pointer.
    ///
    /// # Safety
    ///
    /// `allocator` must either be null or point to a valid Objective-C object
    /// suitable for this property (presumably one conforming to the Metal
    /// scratch-buffer allocator protocol — confirm with the caller), and it
    /// must remain valid for as long as the descriptor may reference it.
    pub unsafe fn set_scratch_buffer_allocator_ptr(&self, allocator: *const c_void) {
        unsafe {
            msg_send_1::<(), *const c_void>(
                self.as_ptr(),
                sel!(setScratchBufferAllocator:),
                allocator,
            );
        }
    }
}
146
147impl Default for IOCommandQueueDescriptor {
148 fn default() -> Self {
149 Self::new().expect("failed to create IO command queue descriptor")
150 }
151}
152
impl Clone for IOCommandQueueDescriptor {
    /// Deep-copies the descriptor by sending the Objective-C `copy` message.
    ///
    /// `copy` returns a new object owned by the caller (+1 retain), and that
    /// reference is balanced by this type's `Drop` impl sending `release`.
    ///
    /// # Panics
    ///
    /// Panics if `copy` returns null.
    fn clone(&self) -> Self {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(copy));
            Self::from_raw(ptr).expect("failed to copy IO command queue descriptor")
        }
    }
}
161
impl Drop for IOCommandQueueDescriptor {
    /// Releases the single retain count this wrapper owns (acquired in
    /// `new`, `clone`, or transferred to `from_raw` by the caller).
    fn drop(&mut self) {
        unsafe {
            msg_send_0::<()>(self.as_ptr(), sel!(release));
        }
    }
}
169
170impl Referencing for IOCommandQueueDescriptor {
171 #[inline]
172 fn as_ptr(&self) -> *const c_void {
173 self.0.as_ptr()
174 }
175}
176
// SAFETY: NOTE(review): these impls assume the wrapped Objective-C descriptor
// object can be used and released from any thread — this is not demonstrable
// from this file; confirm against Metal's thread-safety documentation before
// relying on cross-thread use.
unsafe impl Send for IOCommandQueueDescriptor {}
unsafe impl Sync for IOCommandQueueDescriptor {}