1use std::ffi::c_void;
8use std::ptr::NonNull;
9
10use mtl_foundation::{Referencing, UInteger};
11use mtl_sys::{msg_send_0, msg_send_1, sel};
12
13use crate::enums::{
14 CPUCacheMode, HazardTrackingMode, HeapType, PurgeableState, ResourceOptions, SparsePageSize,
15 StorageMode,
16};
17
/// Owned handle to an `MTLHeap`: an abstract pool of GPU memory from which
/// buffers, textures, and acceleration structures can be sub-allocated.
///
/// The wrapper holds one Objective-C retain count, released on drop.
/// `#[repr(transparent)]` + `NonNull` keep it pointer-sized and give
/// `Option<Heap>` the null-pointer niche.
#[repr(transparent)]
pub struct Heap(pub(crate) NonNull<c_void>);
23
impl Heap {
    /// Wraps a raw `MTLHeap` pointer, returning `None` if `ptr` is null.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `MTLHeap` object, and the caller transfers
    /// one retain count to the returned `Heap` (it is released on drop).
    #[inline]
    pub unsafe fn from_raw(ptr: *mut c_void) -> Option<Self> {
        NonNull::new(ptr).map(Self)
    }

    /// Returns the underlying raw `MTLHeap` pointer without transferring
    /// ownership; the pointer stays valid only while `self` is alive.
    #[inline]
    pub fn as_raw(&self) -> *mut c_void {
        self.0.as_ptr()
    }

    /// Reads the heap's `label` property, converting the returned `NSString`
    /// to an owned Rust `String` via its `UTF8String` bytes.
    ///
    /// Returns `None` when no label is set, or when `UTF8String` yields null.
    pub fn label(&self) -> Option<String> {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(label));
            if ptr.is_null() {
                return None;
            }
            let utf8_ptr: *const std::ffi::c_char =
                mtl_sys::msg_send_0(ptr as *const c_void, sel!(UTF8String));
            if utf8_ptr.is_null() {
                return None;
            }
            // Copy out immediately (to_string_lossy + into_owned): the C
            // string's backing storage belongs to the NSString.
            let c_str = std::ffi::CStr::from_ptr(utf8_ptr);
            Some(c_str.to_string_lossy().into_owned())
        }
    }

    /// Sets the heap's debug label.
    ///
    /// Silently does nothing if `label` cannot be converted into a
    /// foundation `String` (i.e. `from_str` returns `None`).
    pub fn set_label(&self, label: &str) {
        if let Some(ns_label) = mtl_foundation::String::from_str(label) {
            unsafe {
                msg_send_1::<(), *const c_void>(self.as_ptr(), sel!(setLabel:), ns_label.as_ptr());
            }
        }
    }

    /// Returns the device this heap was created from.
    ///
    /// The device pointer is retained before wrapping so the returned
    /// `Device` owns its own reference.
    ///
    /// # Panics
    ///
    /// Panics if the heap reports a nil device.
    pub fn device(&self) -> crate::Device {
        unsafe {
            let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(device));
            let _: *mut c_void = msg_send_0(ptr, sel!(retain));
            crate::Device::from_raw(ptr).expect("heap has no device")
        }
    }

    /// Returns the heap's storage mode (shared/private/etc.).
    #[inline]
    pub fn storage_mode(&self) -> StorageMode {
        unsafe { msg_send_0(self.as_ptr(), sel!(storageMode)) }
    }

    /// Returns the heap's CPU cache mode.
    #[inline]
    pub fn cpu_cache_mode(&self) -> CPUCacheMode {
        unsafe { msg_send_0(self.as_ptr(), sel!(cpuCacheMode)) }
    }

    /// Returns whether Metal tracks resource hazards for this heap.
    #[inline]
    pub fn hazard_tracking_mode(&self) -> HazardTrackingMode {
        unsafe { msg_send_0(self.as_ptr(), sel!(hazardTrackingMode)) }
    }

    /// Returns the combined resource options for this heap.
    #[inline]
    pub fn resource_options(&self) -> ResourceOptions {
        unsafe { msg_send_0(self.as_ptr(), sel!(resourceOptions)) }
    }

    /// Returns the total size of the heap, in bytes.
    #[inline]
    pub fn size(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(size)) }
    }

    /// Returns the number of bytes currently sub-allocated from the heap.
    #[inline]
    pub fn used_size(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(usedSize)) }
    }

    /// Returns the heap's current allocated size in device memory, in bytes.
    #[inline]
    pub fn current_allocated_size(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(currentAllocatedSize)) }
    }

    /// Returns the largest allocation the heap can satisfy at the given
    /// alignment, in bytes.
    ///
    /// NOTE(review): Metal presumably expects `alignment` to be a power of
    /// two — confirm against the `MTLHeap` docs before relying on other
    /// values.
    #[inline]
    pub fn max_available_size(&self, alignment: UInteger) -> UInteger {
        unsafe {
            msg_send_1(
                self.as_ptr(),
                sel!(maxAvailableSizeWithAlignment:),
                alignment,
            )
        }
    }

    /// Returns the heap's type (the Objective-C property is named `type`,
    /// which is a Rust keyword, hence the `heap_type` spelling).
    #[inline]
    pub fn heap_type(&self) -> HeapType {
        unsafe { msg_send_0(self.as_ptr(), sel!(type)) }
    }

    /// Sets the heap's purgeable state and returns the state reported by the
    /// object (per Metal's `setPurgeableState:` contract this is the prior
    /// state; pass `PurgeableState::KeepCurrent` to only query).
    #[inline]
    pub fn set_purgeable_state(&self, state: PurgeableState) -> PurgeableState {
        unsafe { msg_send_1(self.as_ptr(), sel!(setPurgeableState:), state) }
    }

    /// Sub-allocates a buffer of `length` bytes from the heap.
    ///
    /// Returns `None` if the allocation fails (e.g. insufficient space or
    /// incompatible `options`). The returned `Buffer` owns the +1 reference
    /// produced by `new…`.
    pub fn new_buffer(
        &self,
        length: UInteger,
        options: ResourceOptions,
    ) -> Option<crate::buffer::Buffer> {
        unsafe {
            let ptr: *mut c_void = mtl_sys::msg_send_2(
                self.as_ptr(),
                sel!(newBufferWithLength: options:),
                length,
                options,
            );
            crate::buffer::Buffer::from_raw(ptr)
        }
    }

    /// Sub-allocates a buffer at an explicit byte `offset` within the heap
    /// (placement allocation). Returns `None` on failure.
    pub fn new_buffer_with_offset(
        &self,
        length: UInteger,
        options: ResourceOptions,
        offset: UInteger,
    ) -> Option<crate::buffer::Buffer> {
        unsafe {
            let ptr: *mut c_void = mtl_sys::msg_send_3(
                self.as_ptr(),
                sel!(newBufferWithLength: options: offset:),
                length,
                options,
                offset,
            );
            crate::buffer::Buffer::from_raw(ptr)
        }
    }

    /// Sub-allocates a texture described by `descriptor`.
    ///
    /// # Safety
    ///
    /// `descriptor` must point to a valid `MTLTextureDescriptor` object.
    pub unsafe fn new_texture(&self, descriptor: *const c_void) -> Option<crate::texture::Texture> {
        unsafe {
            let ptr: *mut c_void =
                msg_send_1(self.as_ptr(), sel!(newTextureWithDescriptor:), descriptor);
            crate::texture::Texture::from_raw(ptr)
        }
    }

    /// Sub-allocates a texture at an explicit byte `offset` within the heap
    /// (placement allocation).
    ///
    /// # Safety
    ///
    /// `descriptor` must point to a valid `MTLTextureDescriptor` object.
    pub unsafe fn new_texture_with_offset(
        &self,
        descriptor: *const c_void,
        offset: UInteger,
    ) -> Option<crate::texture::Texture> {
        unsafe {
            let ptr: *mut c_void = mtl_sys::msg_send_2(
                self.as_ptr(),
                sel!(newTextureWithDescriptor: offset:),
                descriptor,
                offset,
            );
            crate::texture::Texture::from_raw(ptr)
        }
    }

    /// Sub-allocates an acceleration structure of `size` bytes.
    /// Returns `None` on failure.
    pub fn new_acceleration_structure_with_size(
        &self,
        size: UInteger,
    ) -> Option<crate::acceleration::AccelerationStructure> {
        unsafe {
            let ptr: *mut c_void =
                msg_send_1(self.as_ptr(), sel!(newAccelerationStructureWithSize:), size);
            crate::acceleration::AccelerationStructure::from_raw(ptr)
        }
    }

    /// Sub-allocates an acceleration structure sized from `descriptor`.
    /// Returns `None` on failure.
    pub fn new_acceleration_structure(
        &self,
        descriptor: &crate::acceleration::AccelerationStructureDescriptor,
    ) -> Option<crate::acceleration::AccelerationStructure> {
        unsafe {
            let ptr: *mut c_void = msg_send_1(
                self.as_ptr(),
                sel!(newAccelerationStructureWithDescriptor:),
                descriptor.as_ptr(),
            );
            crate::acceleration::AccelerationStructure::from_raw(ptr)
        }
    }

    /// Sub-allocates an acceleration structure of `size` bytes at an
    /// explicit `offset` (placement allocation). Returns `None` on failure.
    pub fn new_acceleration_structure_with_size_and_offset(
        &self,
        size: UInteger,
        offset: UInteger,
    ) -> Option<crate::acceleration::AccelerationStructure> {
        unsafe {
            let ptr: *mut c_void = mtl_sys::msg_send_2(
                self.as_ptr(),
                sel!(newAccelerationStructureWithSize: offset:),
                size,
                offset,
            );
            crate::acceleration::AccelerationStructure::from_raw(ptr)
        }
    }

    /// Sub-allocates an acceleration structure described by `descriptor` at
    /// an explicit `offset` (placement allocation). Returns `None` on failure.
    pub fn new_acceleration_structure_with_offset(
        &self,
        descriptor: &crate::acceleration::AccelerationStructureDescriptor,
        offset: UInteger,
    ) -> Option<crate::acceleration::AccelerationStructure> {
        unsafe {
            let ptr: *mut c_void = mtl_sys::msg_send_2(
                self.as_ptr(),
                sel!(newAccelerationStructureWithDescriptor: offset:),
                descriptor.as_ptr(),
                offset,
            );
            crate::acceleration::AccelerationStructure::from_raw(ptr)
        }
    }
}
331
332impl Clone for Heap {
333 fn clone(&self) -> Self {
334 unsafe {
335 msg_send_0::<*mut c_void>(self.as_ptr(), sel!(retain));
336 }
337 Self(self.0)
338 }
339}
340
341impl Drop for Heap {
342 fn drop(&mut self) {
343 unsafe {
344 msg_send_0::<()>(self.as_ptr(), sel!(release));
345 }
346 }
347}
348
// Exposes the raw object pointer to the `mtl_foundation` reference-counting
// machinery without transferring ownership.
impl Referencing for Heap {
    #[inline]
    fn as_ptr(&self) -> *const c_void {
        self.0.as_ptr()
    }
}
355
// SAFETY: `Heap` holds only a retained Objective-C object pointer.
// NOTE(review): this assumes `MTLHeap` may be shared/used across threads —
// Metal objects are generally thread-safe, but confirm the implications for
// heaps created with untracked hazard-tracking mode.
unsafe impl Send for Heap {}
unsafe impl Sync for Heap {}
358
359impl std::fmt::Debug for Heap {
360 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
361 f.debug_struct("Heap")
362 .field("size", &self.size())
363 .field("used_size", &self.used_size())
364 .field("label", &self.label())
365 .finish()
366 }
367}
368
/// Owned handle to an `MTLHeapDescriptor`: the mutable configuration object
/// used to create heaps (size, storage mode, heap type, sparse page size).
///
/// Holds one Objective-C retain count, released on drop.
#[repr(transparent)]
pub struct HeapDescriptor(pub(crate) NonNull<c_void>);
378
impl HeapDescriptor {
    /// Creates a fresh `MTLHeapDescriptor` via `alloc`/`init`.
    ///
    /// Returns `None` if the `MTLHeapDescriptor` class cannot be found
    /// (e.g. Metal unavailable) or either allocation step yields null.
    pub fn new() -> Option<Self> {
        unsafe {
            let class = mtl_sys::Class::get("MTLHeapDescriptor")?;
            let ptr: *mut c_void = msg_send_0(class.as_ptr(), sel!(alloc));
            if ptr.is_null() {
                return None;
            }
            // `init` consumes the `alloc` result and returns the initialized
            // object (possibly a different pointer).
            let ptr: *mut c_void = msg_send_0(ptr, sel!(init));
            Self::from_raw(ptr)
        }
    }

    /// Wraps a raw `MTLHeapDescriptor` pointer, returning `None` for null.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a valid `MTLHeapDescriptor`, and one retain count
    /// is transferred to the returned wrapper (released on drop).
    #[inline]
    pub unsafe fn from_raw(ptr: *mut c_void) -> Option<Self> {
        NonNull::new(ptr).map(Self)
    }

    /// Returns the underlying raw pointer without transferring ownership.
    #[inline]
    pub fn as_raw(&self) -> *mut c_void {
        self.0.as_ptr()
    }

    /// Returns the configured heap size, in bytes.
    #[inline]
    pub fn size(&self) -> UInteger {
        unsafe { msg_send_0(self.as_ptr(), sel!(size)) }
    }

    /// Sets the heap size, in bytes.
    // Takes `&self` (not `&mut self`) by the crate's convention: mutation
    // happens inside the Objective-C object, not in the Rust wrapper.
    #[inline]
    pub fn set_size(&self, size: UInteger) {
        unsafe {
            msg_send_1::<(), UInteger>(self.as_ptr(), sel!(setSize:), size);
        }
    }

    /// Returns the configured storage mode.
    #[inline]
    pub fn storage_mode(&self) -> StorageMode {
        unsafe { msg_send_0(self.as_ptr(), sel!(storageMode)) }
    }

    /// Sets the storage mode for heaps created from this descriptor.
    #[inline]
    pub fn set_storage_mode(&self, mode: StorageMode) {
        unsafe {
            msg_send_1::<(), StorageMode>(self.as_ptr(), sel!(setStorageMode:), mode);
        }
    }

    /// Returns the configured CPU cache mode.
    #[inline]
    pub fn cpu_cache_mode(&self) -> CPUCacheMode {
        unsafe { msg_send_0(self.as_ptr(), sel!(cpuCacheMode)) }
    }

    /// Sets the CPU cache mode for heaps created from this descriptor.
    #[inline]
    pub fn set_cpu_cache_mode(&self, mode: CPUCacheMode) {
        unsafe {
            msg_send_1::<(), CPUCacheMode>(self.as_ptr(), sel!(setCpuCacheMode:), mode);
        }
    }

    /// Returns the configured hazard tracking mode.
    #[inline]
    pub fn hazard_tracking_mode(&self) -> HazardTrackingMode {
        unsafe { msg_send_0(self.as_ptr(), sel!(hazardTrackingMode)) }
    }

    /// Sets the hazard tracking mode for heaps created from this descriptor.
    #[inline]
    pub fn set_hazard_tracking_mode(&self, mode: HazardTrackingMode) {
        unsafe {
            msg_send_1::<(), HazardTrackingMode>(self.as_ptr(), sel!(setHazardTrackingMode:), mode);
        }
    }

    /// Returns the combined resource options.
    #[inline]
    pub fn resource_options(&self) -> ResourceOptions {
        unsafe { msg_send_0(self.as_ptr(), sel!(resourceOptions)) }
    }

    /// Sets the combined resource options (storage/cache/hazard bits).
    #[inline]
    pub fn set_resource_options(&self, options: ResourceOptions) {
        unsafe {
            msg_send_1::<(), ResourceOptions>(self.as_ptr(), sel!(setResourceOptions:), options);
        }
    }

    /// Returns the configured heap type (the Objective-C property is `type`,
    /// a Rust keyword, hence the `heap_type` spelling).
    #[inline]
    pub fn heap_type(&self) -> HeapType {
        unsafe { msg_send_0(self.as_ptr(), sel!(type)) }
    }

    /// Sets the heap type (automatic vs. placement, etc.).
    #[inline]
    pub fn set_heap_type(&self, heap_type: HeapType) {
        unsafe {
            msg_send_1::<(), HeapType>(self.as_ptr(), sel!(setType:), heap_type);
        }
    }

    /// Returns the configured sparse page size.
    #[inline]
    pub fn sparse_page_size(&self) -> SparsePageSize {
        unsafe { msg_send_0(self.as_ptr(), sel!(sparsePageSize)) }
    }

    /// Sets the sparse page size used for sparse heaps.
    #[inline]
    pub fn set_sparse_page_size(&self, size: SparsePageSize) {
        unsafe {
            msg_send_1::<(), SparsePageSize>(self.as_ptr(), sel!(setSparsePageSize:), size);
        }
    }

    /// Returns the maximum sparse page size compatible with placement
    /// sparse resources for this descriptor.
    #[inline]
    pub fn max_compatible_placement_sparse_page_size(&self) -> SparsePageSize {
        unsafe { msg_send_0(self.as_ptr(), sel!(maxCompatiblePlacementSparsePageSize)) }
    }

    /// Sets the maximum placement-compatible sparse page size.
    #[inline]
    pub fn set_max_compatible_placement_sparse_page_size(&self, size: SparsePageSize) {
        unsafe {
            msg_send_1::<(), SparsePageSize>(
                self.as_ptr(),
                sel!(setMaxCompatiblePlacementSparsePageSize:),
                size,
            );
        }
    }
}
563
564impl Default for HeapDescriptor {
565 fn default() -> Self {
566 Self::new().expect("failed to create heap descriptor")
567 }
568}
569
570impl Clone for HeapDescriptor {
571 fn clone(&self) -> Self {
572 unsafe {
573 let ptr: *mut c_void = msg_send_0(self.as_ptr(), sel!(copy));
574 Self::from_raw(ptr).expect("failed to copy heap descriptor")
575 }
576 }
577}
578
579impl Drop for HeapDescriptor {
580 fn drop(&mut self) {
581 unsafe {
582 msg_send_0::<()>(self.as_ptr(), sel!(release));
583 }
584 }
585}
586
// Exposes the raw object pointer to the `mtl_foundation` reference-counting
// machinery without transferring ownership.
impl Referencing for HeapDescriptor {
    #[inline]
    fn as_ptr(&self) -> *const c_void {
        self.0.as_ptr()
    }
}
593
// SAFETY: `HeapDescriptor` holds only a retained Objective-C object pointer.
// NOTE(review): `Sync` assumes concurrent property reads/writes on an
// `MTLHeapDescriptor` are safe; the setters mutate the object through `&self`,
// so confirm descriptors are not shared across threads while being configured.
unsafe impl Send for HeapDescriptor {}
unsafe impl Sync for HeapDescriptor {}
596
597impl std::fmt::Debug for HeapDescriptor {
598 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
599 f.debug_struct("HeapDescriptor")
600 .field("size", &self.size())
601 .field("storage_mode", &self.storage_mode())
602 .field("heap_type", &self.heap_type())
603 .finish()
604 }
605}
606
#[cfg(test)]
mod tests {
    use super::*;

    /// `Heap` is a transparent `NonNull` wrapper, so it must stay exactly
    /// pointer-sized — that is what makes it FFI-safe to pass where Metal
    /// expects an object pointer.
    #[test]
    fn test_heap_size() {
        assert_eq!(
            std::mem::size_of::<Heap>(),
            std::mem::size_of::<*mut c_void>()
        );
        // `NonNull` gives `Option<Heap>` the null-pointer niche, so the
        // `Option` costs nothing over the raw pointer.
        assert_eq!(
            std::mem::size_of::<Option<Heap>>(),
            std::mem::size_of::<*mut c_void>()
        );
    }

    /// Same layout guarantee for the descriptor wrapper.
    #[test]
    fn test_heap_descriptor_size() {
        assert_eq!(
            std::mem::size_of::<HeapDescriptor>(),
            std::mem::size_of::<*mut c_void>()
        );
    }

    /// Requires the Objective-C runtime with Metal available: `new` goes
    /// through `MTLHeapDescriptor` alloc/init.
    #[test]
    fn test_heap_descriptor_creation() {
        let desc = HeapDescriptor::new();
        assert!(desc.is_some());
    }
}