demo_vk/graphics/vulkan/allocator/block.rs

use {
    crate::{graphics::vulkan::allocator::HumanizedSize, trace},
    anyhow::{bail, Result},
    ash::vk,
};

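/// A region of allocated Vulkan device memory: the backing
/// `vk::DeviceMemory` handle, the region's offset and size within it, an
/// optional host-mapped pointer, and the memory type index it was allocated
/// from.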
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Block {
    offset: u64,
    size: u64,
    memory: vk::DeviceMemory,
    mapped_ptr: *mut std::ffi::c_void,
    memory_type_index: u32,
    device_addressable: bool,
}

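// SAFETY: Block never dereferences mapped_ptr; it only stores the pointer and
// offsets it when creating subregions. Synchronizing access to the mapped
// memory is the caller's responsibility.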
unsafe impl Send for Block {}

unsafe impl Sync for Block {}

impl Block {
    pub(super) fn new(
        offset: u64,
        size: u64,
        memory: vk::DeviceMemory,
        mapped_ptr: *mut std::ffi::c_void,
        memory_type_index: u32,
        device_addressable: bool,
    ) -> Self {
        Self {
            offset,
            size,
            memory,
            mapped_ptr,
            memory_type_index,
            device_addressable,
        }
    }

    pub(super) fn is_device_addressable(&self) -> bool {
        self.device_addressable
    }

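    /// Returns true when this block refers to the same `vk::DeviceMemory` as
    /// `other` and its byte range lies entirely within `other`'s range.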
    pub fn is_subregion_of(&self, other: &Block) -> bool {
        if self.memory() != other.memory() {
            return false;
        }

        let start = self.offset();
        let end = (self.offset() + self.size()) - 1;

        let starts_within =
            start >= other.offset() && start < other.offset() + other.size();
        let ends_within =
            end >= other.offset() && end < other.offset() + other.size();

        starts_within && ends_within
    }

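    /// Creates a block describing `size` bytes starting `offset` bytes into
    /// this block. The new offset is cumulative (relative to the underlying
    /// device memory) and the mapped pointer, if present, is advanced by
    /// `offset`. Fails when the requested range does not fit in this block.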
    pub fn subregion(&self, offset: u64, size: u64) -> Result<Self> {
        if offset >= self.size || offset + size > self.size {
            bail!(trace!(
                "Subregion at {} with size {:?} is out of bounds! {:#?}",
                offset,
                HumanizedSize(size),
                self,
            )());
        }

        let mapped_ptr: *mut std::ffi::c_void = if self.mapped_ptr.is_null() {
            std::ptr::null_mut()
        } else {
            unsafe { self.mapped_ptr.byte_offset(offset as isize) }
        };

        Ok(Block {
            offset: self.offset + offset,
            size,
            memory: self.memory,
            mapped_ptr,
            memory_type_index: self.memory_type_index,
            device_addressable: self.device_addressable,
        })
    }

    pub fn offset(&self) -> u64 {
        self.offset
    }

    pub fn size(&self) -> u64 {
        self.size
    }

    pub fn memory(&self) -> vk::DeviceMemory {
        self.memory
    }

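    /// The host-visible pointer for this block's memory, or null when the
    /// backing allocation is not mapped.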
    pub fn mapped_ptr(&self) -> *mut std::ffi::c_void {
        self.mapped_ptr
    }

    pub fn memory_type_index(&self) -> u32 {
        self.memory_type_index
    }
}

impl std::fmt::Debug for Block {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Block")
            .field("offset", &self.offset)
            .field("size", &HumanizedSize(self.size))
            .field("memory_type_index", &self.memory_type_index)
            .field("memory", &self.memory)
            .field("mapped_ptr", &self.mapped_ptr)
            .finish()
    }
}

#[cfg(test)]
mod test {
    use {super::*, std::ffi::c_void, vk::Handle};

    #[test]
    pub fn is_subregion_of_should_be_true_for_contained_blocks() -> Result<()> {
        let block = Block::new(
            0,
            100,
            vk::DeviceMemory::null(),
            std::ptr::null_mut(),
            0,
            false,
        );
        assert!(block.subregion(99, 1)?.is_subregion_of(&block));
        assert!(block.subregion(0, 100)?.is_subregion_of(&block));
        assert!(block.subregion(50, 50)?.is_subregion_of(&block));
        assert!(block.subregion(2, 80)?.is_subregion_of(&block));
        Ok(())
    }

    #[test]
    pub fn is_subregion_of_should_be_false_for_partial_overlaps() -> Result<()>
    {
        let blk = Block::new(
            50,
            50,
            vk::DeviceMemory::null(),
            std::ptr::null_mut(),
            0,
            false,
        );
        let end_overlap = Block { offset: 75, ..blk };
        assert!(!end_overlap.is_subregion_of(&blk));

        let start_overlap = Block { offset: 25, ..blk };
        assert!(!start_overlap.is_subregion_of(&blk));
        Ok(())
    }

    #[test]
    pub fn is_subregion_of_should_be_true_for_identical_blocks() {
        let block = Block::new(
            0,
            100,
            vk::DeviceMemory::null(),
            std::ptr::null_mut(),
            0,
            false,
        );
        assert!(block.is_subregion_of(&block));
    }

    #[test]
    pub fn is_subregion_of_should_be_false_when_device_memory_does_not_match() {
        let a = Block::new(
            0,
            100,
            vk::DeviceMemory::from_raw(1),
            std::ptr::null_mut(),
            0,
            false,
        );
        let b = Block::new(
            0,
            100,
            vk::DeviceMemory::from_raw(2),
            std::ptr::null_mut(),
            0,
            false,
        );
        assert!(!a.is_subregion_of(&b));
    }

    #[test]
    pub fn subregion_should_fail_when_offset_out_of_bounds() {
        assert!(Block {
            offset: 2,
            size: 100,
            memory: vk::DeviceMemory::null(),
            mapped_ptr: std::ptr::null_mut(),
            memory_type_index: 0,
            device_addressable: false,
        }
        .subregion(100, 0)
        .is_err());

        assert!(Block {
            offset: 2,
            size: 100,
            memory: vk::DeviceMemory::null(),
            mapped_ptr: std::ptr::null_mut(),
            memory_type_index: 0,
            device_addressable: false,
        }
        .subregion(50, 51)
        .is_err());
    }

    #[test]
    pub fn subregion_should_use_cumulative_offset() -> Result<()> {
        let block = Block {
            offset: 5,
            size: 100,
            memory: vk::DeviceMemory::null(),
            mapped_ptr: std::ptr::null_mut(),
            memory_type_index: 0,
            device_addressable: false,
        };
        let sub = block.subregion(3, 80)?;
        assert!(sub.offset == 8);
        assert!(sub.size == 80);
        assert!(sub.mapped_ptr.is_null());
        Ok(())
    }

    #[test]
    pub fn subregion_should_update_mapped_ptr() -> Result<()> {
        let buffer = [0_u8; 100];
        let block = Block {
            offset: 0,
            size: 100,
            memory: vk::DeviceMemory::null(),
            mapped_ptr: buffer.as_ptr() as *mut c_void,
            memory_type_index: 0,
            device_addressable: false,
        };
        let sub = block.subregion(3, 80)?;
        let ptr_offset =
            unsafe { sub.mapped_ptr.byte_offset_from(block.mapped_ptr) };
        assert!(ptr_offset == (sub.offset as isize));

        let sub2 = sub.subregion(15, 30)?;
        assert!(
            unsafe { sub2.mapped_ptr.byte_offset_from(block.mapped_ptr) }
                == 15 + 3
        );
        assert!(
            unsafe { sub2.mapped_ptr.byte_offset_from(sub.mapped_ptr) } == 15
        );
        Ok(())
    }
}