Files
a0_basic_app
a1_vehicle
a2_async_sim
ab_glyph
ab_glyph_rasterizer
adler
adler32
agents
aho_corasick
anyhow
approx
aquamarine
ash
atty
bitflags
bytemuck
byteorder
cache_padded
cfg_if
chrono
color_quant
crc32fast
crossbeam_channel
crossbeam_deque
crossbeam_epoch
crossbeam_utils
deflate
draw2d
either
flexi_logger
generic_array
gif
glfw
glfw_sys
glob
image
indoc
itertools
jpeg_decoder
lazy_static
libc
libloading
log
matrixmultiply
memchr
memoffset
miniz_oxide
nalgebra
base
geometry
linalg
third_party
num_complex
num_cpus
num_integer
num_iter
num_rational
num_traits
owned_ttf_parser
paste
png
proc_macro2
proc_macro_error
proc_macro_error_attr
quote
raw_window_handle
rawpointer
rayon
rayon_core
regex
regex_syntax
scoped_threadpool
scopeguard
semver
semver_parser
serde
serde_derive
simba
smawk
spin_sleep
syn
terminal_size
textwrap
thiserror
thiserror_impl
tiff
time
triple_buffer
ttf_parser
typenum
unicode_width
unicode_xid
unindent
vk_sys
weezl
yansi
 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
use super::{Allocation, DeviceAllocator, MemUnit};

use anyhow::Result;
use ash::vk;

/// An allocator which forces all allocations to have a fixed offset.
///
/// This has little practical use, but is convenient when verifying that other
/// parts of the code properly handle allocation offsets.
pub struct ForcedOffsetAllocator<Alloc>
where
    Alloc: DeviceAllocator,
{
    /// The wrapped allocator which performs the real allocations.
    allocator: Alloc,
    /// Offset unit, in bytes; the forced offset is a multiple of this value.
    alignment: u64,
}

impl<Alloc: DeviceAllocator> ForcedOffsetAllocator<Alloc> {
    /// Wrap `allocator` so that every allocation it hands out is shifted by a
    /// fixed, `alignment`-aligned offset.
    pub fn new(allocator: Alloc, alignment: MemUnit) -> Self {
        let alignment = alignment.to_bytes();
        Self {
            alignment,
            allocator,
        }
    }

    /// The fixed byte offset forced onto every allocation.
    ///
    /// Kept a multiple of `alignment` so the shifted allocation still honors
    /// the caller's alignment requirement.
    fn offset(&self) -> u64 {
        100 * self.alignment
    }
}

impl<Alloc: DeviceAllocator> DeviceAllocator for ForcedOffsetAllocator<Alloc> {
    /// Use the underlying allocator implementation to allocate an oversized
    /// piece of memory, then set an offset to compensate.
    ///
    /// This has no practical use other than proving that code properly handles
    /// memory offsets.
    unsafe fn allocate(
        &mut self,
        allocate_info: vk::MemoryAllocateInfo,
    ) -> Result<Allocation> {
        // Grow the request by the forced offset but otherwise forward the
        // caller's info unchanged. Spreading from `allocate_info` (rather than
        // `..Default::default()`) preserves `s_type` and any `p_next`
        // extension chain the caller attached.
        let expanded_allocate_info = vk::MemoryAllocateInfo {
            allocation_size: allocate_info.allocation_size + self.offset(),
            ..allocate_info
        };
        let mut allocation = self.allocator.allocate(expanded_allocate_info)?;
        // Shift the reported offset into the padded region, and report the
        // size the caller asked for rather than the padded size.
        allocation.offset += self.offset();
        allocation.byte_size = allocate_info.allocation_size;
        Ok(allocation)
    }

    /// Undo the offset+size adjustments which were applied by [Self::allocate],
    /// then use the underlying allocator to actually free the memory.
    unsafe fn free(&mut self, allocation: &Allocation) -> Result<()> {
        if allocation.is_null() {
            // Freeing a null allocation is a no-op.
            Ok(())
        } else {
            // Reconstruct the original (unshifted, oversized) allocation so
            // the wrapped allocator sees exactly what it handed out.
            let mut adjusted = allocation.clone();
            adjusted.offset -= self.offset();
            adjusted.byte_size += self.offset();
            self.allocator.free(&adjusted)
        }
    }
}