use deno_core::v8;
use deno_core::v8::UniqueRef;
use std::ffi::c_void;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

/// Byte-accounting allocator for V8 array buffers: `count` tracks the bytes
/// currently live, and allocations that would push it past `max` are refused.
pub struct CustomAllocator {
  max: usize,
  count: AtomicUsize,
}

#[allow(clippy::unnecessary_cast)]
unsafe extern "C" fn allocate(allocator: &CustomAllocator, n: usize) -> *mut c_void {
  // Reserve the bytes up front, then check the cap; a rejected allocation
  // rolls its reservation back so the count stays accurate.
  allocator.count.fetch_add(n, Ordering::SeqCst);
  if allocator.count.load(Ordering::SeqCst) > allocator.max {
    allocator.count.fetch_sub(n, Ordering::SeqCst);
    return std::ptr::null_mut();
  }

  // Hand V8 a zero-initialized backing store as a raw pointer.
  Box::into_raw(vec![0u8; n].into_boxed_slice()) as *mut [u8] as *mut c_void
}

#[allow(clippy::unnecessary_cast)]
#[allow(clippy::uninit_vec)]
unsafe extern "C" fn allocate_uninitialized(allocator: &CustomAllocator, n: usize) -> *mut c_void {
  allocator.count.fetch_add(n, Ordering::SeqCst);
  if allocator.count.load(Ordering::SeqCst) > allocator.max {
    allocator.count.fetch_sub(n, Ordering::SeqCst);
    return std::ptr::null_mut();
  }

  // Same as `allocate`, but the backing store is deliberately left
  // uninitialized; V8 takes responsibility for writing it before any read.
  let mut store = Vec::with_capacity(n);
  store.set_len(n);
  Box::into_raw(store.into_boxed_slice()) as *mut [u8] as *mut c_void
}

unsafe extern "C" fn free(allocator: &CustomAllocator, data: *mut c_void, n: usize) {
  allocator.count.fetch_sub(n, Ordering::SeqCst);
  // Reconstitute the boxed slice created in `allocate`/`allocate_uninitialized`
  // so that dropping it releases the memory.
  let _ = Box::from_raw(std::slice::from_raw_parts_mut(data as *mut u8, n));
}

#[allow(clippy::unnecessary_cast)]
unsafe extern "C" fn reallocate(
  allocator: &CustomAllocator,
  prev: *mut c_void,
  oldlen: usize,
  newlen: usize,
) -> *mut c_void {
  // Apply the size delta to the count; `wrapping_sub` makes shrinking
  // (newlen < oldlen) come out right because the atomic addition wraps the
  // same way.
  let delta = newlen.wrapping_sub(oldlen);
  allocator.count.fetch_add(delta, Ordering::SeqCst);
  if allocator.count.load(Ordering::SeqCst) > allocator.max {
    // Undo the delta and leave `prev` untouched: a null return from
    // reallocate means the old block is still valid.
    allocator.count.fetch_sub(delta, Ordering::SeqCst);
    return std::ptr::null_mut();
  }

  // Copy the overlapping prefix into a fresh store, zero-fill any growth,
  // and drop the old boxed slice.
  let old_store = Box::from_raw(std::slice::from_raw_parts_mut(prev as *mut u8, oldlen));
  let mut new_store = Vec::with_capacity(newlen);
  let copy_len = oldlen.min(newlen);
  new_store.extend_from_slice(&old_store[..copy_len]);
  new_store.resize(newlen, 0u8);
  Box::into_raw(new_store.into_boxed_slice()) as *mut [u8] as *mut c_void
}

unsafe extern "C" fn drop(allocator: *const CustomAllocator) {
  // Reconstitute the Arc leaked in `custom_allocator`; dropping it releases
  // the allocator state along with this last reference.
  let _ = Arc::from_raw(allocator);
}

pub fn custom_allocator(max: usize) -> UniqueRef<v8::Allocator> {
  // The vtable must be 'static because V8 keeps the pointer for the lifetime
  // of the allocator.
  let vtable: &'static v8::RustAllocatorVtable<CustomAllocator> = &v8::RustAllocatorVtable {
    allocate,
    allocate_uninitialized,
    free,
    reallocate,
    drop,
  };
  let allocator = Arc::new(CustomAllocator {
    count: AtomicUsize::new(0),
    max,
  });
  // V8 takes ownership of one Arc reference; the vtable's `drop` reclaims it.
  unsafe { v8::new_rust_allocator(Arc::into_raw(allocator), vtable) }
}
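
// Usage sketch: handing the allocator to a `JsRuntime`. This assumes
// `deno_core::RuntimeOptions::create_params` and the v8 crate's
// `CreateParams::array_buffer_allocator(UniqueRef<Allocator>)` builder,
// whose exact signatures vary across deno_core / v8 versions, so it is left
// as a commented-out sketch rather than compiled code:
//
//   let runtime = deno_core::JsRuntime::new(deno_core::RuntimeOptions {
//     // Cap live array-buffer memory at 16 MiB (arbitrary example value).
//     create_params: Some(
//       v8::CreateParams::default()
//         .array_buffer_allocator(custom_allocator(16 << 20)),
//     ),
//     ..Default::default()
//   });

#[cfg(test)]
mod tests {
  use super::*;

  // Minimal sketch of the accounting behavior, calling the vtable functions
  // directly (no isolate involved); the 64-byte cap is an arbitrary value.
  #[test]
  fn cap_is_enforced_and_count_balances() {
    let allocator = CustomAllocator {
      max: 64,
      count: AtomicUsize::new(0),
    };
    unsafe {
      // 48 bytes fit under the 64-byte cap.
      let p = allocate(&allocator, 48);
      assert!(!p.is_null());
      // A second 48-byte allocation would exceed the cap: it fails and rolls
      // its reservation back, leaving the count at 48.
      let q = allocate(&allocator, 48);
      assert!(q.is_null());
      assert_eq!(allocator.count.load(Ordering::SeqCst), 48);
      // Freeing the first block returns the count to zero.
      free(&allocator, p, 48);
      assert_eq!(allocator.count.load(Ordering::SeqCst), 0);
    }
  }
}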