@@ -11,9 +11,11 @@ use self::socket::{SockAddr, Socket};
11
11
use self :: thread:: Thread ;
12
12
use self :: ucred:: Ucred ;
13
13
use self :: uio:: { Uio , UioSeg } ;
14
+ use core:: alloc:: { GlobalAlloc , Layout } ;
14
15
use core:: ffi:: { c_char, c_int} ;
15
16
use core:: marker:: PhantomData ;
16
17
use core:: ops:: Deref ;
18
+ use core:: ptr:: { null_mut, read_unaligned, write_unaligned} ;
17
19
pub use okf_macros:: * ;
18
20
19
21
pub mod errno;
@@ -129,6 +131,8 @@ pub trait Kernel: MappedKernel {
129
131
/// - `auio` cannot be null.
130
132
unsafe fn kern_writev ( self , td : * mut Self :: Thread , fd : c_int , auio : * mut Self :: Uio ) -> c_int ;
131
133
134
+ /// The returned memory is guaranteed to be 8-byte aligned.
135
+ ///
132
136
/// # Safety
133
137
/// `ty` cannot be null.
134
138
unsafe fn malloc ( self , size : usize , ty : * mut Self :: Malloc , flags : MallocFlags ) -> * mut u8 ;
@@ -361,3 +365,80 @@ impl<T> StaticOps for MutableOps<T> {
361
365
Self ( value as _ )
362
366
}
363
367
}
368
+
369
/// Implementation of [`GlobalAlloc`] using `malloc` and `free` on `M_TEMP`.
///
/// The type parameter selects the [`Kernel`] backing the allocation calls.
/// No bound is placed on the struct itself — the `K: Kernel` requirement
/// lives on the impl blocks that actually need it, per Rust convention.
pub struct Allocator<K>(PhantomData<K>);
371
+
372
+ impl < K : Kernel > Allocator < K > {
373
+ pub const fn new ( ) -> Self {
374
+ Self ( PhantomData )
375
+ }
376
+
377
+ /// # Safety
378
+ /// `layout` must be non-zero.
379
+ #[ inline( never) ]
380
+ unsafe fn alloc ( layout : Layout , flags : MallocFlags ) -> * mut u8 {
381
+ // Calculate allocation size to include a spare room for align adjustment.
382
+ let size = if layout. align ( ) <= 8 {
383
+ layout. size ( )
384
+ } else {
385
+ match layout. size ( ) . checked_add ( layout. align ( ) - 8 ) {
386
+ Some ( v) => v,
387
+ None => return null_mut ( ) ,
388
+ }
389
+ } ;
390
+
391
+ // We will store how many bytes that we have shifted at the end.
392
+ let size = match size. checked_add ( size_of :: < usize > ( ) ) {
393
+ Some ( v) => v,
394
+ None => return null_mut ( ) ,
395
+ } ;
396
+
397
+ // Allocate.
398
+ let k = K :: default ( ) ;
399
+ let t = k. var ( K :: M_TEMP ) ;
400
+ let mem = k. malloc ( size, t. ptr ( ) , flags) ;
401
+
402
+ if mem. is_null ( ) {
403
+ return null_mut ( ) ;
404
+ }
405
+
406
+ // Get number of bytes to shift so the alignment is correct.
407
+ let misaligned = ( mem as usize ) % layout. align ( ) ;
408
+ let adjust = if misaligned == 0 {
409
+ 0
410
+ } else {
411
+ layout. align ( ) - misaligned
412
+ } ;
413
+
414
+ // Store how many bytes have been shifted.
415
+ let mem = mem. add ( adjust) ;
416
+
417
+ write_unaligned ( mem. add ( layout. size ( ) ) . cast ( ) , adjust) ;
418
+
419
+ mem
420
+ }
421
+ }
422
+
423
+ unsafe impl < K : Kernel > GlobalAlloc for Allocator < K > {
424
+ unsafe fn alloc ( & self , layout : Layout ) -> * mut u8 {
425
+ Self :: alloc ( layout, MallocFlags :: WAITOK )
426
+ }
427
+
428
+ #[ inline( never) ]
429
+ unsafe fn dealloc ( & self , ptr : * mut u8 , layout : Layout ) {
430
+ // Get original address before alignment.
431
+ let adjusted: usize = read_unaligned ( ptr. add ( layout. size ( ) ) . cast ( ) ) ;
432
+ let ptr = ptr. sub ( adjusted) ;
433
+
434
+ // Free the memory.
435
+ let k = K :: default ( ) ;
436
+ let t = k. var ( K :: M_TEMP ) ;
437
+
438
+ k. free ( ptr, t. ptr ( ) ) ;
439
+ }
440
+
441
+ unsafe fn alloc_zeroed ( & self , layout : Layout ) -> * mut u8 {
442
+ Self :: alloc ( layout, MallocFlags :: WAITOK | MallocFlags :: ZERO )
443
+ }
444
+ }
0 commit comments