@@ -16,6 +16,7 @@ use self::vnode::{Vnode, VnodeOp, VopLookup, VopRead, VopReadDir, VopUnlock, Vop
16
16
use core:: alloc:: { GlobalAlloc , Layout } ;
17
17
use core:: ffi:: { c_char, c_int} ;
18
18
use core:: marker:: PhantomData ;
19
+ use core:: mem:: transmute;
19
20
use core:: num:: NonZero ;
20
21
use core:: ops:: Deref ;
21
22
use core:: ptr:: { null_mut, read_unaligned, write_unaligned} ;
@@ -62,6 +63,7 @@ pub trait Kernel: MappedKernel {
62
63
    /// The kernel's global list of mounted filesystems.
    const MOUNTLIST: StaticMut<TailQueue<Self::Mount>>;
    /// Mutex associated with [`Self::MOUNTLIST`].
    // NOTE(review): pairing inferred from the names — confirm lock discipline.
    const MOUNTLIST_MTX: StaticMut<Self::Mtx>;
    // NOTE(review): presumably the kernel's "no CPU" sentinel value — confirm.
    const NOCPU: u32;
    /// The kernel `panic` function. Diverges, hence the `-> !` fn type; takes
    /// a C format string plus C-variadic arguments.
    const PANIC: Function<extern "C" fn(*const c_char, ...) -> !>;
    /// Vnode type value for a directory.
    const VDIR: c_int;
    /// Kernel `VOP_LOOKUP` vnode operation descriptor.
    const VOP_LOOKUP: StaticMut<Self::VnodeOp>;
    /// Kernel `VOP_READ` vnode operation descriptor.
    const VOP_READ: StaticMut<Self::VnodeOp>;
@@ -91,10 +93,10 @@ pub trait Kernel: MappedKernel {
91
93
type VopUnlock : VopUnlock ;
92
94
type VopVector : VopVector ;
93
95
94
- fn var < O : StaticOff > ( self , off : O ) -> O :: Ops {
95
- let value = unsafe { self . addr ( ) . add ( off. value ( ) ) } ;
96
+ fn get < O : Offset > ( self , off : O ) -> O :: Ops {
97
+ let addr = unsafe { self . addr ( ) . add ( off. get ( ) ) } ;
96
98
97
- <O :: Ops as StaticOps >:: new ( value )
99
+ <O :: Ops as OffsetOps >:: new ( addr )
98
100
}
99
101
100
102
/// # Safety
@@ -284,16 +286,16 @@ pub trait MappedKernel: Default + Sized + Copy + Send + Sync + 'static {
284
286
fn addr ( self ) -> * const u8 ;
285
287
}
286
288
287
/// Offset of something in the kernel.
///
/// An implementor pairs a raw byte offset with the [`OffsetOps`] wrapper
/// used to access the item once the offset is resolved against the mapped
/// kernel's base address.
pub trait Offset: Copy {
    /// Operations available on the item located at this offset.
    type Ops: OffsetOps;

    /// Returns the raw offset from the kernel's base address.
    fn get(self) -> usize;
}
293
295
294
/// Contains possible operations on an item at the [`Offset`].
pub trait OffsetOps: Copy {
    /// Wraps the resolved absolute address of the item.
    fn new(addr: *const u8) -> Self;
}
298
300
299
301
/// Offset of an immutable static value in the kernel.
@@ -324,20 +326,20 @@ impl<T> Clone for Static<T> {
324
326
325
327
impl < T > Copy for Static < T > { }
326
328
327
impl<T> Offset for Static<T> {
    // Immutable statics resolve to read-only operations (`*const T`).
    type Ops = ImmutableOps<T>;

    fn get(self) -> usize {
        self.off
    }
}
334
336
335
/// Implementation of [`OffsetOps`] for [`Static`].
pub struct ImmutableOps<T>(*const T);

impl<T> OffsetOps for ImmutableOps<T> {
    fn new(addr: *const u8) -> Self {
        // Reinterpret the raw kernel address as a read-only `T` pointer.
        Self(addr.cast())
    }
}
343
345
@@ -385,10 +387,10 @@ impl<T> Clone for StaticMut<T> {
385
387
386
388
impl < T > Copy for StaticMut < T > { }
387
389
388
impl<T> Offset for StaticMut<T> {
    // Mutable statics resolve to operations that can hand out a `*mut T`.
    type Ops = MutableOps<T>;

    fn get(self) -> usize {
        self.off
    }
}
@@ -397,7 +399,7 @@ impl<T> StaticOff for StaticMut<T> {
397
399
pub struct MutableOps < T > ( * mut T ) ;
398
400
399
401
impl < T > MutableOps < T > {
400
- pub fn ptr ( self ) -> * mut T {
402
+ pub fn as_mut_ptr ( self ) -> * mut T {
401
403
self . 0
402
404
}
403
405
@@ -426,9 +428,82 @@ impl<T> Clone for MutableOps<T> {
426
428
427
429
impl < T > Copy for MutableOps < T > { }
428
430
429
- impl < T > StaticOps for MutableOps < T > {
430
- fn new ( value : * const u8 ) -> Self {
431
- Self ( value as _ )
431
+ impl < T > OffsetOps for MutableOps < T > {
432
+ fn new ( addr : * const u8 ) -> Self {
433
+ Self ( addr. cast_mut ( ) . cast ( ) )
434
+ }
435
+ }
436
+
437
/// Offset of a function in the kernel.
pub struct Function<T: KernelFn> {
    // Byte offset of the function's entry point from the kernel base.
    off: usize,
    // Carries the fn-pointer type `T` without storing a value.
    phantom: PhantomData<T>,
}
442
+
443
impl<T: KernelFn> Function<T> {
    /// Creates a [`Function`] from a raw kernel offset.
    ///
    /// # Safety
    /// Behavior is undefined if `off` is not valid — presumably it must be
    /// the offset of a function matching `T`'s signature; confirm.
    pub const unsafe fn new(off: usize) -> Self {
        Self {
            off,
            phantom: PhantomData,
        }
    }
}
453
+
454
// `Function` is just an offset plus a marker, so a bitwise copy is fine.
// Manual impls (rather than derives) avoid the spurious `T: Clone` bound.
impl<T: KernelFn> Clone for Function<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: KernelFn> Copy for Function<T> {}
461
+
462
+ impl < T : KernelFn > Offset for Function < T > {
463
+ type Ops = ImmutableOps < T > ;
464
+
465
+ fn get ( self ) -> usize {
466
+ self . off
467
+ }
468
+ }
469
+
470
/// Implementation of [`OffsetOps`] for [`Function`].
pub struct FunctionOps<T> {
    // Resolved absolute address of the function's entry point.
    addr: *const u8,
    // Remembers the fn-pointer type the address will be cast to.
    phantom: PhantomData<T>,
}
475
+
476
impl<T: KernelFn> FunctionOps<T> {
    /// Casts the stored kernel address into the callable fn pointer `T`.
    pub fn as_ptr(self) -> T {
        T::from_addr(self.addr)
    }
}
481
+
482
// Raw pointer plus marker — trivially copyable. Manual impls avoid a
// spurious `T: Clone` / `T: Copy` bound from `derive`.
impl<T> Clone for FunctionOps<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Copy for FunctionOps<T> {}
489
+
490
+ impl < T > OffsetOps for FunctionOps < T > {
491
+ fn new ( addr : * const u8 ) -> Self {
492
+ Self {
493
+ addr,
494
+ phantom : PhantomData ,
495
+ }
496
+ }
497
+ }
498
+
499
/// Provides method to cast kernel address into a function pointer.
pub trait KernelFn: Copy {
    // NOTE(review): implementations satisfy this safe method via `transmute`
    // of an arbitrary address; consider making it `unsafe fn`, since calling
    // the result with a bad `addr` is UB.
    fn from_addr(addr: *const u8) -> Self;
}
503
+
504
+ impl < R , A1 > KernelFn for extern "C" fn ( A1 , ...) -> R {
505
+ fn from_addr ( addr : * const u8 ) -> Self {
506
+ unsafe { transmute ( addr) }
432
507
}
433
508
}
434
509
@@ -462,8 +537,8 @@ impl<K: Kernel> Allocator<K> {
462
537
463
538
// Allocate.
464
539
let k = K :: default ( ) ;
465
- let t = k. var ( K :: M_TEMP ) ;
466
- let mem = unsafe { k. malloc ( size, t. ptr ( ) , flags) } ;
540
+ let t = k. get ( K :: M_TEMP ) ;
541
+ let mem = unsafe { k. malloc ( size, t. as_mut_ptr ( ) , flags) } ;
467
542
468
543
if mem. is_null ( ) {
469
544
return null_mut ( ) ;
@@ -505,9 +580,9 @@ unsafe impl<K: Kernel> GlobalAlloc for Allocator<K> {
505
580
506
581
// Free the memory.
507
582
let k = K :: default ( ) ;
508
- let t = k. var ( K :: M_TEMP ) ;
583
+ let t = k. get ( K :: M_TEMP ) ;
509
584
510
- unsafe { k. free ( ptr, t. ptr ( ) ) } ;
585
+ unsafe { k. free ( ptr, t. as_mut_ptr ( ) ) } ;
511
586
}
512
587
513
588
unsafe fn alloc_zeroed ( & self , layout : Layout ) -> * mut u8 {
0 commit comments