@@ -350,6 +350,7 @@ void kcsan_restore_irqtrace(struct task_struct *task)
 static noinline void kcsan_found_watchpoint(const volatile void *ptr,
                                             size_t size,
                                             int type,
+                                            unsigned long ip,
                                             atomic_long_t *watchpoint,
                                             long encoded_watchpoint)
 {
@@ -396,7 +397,7 @@ static noinline void kcsan_found_watchpoint(const volatile void *ptr,
 
         if (consumed) {
                 kcsan_save_irqtrace(current);
-                kcsan_report_set_info(ptr, size, type, watchpoint - watchpoints);
+                kcsan_report_set_info(ptr, size, type, ip, watchpoint - watchpoints);
                 kcsan_restore_irqtrace(current);
         } else {
                 /*
@@ -416,7 +417,7 @@ static noinline void kcsan_found_watchpoint(const volatile void *ptr,
 }
 
 static noinline void
-kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type)
+kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type, unsigned long ip)
 {
         const bool is_write = (type & KCSAN_ACCESS_WRITE) != 0;
         const bool is_assert = (type & KCSAN_ACCESS_ASSERT) != 0;
@@ -568,8 +569,8 @@ kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type)
                 if (is_assert && value_change == KCSAN_VALUE_CHANGE_TRUE)
                         atomic_long_inc(&kcsan_counters[KCSAN_COUNTER_ASSERT_FAILURES]);
 
-                kcsan_report_known_origin(ptr, size, type, value_change,
-                                          watchpoint - watchpoints,
+                kcsan_report_known_origin(ptr, size, type, ip,
+                                          value_change, watchpoint - watchpoints,
                                           old, new, access_mask);
         } else if (value_change == KCSAN_VALUE_CHANGE_TRUE) {
                 /* Inferring a race, since the value should not have changed. */
@@ -578,8 +579,10 @@ kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type)
                 if (is_assert)
                         atomic_long_inc(&kcsan_counters[KCSAN_COUNTER_ASSERT_FAILURES]);
 
-                if (IS_ENABLED(CONFIG_KCSAN_REPORT_RACE_UNKNOWN_ORIGIN) || is_assert)
-                        kcsan_report_unknown_origin(ptr, size, type, old, new, access_mask);
+                if (IS_ENABLED(CONFIG_KCSAN_REPORT_RACE_UNKNOWN_ORIGIN) || is_assert) {
+                        kcsan_report_unknown_origin(ptr, size, type, ip,
+                                                    old, new, access_mask);
+                }
         }
 
         /*
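The hunks above thread the new `ip` argument into KCSAN's reporting functions. As a rough illustration of what the reporting side can do with it (the real `kcsan_report_*()` implementations live in kernel/kcsan/report.c and are not part of this diff; the helper name below is hypothetical), a symbolized code address lets the report name the instrumented access rather than the KCSAN runtime:

```c
/*
 * Illustrative sketch only -- not the kernel's actual reporting code.
 * A report helper that receives the access's instruction pointer can
 * symbolize it with the %pS printk format.
 */
static void example_print_access(const volatile void *ptr, size_t size,
                                 int type, unsigned long ip)
{
        /* %pS prints a code address as "symbol+offset/size". */
        pr_err("kcsan: %zu-byte access (type %d) to %px in %pS\n",
               size, type, (void *)ptr, (void *)ip);
}
```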
@@ -596,8 +599,8 @@ kcsan_setup_watchpoint(const volatile void *ptr, size_t size, int type)
         user_access_restore(ua_flags);
 }
 
-static __always_inline void check_access(const volatile void *ptr, size_t size,
-                                         int type)
+static __always_inline void
+check_access(const volatile void *ptr, size_t size, int type, unsigned long ip)
 {
         const bool is_write = (type & KCSAN_ACCESS_WRITE) != 0;
         atomic_long_t *watchpoint;
@@ -625,13 +628,12 @@ static __always_inline void check_access(const volatile void *ptr, size_t size,
          */
 
         if (unlikely(watchpoint != NULL))
-                kcsan_found_watchpoint(ptr, size, type, watchpoint,
-                                       encoded_watchpoint);
+                kcsan_found_watchpoint(ptr, size, type, ip, watchpoint, encoded_watchpoint);
         else {
                 struct kcsan_ctx *ctx = get_ctx(); /* Call only once in fast-path. */
 
                 if (unlikely(should_watch(ptr, size, type, ctx)))
-                        kcsan_setup_watchpoint(ptr, size, type);
+                        kcsan_setup_watchpoint(ptr, size, type, ip);
                 else if (unlikely(ctx->scoped_accesses.prev))
                         kcsan_check_scoped_accesses();
         }
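Note that `check_access()` is `__always_inline`, so it cannot usefully take its own return address; the `ip` it forwards to the outlined slow paths has to be captured by the outermost, non-inlined entry points, which the later hunks do with `_RET_IP_`, i.e. `(unsigned long)__builtin_return_address(0)`. A standalone userspace sketch of that pattern (hypothetical names, not kernel code):

```c
#include <stdio.h>

/* Minimal sketch of the _RET_IP_ pattern (hypothetical names). */
static void report(unsigned long ip)
{
        printf("event attributed to caller at %#lx\n", ip);
}

/* The outlined entry point captures its caller's address... */
static void __attribute__((noinline)) entry_point(void)
{
        /* ...which must happen here, before any further calls. */
        report((unsigned long)__builtin_return_address(0));
}

int main(void)
{
        entry_point(); /* The report points back into main(). */
        return 0;
}
```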
@@ -757,7 +759,7 @@ kcsan_begin_scoped_access(const volatile void *ptr, size_t size, int type,
 {
         struct kcsan_ctx *ctx = get_ctx();
 
-        __kcsan_check_access(ptr, size, type);
+        check_access(ptr, size, type, _RET_IP_);
 
         ctx->disable_count++; /* Disable KCSAN, in case list debugging is on. */
@@ -802,7 +804,7 @@ EXPORT_SYMBOL(kcsan_end_scoped_access);
 
 void __kcsan_check_access(const volatile void *ptr, size_t size, int type)
 {
-        check_access(ptr, size, type);
+        check_access(ptr, size, type, _RET_IP_);
 }
 EXPORT_SYMBOL(__kcsan_check_access);
 
@@ -823,7 +825,7 @@ EXPORT_SYMBOL(__kcsan_check_access);
         void __tsan_read##size(void *ptr);                             \
         void __tsan_read##size(void *ptr)                              \
         {                                                              \
-                check_access(ptr, size, 0);                            \
+                check_access(ptr, size, 0, _RET_IP_);                  \
         }                                                              \
         EXPORT_SYMBOL(__tsan_read##size);                              \
         void __tsan_unaligned_read##size(void *ptr)                    \
@@ -832,7 +834,7 @@ EXPORT_SYMBOL(__kcsan_check_access);
         void __tsan_write##size(void *ptr);                            \
         void __tsan_write##size(void *ptr)                             \
         {                                                              \
-                check_access(ptr, size, KCSAN_ACCESS_WRITE);           \
+                check_access(ptr, size, KCSAN_ACCESS_WRITE, _RET_IP_); \
         }                                                              \
         EXPORT_SYMBOL(__tsan_write##size);                             \
         void __tsan_unaligned_write##size(void *ptr)                   \
@@ -842,7 +844,8 @@ EXPORT_SYMBOL(__kcsan_check_access);
         void __tsan_read_write##size(void *ptr)                        \
         {                                                              \
                 check_access(ptr, size,                                \
-                             KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE); \
+                             KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE, \
+                             _RET_IP_);                                \
         }                                                              \
         EXPORT_SYMBOL(__tsan_read_write##size);                        \
         void __tsan_unaligned_read_write##size(void *ptr)              \
@@ -858,14 +861,14 @@ DEFINE_TSAN_READ_WRITE(16);
 void __tsan_read_range(void *ptr, size_t size);
 void __tsan_read_range(void *ptr, size_t size)
 {
-        check_access(ptr, size, 0);
+        check_access(ptr, size, 0, _RET_IP_);
 }
 EXPORT_SYMBOL(__tsan_read_range);
 
 void __tsan_write_range(void *ptr, size_t size);
 void __tsan_write_range(void *ptr, size_t size)
 {
-        check_access(ptr, size, KCSAN_ACCESS_WRITE);
+        check_access(ptr, size, KCSAN_ACCESS_WRITE, _RET_IP_);
 }
 EXPORT_SYMBOL(__tsan_write_range);
 
@@ -886,7 +889,8 @@ EXPORT_SYMBOL(__tsan_write_range);
                         IS_ALIGNED((unsigned long)ptr, size);          \
                 if (IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS) && is_atomic) \
                         return;                                        \
-                check_access(ptr, size, is_atomic ? KCSAN_ACCESS_ATOMIC : 0); \
+                check_access(ptr, size, is_atomic ? KCSAN_ACCESS_ATOMIC : 0, \
+                             _RET_IP_);                                \
         }                                                              \
         EXPORT_SYMBOL(__tsan_volatile_read##size);                     \
         void __tsan_unaligned_volatile_read##size(void *ptr)           \
@@ -901,7 +905,8 @@ EXPORT_SYMBOL(__tsan_write_range);
                         return;                                        \
                 check_access(ptr, size,                                \
                              KCSAN_ACCESS_WRITE |                      \
-                             (is_atomic ? KCSAN_ACCESS_ATOMIC : 0));   \
+                             (is_atomic ? KCSAN_ACCESS_ATOMIC : 0),    \
+                             _RET_IP_);                                \
         }                                                              \
         EXPORT_SYMBOL(__tsan_volatile_write##size);                    \
         void __tsan_unaligned_volatile_write##size(void *ptr)          \
@@ -955,7 +960,7 @@ EXPORT_SYMBOL(__tsan_init);
         u##bits __tsan_atomic##bits##_load(const u##bits *ptr, int memorder) \
         {                                                              \
                 if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) {        \
-                        check_access(ptr, bits / BITS_PER_BYTE, KCSAN_ACCESS_ATOMIC); \
+                        check_access(ptr, bits / BITS_PER_BYTE, KCSAN_ACCESS_ATOMIC, _RET_IP_); \
                 }                                                      \
                 return __atomic_load_n(ptr, memorder);                 \
         }                                                              \
@@ -965,7 +970,7 @@ EXPORT_SYMBOL(__tsan_init);
         {                                                              \
                 if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) {        \
                         check_access(ptr, bits / BITS_PER_BYTE,        \
-                                     KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC); \
+                                     KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC, _RET_IP_); \
                 }                                                      \
                 __atomic_store_n(ptr, v, memorder);                    \
         }                                                              \
@@ -978,7 +983,7 @@ EXPORT_SYMBOL(__tsan_init);
                 if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) {        \
                         check_access(ptr, bits / BITS_PER_BYTE,        \
                                      KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | \
-                                     KCSAN_ACCESS_ATOMIC);             \
+                                     KCSAN_ACCESS_ATOMIC, _RET_IP_);   \
                 }                                                      \
                 return __atomic_##op##suffix(ptr, v, memorder);        \
         }                                                              \
@@ -1010,7 +1015,7 @@ EXPORT_SYMBOL(__tsan_init);
                 if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) {        \
                         check_access(ptr, bits / BITS_PER_BYTE,        \
                                      KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | \
-                                     KCSAN_ACCESS_ATOMIC);             \
+                                     KCSAN_ACCESS_ATOMIC, _RET_IP_);   \
                 }                                                      \
                 return __atomic_compare_exchange_n(ptr, exp, val, weak, mo, fail_mo); \
         }                                                              \
@@ -1025,7 +1030,7 @@ EXPORT_SYMBOL(__tsan_init);
                 if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) {        \
                         check_access(ptr, bits / BITS_PER_BYTE,        \
                                      KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | \
-                                     KCSAN_ACCESS_ATOMIC);             \
+                                     KCSAN_ACCESS_ATOMIC, _RET_IP_);   \
                 }                                                      \
                 __atomic_compare_exchange_n(ptr, &exp, val, 0, mo, fail_mo); \
                 return exp;                                            \
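Taken together, every instrumentation entry point now captures `_RET_IP_` and hands it down through `check_access()`. As a rough illustration (a hand expansion, not the exact preprocessor output; the real macro also emits unaligned and write variants), the plain-read hook generated for size 8 ends up looking like:

```c
/* Approximate hand expansion of DEFINE_TSAN_READ_WRITE(8), read case. */
void __tsan_read8(void *ptr)
{
        /*
         * _RET_IP_ is the address of the compiler-inserted call site,
         * i.e. the instrumented access in the caller.
         */
        check_access(ptr, 8, 0, _RET_IP_);
}
EXPORT_SYMBOL(__tsan_read8);
```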