@@ -948,10 +948,29 @@ struct text_poke_loc {
 	const u8 text[POKE_MAX_OPCODE_SIZE];
 };
 
-static struct bp_patching_desc {
+struct bp_patching_desc {
 	struct text_poke_loc *vec;
 	int nr_entries;
-} bp_patching;
+	atomic_t refs;
+};
+
+static struct bp_patching_desc *bp_desc;
+
+static inline struct bp_patching_desc *try_get_desc(struct bp_patching_desc **descp)
+{
+	struct bp_patching_desc *desc = READ_ONCE(*descp); /* rcu_dereference */
+
+	if (!desc || !atomic_inc_not_zero(&desc->refs))
+		return NULL;
+
+	return desc;
+}
+
+static inline void put_desc(struct bp_patching_desc *desc)
+{
+	smp_mb__before_atomic();
+	atomic_dec(&desc->refs);
+}
 
 static inline void *text_poke_addr(struct text_poke_loc *tp)
 {
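
Aside: the try_get_desc()/put_desc() pair above is a hand-rolled reference count standing in for rcu_read_lock()/rcu_read_unlock() on a single published pointer. The same shape can be sketched outside the kernel with C11 atomics; the snippet below is an illustrative analogue only (my own names and userspace primitives, not part of this commit):

/* Userspace analogue of try_get_desc()/put_desc().  Illustrative only --
 * the kernel uses READ_ONCE(), atomic_inc_not_zero() and
 * smp_mb__before_atomic()/atomic_dec() instead. */
#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

struct desc {
	int nr_entries;
	atomic_int refs;
};

static _Atomic(struct desc *) live_desc;	/* plays the role of bp_desc */

/* Pin the published descriptor, but only while its refcount is non-zero. */
static struct desc *try_get(void)
{
	struct desc *d = atomic_load_explicit(&live_desc, memory_order_acquire);
	int old;

	if (!d)
		return NULL;

	old = atomic_load_explicit(&d->refs, memory_order_relaxed);
	do {
		if (!old)		/* already being torn down */
			return NULL;
	} while (!atomic_compare_exchange_weak(&d->refs, &old, old + 1));

	return d;
}

static void put(struct desc *d)
{
	/* release pairs with the writer's wait for refs to reach zero */
	atomic_fetch_sub_explicit(&d->refs, 1, memory_order_release);
}

int main(void)
{
	struct desc d = { .nr_entries = 1, .refs = 1 };
	struct desc *p;

	atomic_store_explicit(&live_desc, &d, memory_order_release);

	p = try_get();
	if (p) {
		printf("pinned descriptor with %d entries\n", p->nr_entries);
		put(p);
	}
	return 0;
}
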
@@ -972,26 +991,26 @@ NOKPROBE_SYMBOL(patch_cmp);
 
 int notrace poke_int3_handler(struct pt_regs *regs)
 {
+	struct bp_patching_desc *desc;
 	struct text_poke_loc *tp;
+	int len, ret = 0;
 	void *ip;
-	int len;
+
+	if (user_mode(regs))
+		return 0;
 
 	/*
 	 * Having observed our INT3 instruction, we now must observe
-	 * bp_patching.nr_entries.
+	 * bp_desc:
 	 *
-	 *	nr_entries != 0			INT3
+	 *	bp_desc = desc			INT3
 	 *	WMB				RMB
-	 *	write INT3			if (nr_entries)
-	 *
-	 * Idem for other elements in bp_patching.
+	 *	write INT3			if (desc)
 	 */
 	smp_rmb();
 
-	if (likely(!bp_patching.nr_entries))
-		return 0;
-
-	if (user_mode(regs))
+	desc = try_get_desc(&bp_desc);
+	if (!desc)
 		return 0;
 
 	/*
@@ -1002,16 +1021,16 @@ int notrace poke_int3_handler(struct pt_regs *regs)
 	/*
 	 * Skip the binary search if there is a single member in the vector.
 	 */
-	if (unlikely(bp_patching.nr_entries > 1)) {
-		tp = bsearch(ip, bp_patching.vec, bp_patching.nr_entries,
+	if (unlikely(desc->nr_entries > 1)) {
+		tp = bsearch(ip, desc->vec, desc->nr_entries,
 			     sizeof(struct text_poke_loc),
 			     patch_cmp);
 		if (!tp)
-			return 0;
+			goto out_put;
 	} else {
-		tp = bp_patching.vec;
+		tp = desc->vec;
 		if (text_poke_addr(tp) != ip)
-			return 0;
+			goto out_put;
 	}
 
 	len = text_opcode_size(tp->opcode);
@@ -1023,7 +1042,7 @@ int notrace poke_int3_handler(struct pt_regs *regs)
 		 * Someone poked an explicit INT3, they'll want to handle it,
 		 * do not consume.
 		 */
-		return 0;
+		goto out_put;
 
 	case CALL_INSN_OPCODE:
 		int3_emulate_call(regs, (long)ip + tp->rel32);
@@ -1038,7 +1057,11 @@ int notrace poke_int3_handler(struct pt_regs *regs)
 		BUG();
 	}
 
-	return 1;
+	ret = 1;
+
+out_put:
+	put_desc(desc);
+	return ret;
 }
 NOKPROBE_SYMBOL(poke_int3_handler);
 
@@ -1069,14 +1092,18 @@ static int tp_vec_nr;
  */
 static void text_poke_bp_batch(struct text_poke_loc *tp, unsigned int nr_entries)
 {
+	struct bp_patching_desc desc = {
+		.vec = tp,
+		.nr_entries = nr_entries,
+		.refs = ATOMIC_INIT(1),
+	};
 	unsigned char int3 = INT3_INSN_OPCODE;
 	unsigned int i;
 	int do_sync;
 
 	lockdep_assert_held(&text_mutex);
 
-	bp_patching.vec = tp;
-	bp_patching.nr_entries = nr_entries;
+	smp_store_release(&bp_desc, &desc); /* rcu_assign_pointer */
 
 	/*
 	 * Corresponding read barrier in int3 notifier for making sure the
@@ -1131,17 +1158,12 @@ static void text_poke_bp_batch(struct text_poke_loc *tp, unsigned int nr_entries
 	text_poke_sync();
 
 	/*
-	 * sync_core() implies an smp_mb() and orders this store against
-	 * the writing of the new instruction.
+	 * Remove and synchronize_rcu(), except we have a very primitive
+	 * refcount based completion.
 	 */
-	bp_patching.nr_entries = 0;
-	/*
-	 * This sync_core() call ensures that all INT3 handlers in progress
-	 * have finished. This allows poke_int3_handler() after this to
-	 * avoid touching bp_paching.vec by checking nr_entries == 0.
-	 */
-	text_poke_sync();
-	bp_patching.vec = NULL;
+	WRITE_ONCE(bp_desc, NULL); /* RCU_INIT_POINTER */
+	if (!atomic_dec_and_test(&desc.refs))
+		atomic_cond_read_acquire(&desc.refs, !VAL);
 }
 
 void text_poke_loc_init(struct text_poke_loc *tp, void *addr,
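
For reference, the writer-side choreography in text_poke_bp_batch() -- refs initialized to 1 for the writer itself, smp_store_release() to publish the on-stack descriptor, WRITE_ONCE(NULL) to unpublish it, then atomic_dec_and_test() plus atomic_cond_read_acquire() to wait out any in-flight handlers -- can be mimicked in userspace. A minimal sketch with C11 atomics (illustrative only; names and primitives are mine, not the kernel's):

/* Userspace analogue of the writer-side lifecycle above: publish an
 * on-stack descriptor, later unpublish it and wait for its refcount to
 * drain.  Illustrative only -- not the kernel implementation.  Here
 * nobody takes extra references, so the wait loop never spins; in the
 * kernel the concurrent int3 handlers are the extra holders. */
#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

struct desc {
	int nr_entries;
	atomic_int refs;
};

static _Atomic(struct desc *) live_desc;	/* plays the role of bp_desc */

int main(void)
{
	struct desc d = { .nr_entries = 3, .refs = 1 };	/* 1 == writer's own ref */

	/* smp_store_release()/rcu_assign_pointer() analogue: publish */
	atomic_store_explicit(&live_desc, &d, memory_order_release);

	/* ... readers would now find and pin the descriptor via live_desc ... */

	/* WRITE_ONCE(NULL)/RCU_INIT_POINTER() analogue: unpublish */
	atomic_store_explicit(&live_desc, NULL, memory_order_relaxed);

	/* drop the writer's ref; if anyone else still holds one, wait */
	if (atomic_fetch_sub(&d.refs, 1) != 1) {
		while (atomic_load_explicit(&d.refs, memory_order_acquire))
			;	/* atomic_cond_read_acquire(&refs, !VAL) analogue */
	}

	printf("no references left; the on-stack descriptor may now die\n");
	return 0;
}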