@@ -100,7 +100,7 @@ do { \
case 4: __put_user_asm(x, ptr, retval, 4, "s32i", __cb); break; \
case 8: { \
__typeof__(*ptr) __v64 = x; \
- retval = __copy_to_user(ptr, &__v64, 8); \
+ retval = __copy_to_user(ptr, &__v64, 8) ? -EFAULT : 0; \
break; \
} \
default: __put_user_bad(); \
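
Review note: __copy_to_user() returns the number of bytes it failed to copy, while put_user() callers expect 0 or -EFAULT, so the raw return value must be translated as the hunk above now does. A minimal sketch of a hypothetical caller that depends on the fixed contract (the function and its names are illustrative, not part of this patch):

```c
#include <linux/uaccess.h>

/* Hypothetical caller: before this fix, an 8-byte put_user() on
 * xtensa could return a positive residual byte count straight from
 * __copy_to_user(), which a caller like this would propagate as a
 * nonsensical positive "error". */
static long example_report_stat(u64 value, u64 __user *up)
{
	return put_user(value, up); /* now 0 or -EFAULT, even for u64 */
}
```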
@@ -132,14 +132,14 @@ do { \
#define __check_align_1 ""

#define __check_align_2 \
- " _bbci.l %3, 0, 1f \n" \
- " movi %0, %4 \n" \
+ " _bbci.l %[addr], 0, 1f \n" \
+ " movi %[err], %[efault] \n" \
" _j 2f \n"

#define __check_align_4 \
- " _bbsi.l %3, 0, 0f \n" \
- " _bbci.l %3, 1, 1f \n" \
- "0: movi %0, %4 \n" \
+ " _bbsi.l %[addr], 0, 0f \n" \
+ " _bbci.l %[addr], 1, 1f \n" \
+ "0: movi %[err], %[efault] \n" \
" _j 2f \n"

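Review note: the __check_align_* fragments switch from positional operands (%0, %3, %4) to named ones, so they no longer depend on the exact operand order of whichever macro pastes them in. A standalone sketch of the GCC named-operand syntax being adopted (the addi example assumes an xtensa toolchain; it is an illustration, not part of the patch):

```c
/* Named asm operands: each operand gets a [name] that the template
 * references as %[name] instead of a fragile positional %N. */
static inline int add_one(int v)
{
	int out;

	__asm__("addi %[out], %[in], 1"
		: [out] "=r" (out)	/* written as %[out], not %0 */
		: [in] "r" (v));	/* read as %[in], not %1 */
	return out;
}
```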
@@ -151,40 +151,40 @@ do { \
 * WARNING: If you modify this macro at all, verify that the
 * __check_align_* macros still work.
 */
- #define __put_user_asm(x, addr, err, align, insn, cb) \
+ #define __put_user_asm(x_, addr_, err_, align, insn, cb) \
__asm__ __volatile__( \
__check_align_##align \
- "1: "insn" %2, %3, 0 \n" \
+ "1: "insn" %[x], %[addr], 0 \n" \
"2: \n" \
" .section .fixup,\"ax\" \n" \
" .align 4 \n" \
" .literal_position \n" \
"5: \n" \
- " movi %1, 2b \n" \
- " movi %0, %4 \n" \
- " jx %1 \n" \
+ " movi %[tmp], 2b \n" \
+ " movi %[err], %[efault] \n" \
+ " jx %[tmp] \n" \
" .previous \n" \
" .section __ex_table,\"a\" \n" \
" .long 1b, 5b \n" \
" .previous" \
- :"=r" (err), "=r" (cb) \
- :"r" ((int)(x)), "r" (addr), "i" (-EFAULT), "0" (err))
+ :[err] "+r"(err_), [tmp] "=r"(cb) \
+ :[x] "r"(x_), [addr] "r"(addr_), [efault] "i"(-EFAULT))

#define __get_user_nocheck(x, ptr, size) \
({ \
- long __gu_err, __gu_val; \
- __get_user_size(__gu_val, (ptr), (size), __gu_err); \
- (x) = (__force __typeof__(*(ptr)))__gu_val; \
+ long __gu_err; \
+ __get_user_size((x), (ptr), (size), __gu_err); \
__gu_err; \
})

#define __get_user_check(x, ptr, size) \
({ \
- long __gu_err = -EFAULT, __gu_val = 0; \
+ long __gu_err = -EFAULT; \
const __typeof__(*(ptr)) *__gu_addr = (ptr); \
- if (access_ok(__gu_addr, size)) \
- __get_user_size(__gu_val, __gu_addr, (size), __gu_err); \
- (x) = (__force __typeof__(*(ptr)))__gu_val; \
+ if (access_ok(__gu_addr, size)) \
+ __get_user_size((x), __gu_addr, (size), __gu_err); \
+ else \
+ (x) = 0; \
__gu_err; \
})

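Review note: dropping the long-sized __gu_val intermediary lets __get_user_size() store straight into (x), which matters once the 8-byte case produces a real 64-bit value, and the new else branch preserves the zero-on-failure guarantee when access_ok() rejects the pointer. A hand-expanded sketch of the reworked __get_user_check() flow for a concrete type (function name hypothetical):

```c
#include <linux/uaccess.h>

/* What the reworked __get_user_check() amounts to for an int:
 * fetch directly into the destination, or zero it, so callers can
 * never observe uninitialized data through (x). */
static long example_get_int(int *dst, const int __user *uaddr)
{
	long err = -EFAULT;

	if (access_ok(uaddr, sizeof(*uaddr)))
		err = __get_user(*dst, uaddr);	/* fills *dst directly */
	else
		*dst = 0;	/* zero-on-failure, as in the hunk above */
	return err;
}
```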
@@ -198,8 +198,17 @@ do { \
case 1: __get_user_asm(x, ptr, retval, 1, "l8ui", __cb); break; \
case 2: __get_user_asm(x, ptr, retval, 2, "l16ui", __cb); break; \
case 4: __get_user_asm(x, ptr, retval, 4, "l32i", __cb); break; \
- case 8: retval = __copy_from_user(&x, ptr, 8); break; \
- default: (x) = __get_user_bad(); \
+ case 8: { \
+ u64 __x; \
+ if (unlikely(__copy_from_user(&__x, ptr, 8))) { \
+ retval = -EFAULT; \
+ (x) = 0; \
+ } else { \
+ (x) = *(__force __typeof__((ptr)))&__x; \
+ } \
+ break; \
+ } \
+ default: (x) = 0; __get_user_bad(); \
} \
} while (0)

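Review note: the old 8-byte case copied straight into the caller's lvalue and leaked __copy_from_user()'s byte-count return convention. The new code bounces through a u64, then commits or zeroes, reporting 0 or -EFAULT. A sketch with the macro expanded by hand for a u64 destination (function name hypothetical):

```c
#include <linux/uaccess.h>

/* Hand expansion of the new case 8: fetch into a bounce variable,
 * then commit or zero, translating the residual byte count from
 * __copy_from_user() into get_user()'s 0/-EFAULT contract. */
static long example_get_u64(u64 *dst, const u64 __user *uaddr)
{
	u64 bounce;

	if (unlikely(__copy_from_user(&bounce, uaddr, 8))) {
		*dst = 0;	/* (x) = 0 on fault */
		return -EFAULT;	/* not the residual count */
	}
	*dst = bounce;		/* (x) = *(__typeof__((ptr)))&__x */
	return 0;
}
```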
@@ -208,25 +217,28 @@ do { \
 * WARNING: If you modify this macro at all, verify that the
 * __check_align_* macros still work.
 */
- #define __get_user_asm(x, addr, err, align, insn, cb) \
- __asm__ __volatile__( \
- __check_align_##align \
- "1: "insn" %2, %3, 0 \n" \
- "2: \n" \
- " .section .fixup,\"ax\" \n" \
- " .align 4 \n" \
- " .literal_position \n" \
- "5: \n" \
- " movi %1, 2b \n" \
- " movi %2, 0 \n" \
- " movi %0, %4 \n" \
- " jx %1 \n" \
- " .previous \n" \
- " .section __ex_table,\"a\" \n" \
- " .long 1b, 5b \n" \
- " .previous" \
- :"=r" (err), "=r" (cb), "=r" (x) \
- :"r" (addr), "i" (-EFAULT), "0" (err))
+ #define __get_user_asm(x_, addr_, err_, align, insn, cb) \
+ do { \
+ u32 __x = 0; \
+ __asm__ __volatile__( \
+ __check_align_##align \
+ "1: "insn" %[x], %[addr], 0 \n" \
+ "2: \n" \
+ " .section .fixup,\"ax\" \n" \
+ " .align 4 \n" \
+ " .literal_position \n" \
+ "5: \n" \
+ " movi %[tmp], 2b \n" \
+ " movi %[err], %[efault] \n" \
+ " jx %[tmp] \n" \
+ " .previous \n" \
+ " .section __ex_table,\"a\" \n" \
+ " .long 1b, 5b \n" \
+ " .previous" \
+ :[err] "+r"(err_), [tmp] "=r"(cb), [x] "+r"(__x) \
+ :[addr] "r"(addr_), [efault] "i"(-EFAULT)); \
+ (x_) = (__force __typeof__(*(addr_)))__x; \
+ } while (0)

/*
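
Review note: wrapping __get_user_asm() in do { } while (0) with a pre-zeroed u32 bounce variable moves the old fixup-path zeroing ("movi %2, 0") into C, and the trailing cast converts the 32-bit load into whatever type *addr_ has. A portable sketch of why the "+r" constraint makes the C-side zero sufficient (illustrative only, not xtensa code):

```c
/* With a read-write "+r" operand the compiler keeps the variable
 * live across the asm, so the zero assigned in C is exactly what
 * survives on any path where the asm never stores to %[x], i.e.
 * the fault path that jumps straight to the fixup. */
static inline unsigned int fetch_or_zero_sketch(unsigned int init)
{
	unsigned int x = init;	/* stands in for "u32 __x = 0;" */

	__asm__("" : [x] "+r" (x));	/* may leave %[x] untouched */
	return x;	/* still == init if the asm didn't write it */
}
```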