@inline UnsafeAtomics.store!(x, v, ord) = UnsafeAtomics.store!(x, v, ord, none)
@inline UnsafeAtomics.cas!(x, cmp, new, ord) = UnsafeAtomics.cas!(x, cmp, new, ord, ord, none)
@inline UnsafeAtomics.modify!(ptr, op, x, ord) = UnsafeAtomics.modify!(ptr, op, x, ord, none)
-@inline UnsafeAtomics.fence(ord) = UnsafeAtomics.fence(ord., none)
+@inline UnsafeAtomics.fence(ord) = UnsafeAtomics.fence(ord, none)

#! format: off
# https://github.com/JuliaLang/julia/blob/v1.6.3/base/atomics.jl#L23-L30
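The forwarding methods above default the syncscope to `none`, so the existing ordering-only API keeps working. A minimal usage sketch (it assumes the analogous `load` forwarding just above this excerpt; the array and pointer setup are illustrative only):

```julia
using UnsafeAtomics: UnsafeAtomics, acquire, release

a = [0]
GC.@preserve a begin
    p = pointer(a)
    # The ordering-only methods forward to the syncscope-aware ones with `none`:
    UnsafeAtomics.store!(p, 1, release)           # same as store!(p, 1, release, none)
    @assert UnsafeAtomics.load(p, acquire) == 1   # same as load(p, acquire, none)
end
```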
@@ -74,47 +74,51 @@ for typ in (inttypes..., floattypes...)
    for ord in orderings
        ord in (release, acq_rel) && continue

-        if ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE
-            @eval function UnsafeAtomics.load(x::Ptr{$typ}, ::$(typeof(ord)))
-                return Core.Intrinsics.atomic_pointerref(x, base_ordering($ord))
-            end
-        else
-            @eval function UnsafeAtomics.load(x::Ptr{$typ}, ::$(typeof(ord)))
-                return llvmcall(
-                    $("""
-                    %ptr = inttoptr i$WORD_SIZE %0 to $lt*
-                    %rv = load atomic $rt %ptr $ord, align $(sizeof(typ))
-                    ret $lt %rv
-                    """),
-                    $typ,
-                    Tuple{Ptr{$typ}},
-                    x,
-                )
+        for sync in syncscopes
+            if ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE && sync == none
+                @eval function UnsafeAtomics.load(x::Ptr{$typ}, ::$(typeof(ord)), ::$(typeof(sync)))
+                    return Core.Intrinsics.atomic_pointerref(x, base_ordering($ord))
+                end
+            else
+                @eval function UnsafeAtomics.load(x::Ptr{$typ}, ::$(typeof(ord)), ::$(typeof(sync)))
+                    return llvmcall(
+                        $("""
+                        %ptr = inttoptr i$WORD_SIZE %0 to $lt*
+                        %rv = load atomic $rt %ptr $ord, align $(sizeof(typ))
+                        ret $lt %rv
+                        """),
+                        $typ,
+                        Tuple{Ptr{$typ}},
+                        x,
+                    )
+                end
+            end
            end
        end
    end

    for ord in orderings
        ord in (acquire, acq_rel) && continue
-
-        if ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE
-            @eval function UnsafeAtomics.store!(x::Ptr{$typ}, v::$typ, ::$(typeof(ord)))
-                Core.Intrinsics.atomic_pointerset(x, v, base_ordering($ord))
-                return nothing
-            end
-        else
-            @eval function UnsafeAtomics.store!(x::Ptr{$typ}, v::$typ, ::$(typeof(ord)))
-                return llvmcall(
-                    $("""
-                    %ptr = inttoptr i$WORD_SIZE %0 to $lt*
-                    store atomic $lt %1, $lt* %ptr $ord, align $(sizeof(typ))
-                    ret void
-                    """),
-                    Cvoid,
-                    Tuple{Ptr{$typ},$typ},
-                    x,
-                    v,
-                )
+
+        for sync in syncscopes
+            if ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE && sync == none
+                @eval function UnsafeAtomics.store!(x::Ptr{$typ}, v::$typ, ::$(typeof(ord)), ::$(typeof(sync)))
+                    Core.Intrinsics.atomic_pointerset(x, v, base_ordering($ord))
+                    return nothing
+                end
+            else
+                @eval function UnsafeAtomics.store!(x::Ptr{$typ}, v::$typ, ::$(typeof(ord)), ::$(typeof(sync)))
+                    return llvmcall(
+                        $("""
+                        %ptr = inttoptr i$WORD_SIZE %0 to $lt*
+                        store atomic $lt %1, $lt* %ptr $ord, align $(sizeof(typ))
+                        ret void
+                        """),
+                        Cvoid,
+                        Tuple{Ptr{$typ},$typ},
+                        x,
+                        v,
+                    )
+                end
+            end
            end
        end
    end
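After this hunk, every generated `load`/`store!` method dispatches on a syncscope singleton in addition to the ordering: `none` keeps the `Core.Intrinsics` fast path, anything else falls back to `llvmcall`. Below is a standalone sketch of the `@eval`-over-singletons pattern used here; `MyAcquire`, `MyNone`, `MySingleThread`, and `demo_load` are hypothetical names, not part of UnsafeAtomics:

```julia
# Hypothetical stand-ins; this only illustrates the @eval-over-singletons pattern.
struct MyAcquire end
struct MyNone end
struct MySingleThread end
const my_acquire = MyAcquire()
const my_syncscopes = (MyNone(), MySingleThread())

for sync in my_syncscopes
    # One method per syncscope singleton; the real code would choose the intrinsic
    # or an llvmcall body here depending on `sync`.
    @eval demo_load(x::Ptr{Int}, ::$(typeof(my_acquire)), ::$(typeof(sync))) = unsafe_load(x)
end

a = [42]
GC.@preserve a begin
    @assert demo_load(pointer(a), my_acquire, MyNone()) == 42
    @assert demo_load(pointer(a), my_acquire, MySingleThread()) == 42
end
```

Because each ordering/syncscope combination is its own method, the selection happens entirely at dispatch time and adds no runtime branching.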
@@ -124,54 +128,58 @@ for typ in (inttypes..., floattypes...)

        typ <: AbstractFloat && break

-        if ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE
-            @eval function UnsafeAtomics.cas!(
-                x::Ptr{$typ},
-                cmp::$typ,
-                new::$typ,
-                ::$(typeof(success_ordering)),
-                ::$(typeof(failure_ordering)),
-            )
-                return Core.Intrinsics.atomic_pointerreplace(
-                    x,
-                    cmp,
-                    new,
-                    base_ordering($success_ordering),
-                    base_ordering($failure_ordering)
+        for sync in syncscopes
+            if ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE && sync == none
+                @eval function UnsafeAtomics.cas!(
+                    x::Ptr{$typ},
+                    cmp::$typ,
+                    new::$typ,
+                    ::$(typeof(success_ordering)),
+                    ::$(typeof(failure_ordering)),
+                    ::$(typeof(sync)),
                )
-            end
-        else
-            @eval function UnsafeAtomics.cas!(
-                x::Ptr{$typ},
-                cmp::$typ,
-                new::$typ,
-                ::$(typeof(success_ordering)),
-                ::$(typeof(failure_ordering)),
-            )
-                success = Ref{Int8}()
-                GC.@preserve success begin
-                    old = llvmcall(
-                        $(
-                            """
-                            %ptr = inttoptr i$WORD_SIZE %0 to $lt*
-                            %rs = cmpxchg $lt* %ptr, $lt %1, $lt %2 $success_ordering $failure_ordering
-                            %rv = extractvalue { $lt, i1 } %rs, 0
-                            %s1 = extractvalue { $lt, i1 } %rs, 1
-                            %s8 = zext i1 %s1 to i8
-                            %sptr = inttoptr i$WORD_SIZE %3 to i8*
-                            store i8 %s8, i8* %sptr
-                            ret $lt %rv
-                            """
-                        ),
-                        $typ,
-                        Tuple{Ptr{$typ},$typ,$typ,Ptr{Int8}},
+                    return Core.Intrinsics.atomic_pointerreplace(
                        x,
                        cmp,
                        new,
-                        Ptr{Int8}(pointer_from_objref(success)),
+                        base_ordering($success_ordering),
+                        base_ordering($failure_ordering)
                    )
                end
-                return (old = old, success = !iszero(success[]))
+            else
+                @eval function UnsafeAtomics.cas!(
+                    x::Ptr{$typ},
+                    cmp::$typ,
+                    new::$typ,
+                    ::$(typeof(success_ordering)),
+                    ::$(typeof(failure_ordering)),
+                    ::$(typeof(sync)),
+                )
+                    success = Ref{Int8}()
+                    GC.@preserve success begin
+                        old = llvmcall(
+                            $(
+                                """
+                                %ptr = inttoptr i$WORD_SIZE %0 to $lt*
+                                %rs = cmpxchg $lt* %ptr, $lt %1, $lt %2 $success_ordering $failure_ordering
+                                %rv = extractvalue { $lt, i1 } %rs, 0
+                                %s1 = extractvalue { $lt, i1 } %rs, 1
+                                %s8 = zext i1 %s1 to i8
+                                %sptr = inttoptr i$WORD_SIZE %3 to i8*
+                                store i8 %s8, i8* %sptr
+                                ret $lt %rv
+                                """
+                            ),
+                            $typ,
+                            Tuple{Ptr{$typ},$typ,$typ,Ptr{Int8}},
+                            x,
+                            cmp,
+                            new,
+                            Ptr{Int8}(pointer_from_objref(success)),
+                        )
+                    end
+                    return (old = old, success = !iszero(success[]))
+                end
            end
        end
    end
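Both branches of the new `cas!` return the same shape: the intrinsic path returns the result of `atomic_pointerreplace`, and the `llvmcall` path assembles the matching `(old = ..., success = ...)` named tuple. A small usage sketch based on the four-argument forwarding shown at the top of the excerpt:

```julia
using UnsafeAtomics: UnsafeAtomics, seq_cst

a = [0]
GC.@preserve a begin
    p = pointer(a)
    # cas!(ptr, expected, desired, ord) forwards to the six-argument method
    # cas!(ptr, expected, desired, ord, ord, none) and returns (old=..., success=...).
    r = UnsafeAtomics.cas!(p, 0, 1, seq_cst)
    @assert r.success && r.old == 0
    r = UnsafeAtomics.cas!(p, 0, 2, seq_cst)   # fails: the slot now holds 1, not 0
    @assert !r.success && r.old == 1
end
```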
@@ -193,60 +201,81 @@ for typ in (inttypes..., floattypes...)
            end
        end
        for ord in orderings
-            # Enable this code iff https://github.com/JuliaLang/julia/pull/45122 gets merged
-            if false && ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE
-                @eval function UnsafeAtomics.modify!(
+            for sync in syncscopes
+                # Enable this code iff https://github.com/JuliaLang/julia/pull/45122 gets merged
+                if false && ATOMIC_INTRINSICS && sizeof(typ) <= MAX_POINTERATOMIC_SIZE && sync == none
+                    @eval function UnsafeAtomics.modify!(
+                        x::Ptr{$typ},
+                        op::typeof($op),
+                        v::$typ,
+                        ::$(typeof(ord)),
+                        ::$(typeof(sync)),
+                    )
+                        return Core.Intrinsics.atomic_pointermodify(x, op, v, base_ordering($ord))
+                    end
+                else
+                    @eval function UnsafeAtomics.modify!(
                        x::Ptr{$typ},
-                op::typeof($op),
+                        ::typeof($op),
                        v::$typ,
                        ::$(typeof(ord)),
+                        ::$(typeof(sync)),
                    )
-                    return Core.Intrinsics.atomic_pointermodify(x, op, v, base_ordering($ord))
-                end
-            else
-                @eval function UnsafeAtomics.modify!(
-                    x::Ptr{$typ},
-                    ::typeof($op),
-                    v::$typ,
-                    ::$(typeof(ord)),
-                )
-                    old = llvmcall(
-                        $("""
-                        %ptr = inttoptr i$WORD_SIZE %0 to $lt*
-                        %rv = atomicrmw $rmw $lt* %ptr, $lt %1 $ord
-                        ret $lt %rv
-                        """),
-                        $typ,
-                        Tuple{Ptr{$typ},$typ},
-                        x,
-                        v,
-                    )
-                    return old => $op(old, v)
+                        old = llvmcall(
+                            $("""
+                            %ptr = inttoptr i$WORD_SIZE %0 to $lt*
+                            %rv = atomicrmw $rmw $lt* %ptr, $lt %1 $ord
+                            ret $lt %rv
+                            """),
+                            $typ,
+                            Tuple{Ptr{$typ},$typ},
+                            x,
+                            v,
+                        )
+                        return old => $op(old, v)
+                    end
                end
            end
        end
    end
end

-# Core.Intrinsics.atomic_fence was introduced in 1.10
-function UnsafeAtomics.fence(ord::Ordering)
-    Core.Intrinsics.atomic_fence(base_ordering(ord))
-    return nothing
-end
-if Sys.ARCH == :x86_64
-    # FIXME: Disable this once on LLVM 19
-    # This is unfortunately required for good performance on AMD
-    # https://github.com/llvm/llvm-project/pull/106555
-    function UnsafeAtomics.fence(::typeof(seq_cst))
-        Base.llvmcall(
-            (raw"""
-            define void @fence() #0 {
-            entry:
-                tail call void asm sideeffect "lock orq $$0, (%rsp)", ""(); should this have ~{memory}
-                ret void
-            }
-            attributes #0 = { alwaysinline }
-            """, "fence"), Nothing, Tuple{})
+for sync in syncscopes
+    if sync == none
+        # Core.Intrinsics.atomic_fence was introduced in 1.10
+        @eval function UnsafeAtomics.fence(ord::Ordering, ::$(typeof(sync)))
+            Core.Intrinsics.atomic_fence(base_ordering(ord))
+            return nothing
+        end
+        if Sys.ARCH == :x86_64
+            # FIXME: Disable this once on LLVM 19
+            # This is unfortunately required for good performance on AMD
+            # https://github.com/llvm/llvm-project/pull/106555
+            @eval function UnsafeAtomics.fence(::typeof(seq_cst), ::$(typeof(sync)))
+                Base.llvmcall(
+                    (raw"""
+                    define void @fence() #0 {
+                    entry:
+                        tail call void asm sideeffect "lock orq $$0, (%rsp)", ""(); should this have ~{memory}
+                        ret void
+                    }
+                    attributes #0 = { alwaysinline }
+                    """, "fence"), Nothing, Tuple{})
+            end
+        end
+    else
+        for ord in orderings
+            @eval function UnsafeAtomics.fence(::$(typeof(ord)), ::$(typeof(sync)))
+                return llvmcall(
+                    $("""
+                    fence $sync $ord
+                    ret void
+                    """),
+                    Cvoid,
+                    Tuple{},
+                )
+            end
+        end
    end
end
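With the default `none` syncscope, `fence` keeps the `Core.Intrinsics.atomic_fence` path plus the x86-64 `lock orq` workaround for `seq_cst`; for any other member of `syncscopes` it emits a plain LLVM `fence $sync $ord` via `llvmcall`. Only `none` is named in this excerpt, so the sketch below sticks to the ordering-only form added at the top of the diff:

```julia
using UnsafeAtomics: UnsafeAtomics, release, seq_cst

# The one-argument form forwards to fence(ord, none).
UnsafeAtomics.fence(release)
UnsafeAtomics.fence(seq_cst)   # on x86-64 this hits the `lock orq` special case above
```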