@@ -1067,6 +1067,199 @@ TLI_DEFINE_VECFUNC("tgammaf", "armpl_vtgammaq_f32", FIXED(4), NOMASK, "_ZGV_LLVM
1067
1067
TLI_DEFINE_VECFUNC(" tgamma" , " armpl_svtgamma_f64_x" , SCALABLE(2 ), MASKED, "_ZGVsMxv")
1068
1068
TLI_DEFINE_VECFUNC(" tgammaf" , " armpl_svtgamma_f32_x" , SCALABLE(4 ), MASKED, "_ZGVsMxv")
1069
1069
1070
+ #elif defined(TLI_DEFINE_AMDLIBM_VECFUNCS)
1071
+ TLI_DEFINE_VECFUNC (" sinf" , " amd_vrs16_sinf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1072
+ TLI_DEFINE_VECFUNC(" sinf" , " amd_vrs8_sinf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1073
+ TLI_DEFINE_VECFUNC(" sinf" , " amd_vrs4_sinf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1074
+ TLI_DEFINE_VECFUNC(" sin" , " amd_vrd8_sin" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1075
+ TLI_DEFINE_VECFUNC(" sin" , " amd_vrd4_sin" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1076
+ TLI_DEFINE_VECFUNC(" sin" , " amd_vrd2_sin" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1077
+
1078
+ TLI_DEFINE_VECFUNC(" llvm.sin.f32" , " amd_vrs16_sinf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1079
+ TLI_DEFINE_VECFUNC(" llvm.sin.f32" , " amd_vrs8_sinf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1080
+ TLI_DEFINE_VECFUNC(" llvm.sin.f32" , " amd_vrs4_sinf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1081
+ TLI_DEFINE_VECFUNC(" llvm.sin.f64" , " amd_vrd8_sin" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1082
+ TLI_DEFINE_VECFUNC(" llvm.sin.f64" , " amd_vrd4_sin" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1083
+ TLI_DEFINE_VECFUNC(" llvm.sin.f64" , " amd_vrd2_sin" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1084
+
1085
+ TLI_DEFINE_VECFUNC(" cosf" , " amd_vrs16_cosf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1086
+ TLI_DEFINE_VECFUNC(" cosf" , " amd_vrs8_cosf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1087
+ TLI_DEFINE_VECFUNC(" cosf" , " amd_vrs4_cosf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1088
+ TLI_DEFINE_VECFUNC(" cos" , " amd_vrd8_cos" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1089
+ TLI_DEFINE_VECFUNC(" cos" , " amd_vrd4_cos" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1090
+ TLI_DEFINE_VECFUNC(" cos" , " amd_vrd2_cos" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1091
+
1092
+ TLI_DEFINE_VECFUNC(" llvm.cos.f32" , " amd_vrs16_cosf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1093
+ TLI_DEFINE_VECFUNC(" llvm.cos.f32" , " amd_vrs8_cosf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1094
+ TLI_DEFINE_VECFUNC(" llvm.cos.f32" , " amd_vrs4_cosf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1095
+ TLI_DEFINE_VECFUNC(" llvm.cos.f64" , " amd_vrd8_cos" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1096
+ TLI_DEFINE_VECFUNC(" llvm.cos.f64" , " amd_vrd4_cos" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1097
+ TLI_DEFINE_VECFUNC(" llvm.cos.f64" , " amd_vrd2_cos" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1098
+
1099
+ TLI_DEFINE_VECFUNC(" expf" , " amd_vrs16_expf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1100
+ TLI_DEFINE_VECFUNC(" expf" , " amd_vrs8_expf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1101
+ TLI_DEFINE_VECFUNC(" expf" , " amd_vrs4_expf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1102
+ TLI_DEFINE_VECFUNC(" exp" , " amd_vrd2_exp" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1103
+ TLI_DEFINE_VECFUNC(" exp" , " amd_vrd4_exp" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1104
+ TLI_DEFINE_VECFUNC(" exp" , " amd_vrd8_exp" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1105
+
1106
+ TLI_DEFINE_VECFUNC(" __expf_finite" , " amd_vrs16_expf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1107
+ TLI_DEFINE_VECFUNC(" __expf_finite" , " amd_vrs8_expf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1108
+ TLI_DEFINE_VECFUNC(" __expf_finite" , " amd_vrs4_expf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1109
+ TLI_DEFINE_VECFUNC(" __exp_finite" , " amd_vrd2_exp" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1110
+ TLI_DEFINE_VECFUNC(" __exp_finite" , " amd_vrd4_exp" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1111
+ TLI_DEFINE_VECFUNC(" __exp_finite" , " amd_vrd8_exp" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1112
+
1113
+ TLI_DEFINE_VECFUNC(" llvm.exp.f32" , " amd_vrs16_expf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1114
+ TLI_DEFINE_VECFUNC(" llvm.exp.f32" , " amd_vrs8_expf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1115
+ TLI_DEFINE_VECFUNC(" llvm.exp.f32" , " amd_vrs4_expf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1116
+ TLI_DEFINE_VECFUNC(" llvm.exp.f64" , " amd_vrd2_exp" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1117
+ TLI_DEFINE_VECFUNC(" llvm.exp.f64" , " amd_vrd4_exp" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1118
+ TLI_DEFINE_VECFUNC(" llvm.exp.f64" , " amd_vrd8_exp" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1119
+
1120
+ TLI_DEFINE_VECFUNC(" exp2f" , " amd_vrs16_exp2f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1121
+ TLI_DEFINE_VECFUNC(" exp2f" , " amd_vrs8_exp2f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1122
+ TLI_DEFINE_VECFUNC(" exp2f" , " amd_vrs4_exp2f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1123
+ TLI_DEFINE_VECFUNC(" exp2" , " amd_vrd2_exp2" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1124
+ TLI_DEFINE_VECFUNC(" exp2" , " amd_vrd4_exp2" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1125
+ TLI_DEFINE_VECFUNC(" exp2" , " amd_vrd8_exp2" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1126
+
1127
+ TLI_DEFINE_VECFUNC(" __exp2f_finite" , " amd_vrs16_exp2f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1128
+ TLI_DEFINE_VECFUNC(" __exp2f_finite" , " amd_vrs8_exp2f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1129
+ TLI_DEFINE_VECFUNC(" __exp2f_finite" , " amd_vrs4_exp2f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1130
+ TLI_DEFINE_VECFUNC(" __exp2_finite" , " amd_vrd2_exp2" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1131
+ TLI_DEFINE_VECFUNC(" __exp2_finite" , " amd_vrd4_exp2" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1132
+ TLI_DEFINE_VECFUNC(" __exp2_finite" , " amd_vrd8_exp2" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1133
+
1134
+ TLI_DEFINE_VECFUNC(" llvm.exp2.f32" , " amd_vrs16_exp2f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1135
+ TLI_DEFINE_VECFUNC(" llvm.exp2.f32" , " amd_vrs8_exp2f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1136
+ TLI_DEFINE_VECFUNC(" llvm.exp2.f32" , " amd_vrs4_exp2f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1137
+ TLI_DEFINE_VECFUNC(" llvm.exp2.f64" , " amd_vrd2_exp2" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1138
+ TLI_DEFINE_VECFUNC(" llvm.exp2.f64" , " amd_vrd4_exp2" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1139
+ TLI_DEFINE_VECFUNC(" llvm.exp2.f64" , " amd_vrd8_exp2" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1140
+
1141
+ TLI_DEFINE_VECFUNC(" powf" , " amd_vrs16_powf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16vv")
1142
+ TLI_DEFINE_VECFUNC(" powf" , " amd_vrs8_powf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8vv")
1143
+ TLI_DEFINE_VECFUNC(" powf" , " amd_vrs4_powf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4vv")
1144
+ TLI_DEFINE_VECFUNC(" pow" , " amd_vrd2_pow" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2vv")
1145
+ TLI_DEFINE_VECFUNC(" pow" , " amd_vrd4_pow" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4vv")
1146
+ TLI_DEFINE_VECFUNC(" pow" , " amd_vrd8_pow" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8vv")
1147
+
1148
+ TLI_DEFINE_VECFUNC(" __powf_finite" , " amd_vrs16_powf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16vv")
1149
+ TLI_DEFINE_VECFUNC(" __powf_finite" , " amd_vrs8_powf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8vv")
1150
+ TLI_DEFINE_VECFUNC(" __powf_finite" , " amd_vrs4_powf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4vv")
1151
+ TLI_DEFINE_VECFUNC(" __pow_finite" , " amd_vrd2_pow" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2vv")
1152
+ TLI_DEFINE_VECFUNC(" __pow_finite" , " amd_vrd4_pow" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4vv")
1153
+ TLI_DEFINE_VECFUNC(" __pow_finite" , " amd_vrd8_pow" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8vv")
1154
+
1155
+ TLI_DEFINE_VECFUNC(" llvm.pow.f32" , " amd_vrs16_powf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16vv")
1156
+ TLI_DEFINE_VECFUNC(" llvm.pow.f32" , " amd_vrs8_powf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8vv")
1157
+ TLI_DEFINE_VECFUNC(" llvm.pow.f32" , " amd_vrs4_powf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4vv")
1158
+ TLI_DEFINE_VECFUNC(" llvm.pow.f64" , " amd_vrd2_pow" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2vv")
1159
+ TLI_DEFINE_VECFUNC(" llvm.pow.f64" , " amd_vrd4_pow" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4vv")
1160
+ TLI_DEFINE_VECFUNC(" llvm.pow.f64" , " amd_vrd8_pow" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8vv")
1161
+
1162
+ TLI_DEFINE_VECFUNC(" logf" , " amd_vrs16_logf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1163
+ TLI_DEFINE_VECFUNC(" logf" , " amd_vrs8_logf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1164
+ TLI_DEFINE_VECFUNC(" logf" , " amd_vrs4_logf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1165
+ TLI_DEFINE_VECFUNC(" log" , " amd_vrd2_log" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1166
+ TLI_DEFINE_VECFUNC(" log" , " amd_vrd4_log" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1167
+ TLI_DEFINE_VECFUNC(" log" , " amd_vrd8_log" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1168
+
1169
+ TLI_DEFINE_VECFUNC(" __logf_finite" , " amd_vrs16_logf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1170
+ TLI_DEFINE_VECFUNC(" __logf_finite" , " amd_vrs8_logf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1171
+ TLI_DEFINE_VECFUNC(" __logf_finite" , " amd_vrs4_logf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1172
+ TLI_DEFINE_VECFUNC(" __log_finite" , " amd_vrd2_log" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1173
+ TLI_DEFINE_VECFUNC(" __log_finite" , " amd_vrd4_log" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1174
+ TLI_DEFINE_VECFUNC(" __log_finite" , " amd_vrd8_log" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1175
+
1176
+ TLI_DEFINE_VECFUNC(" llvm.log.f32" , " amd_vrs16_logf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1177
+ TLI_DEFINE_VECFUNC(" llvm.log.f32" , " amd_vrs8_logf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1178
+ TLI_DEFINE_VECFUNC(" llvm.log.f32" , " amd_vrs4_logf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1179
+ TLI_DEFINE_VECFUNC(" llvm.log.f64" , " amd_vrd2_log" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1180
+ TLI_DEFINE_VECFUNC(" llvm.log.f64" , " amd_vrd4_log" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1181
+ TLI_DEFINE_VECFUNC(" llvm.log.f64" , " amd_vrd8_log" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1182
+
1183
+ TLI_DEFINE_VECFUNC(" log2f" , " amd_vrs16_log2f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1184
+ TLI_DEFINE_VECFUNC(" log2f" , " amd_vrs8_log2f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1185
+ TLI_DEFINE_VECFUNC(" log2f" , " amd_vrs4_log2f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1186
+ TLI_DEFINE_VECFUNC(" log2" , " amd_vrd2_log2" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1187
+ TLI_DEFINE_VECFUNC(" log2" , " amd_vrd4_log2" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1188
+ TLI_DEFINE_VECFUNC(" log2" , " amd_vrd8_log2" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1189
+
1190
+ TLI_DEFINE_VECFUNC(" __log2f_finite" , " amd_vrs16_log2f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1191
+ TLI_DEFINE_VECFUNC(" __log2f_finite" , " amd_vrs8_log2f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1192
+ TLI_DEFINE_VECFUNC(" __log2f_finite" , " amd_vrs4_log2f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1193
+ TLI_DEFINE_VECFUNC(" __log2_finite" , " amd_vrd2_log2" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1194
+ TLI_DEFINE_VECFUNC(" __log2_finite" , " amd_vrd4_log2" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1195
+ TLI_DEFINE_VECFUNC(" __log2_finite" , " amd_vrd8_log2" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1196
+
1197
+ TLI_DEFINE_VECFUNC(" llvm.log2.f32" , " amd_vrs16_log2f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1198
+ TLI_DEFINE_VECFUNC(" llvm.log2.f32" , " amd_vrs8_log2f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1199
+ TLI_DEFINE_VECFUNC(" llvm.log2.f32" , " amd_vrs4_log2f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1200
+ TLI_DEFINE_VECFUNC(" llvm.log2.f64" , " amd_vrd2_log2" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1201
+ TLI_DEFINE_VECFUNC(" llvm.log2.f64" , " amd_vrd4_log2" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1202
+ TLI_DEFINE_VECFUNC(" llvm.log2.f64" , " amd_vrd8_log2" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1203
+
1204
+ TLI_DEFINE_VECFUNC(" log10f" , " amd_vrs16_log10f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1205
+ TLI_DEFINE_VECFUNC(" log10f" , " amd_vrs8_log10f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1206
+ TLI_DEFINE_VECFUNC(" log10f" , " amd_vrs4_log10f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1207
+
1208
+ TLI_DEFINE_VECFUNC(" __log10f_finite" , " amd_vrs16_log10f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1209
+ TLI_DEFINE_VECFUNC(" __log10f_finite" , " amd_vrs8_log10f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1210
+ TLI_DEFINE_VECFUNC(" __log10f_finite" , " amd_vrs4_log10f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1211
+
1212
+ TLI_DEFINE_VECFUNC(" llvm.log10.f32" , " amd_vrs16_log10f" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1213
+ TLI_DEFINE_VECFUNC(" llvm.log10.f32" , " amd_vrs8_log10f" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1214
+ TLI_DEFINE_VECFUNC(" llvm.log10.f32" , " amd_vrs4_log10f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1215
+
1216
+ TLI_DEFINE_VECFUNC(" erff" , " amd_vrs4_erff" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1217
+ TLI_DEFINE_VECFUNC(" erff" , " amd_vrs8_erff" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1218
+ TLI_DEFINE_VECFUNC(" erff" , " amd_vrs16_erff" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1219
+ TLI_DEFINE_VECFUNC(" erf" , " amd_vrd2_erf" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1220
+ TLI_DEFINE_VECFUNC(" erf" , " amd_vrd4_erf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1221
+ TLI_DEFINE_VECFUNC(" erf" , " amd_vrd8_erf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1222
+
1223
+ TLI_DEFINE_VECFUNC(" exp10" , " amd_vrd2_exp10" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1224
+ TLI_DEFINE_VECFUNC(" exp10f" , " amd_vrs4_exp10f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1225
+
1226
+ TLI_DEFINE_VECFUNC(" expm1" , " amd_vrd2_expm1" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1227
+ TLI_DEFINE_VECFUNC(" expm1f" , " amd_vrs4_expm1f" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1228
+
1229
+ TLI_DEFINE_VECFUNC(" log1p" , " amd_vrd2_log1p" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1230
+ TLI_DEFINE_VECFUNC(" log1pf" , " amd_vrs4_log1pf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1231
+
1232
+ TLI_DEFINE_VECFUNC(" tan" , " amd_vrd2_tan" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1233
+ TLI_DEFINE_VECFUNC(" tan" , " amd_vrd4_tan" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1234
+ TLI_DEFINE_VECFUNC(" tan" , " amd_vrd8_tan" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1235
+ TLI_DEFINE_VECFUNC(" tanf" , " amd_vrs4_tanf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1236
+ TLI_DEFINE_VECFUNC(" tanf" , " amd_vrs8_tanf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1237
+ TLI_DEFINE_VECFUNC(" tanf" , " amd_vrs16_tanf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1238
+
1239
+ TLI_DEFINE_VECFUNC(" asin" , " amd_vrd8_asin" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1240
+ TLI_DEFINE_VECFUNC(" asinf" , " amd_vrs4_asinf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1241
+ TLI_DEFINE_VECFUNC(" asinf" , " amd_vrs8_asinf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1242
+ TLI_DEFINE_VECFUNC(" asinf" , " amd_vrs16_asinf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1243
+
1244
+ TLI_DEFINE_VECFUNC(" acosf" , " amd_vrs4_acosf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1245
+ TLI_DEFINE_VECFUNC(" acosf" , " amd_vrs8_acosf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1246
+
1247
+ TLI_DEFINE_VECFUNC(" atan" , " amd_vrd2_atan" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1248
+ TLI_DEFINE_VECFUNC(" atan" , " amd_vrd4_atan" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1249
+ TLI_DEFINE_VECFUNC(" atan" , " amd_vrd8_atan" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1250
+ TLI_DEFINE_VECFUNC(" atanf" , " amd_vrs4_atanf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1251
+ TLI_DEFINE_VECFUNC(" atanf" , " amd_vrs8_atanf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1252
+ TLI_DEFINE_VECFUNC(" atanf" , " amd_vrs16_atanf" , FIXED(16 ), NOMASK, "_ZGV_LLVM_N16v")
1253
+
1254
+ TLI_DEFINE_VECFUNC(" coshf" , " amd_vrs4_coshf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1255
+ TLI_DEFINE_VECFUNC(" coshf" , " amd_vrs8_coshf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1256
+
1257
+ TLI_DEFINE_VECFUNC(" tanhf" , " amd_vrs4_tanhf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1258
+ TLI_DEFINE_VECFUNC(" tanhf" , " amd_vrs8_tanhf" , FIXED(8 ), NOMASK, "_ZGV_LLVM_N8v")
1259
+
1260
+ TLI_DEFINE_VECFUNC(" cbrt" , " amd_vrd2_cbrt" , FIXED(2 ), NOMASK, "_ZGV_LLVM_N2v")
1261
+ TLI_DEFINE_VECFUNC(" cbrtf" , " amd_vrs4_cbrtf" , FIXED(4 ), NOMASK, "_ZGV_LLVM_N4v")
1262
+
1070
1263
#else
1071
1264
#error "Must choose which vector library functions are to be defined."
1072
1265
#endif
/* Undefine the per-library selection macros so this file can be
 * re-included with a different library selected. */
#undef TLI_DEFINE_SLEEFGNUABI_SCALABLE_VECFUNCS
#undef TLI_DEFINE_MASSV_VECFUNCS_NAMES
#undef TLI_DEFINE_ARMPL_VECFUNCS
#undef TLI_DEFINE_AMDLIBM_VECFUNCS