@@ -1269,115 +1269,24 @@ vect_init_vector (vec_info *vinfo, stmt_vec_info stmt_info, tree val, tree type,
 }
 
 
-/* Function vect_get_vec_defs_for_operand.
-
-   OP is an operand in STMT_VINFO.  This function returns a vector of
-   NCOPIES defs that will be used in the vectorized stmts for STMT_VINFO.
-
-   In the case that OP is an SSA_NAME which is defined in the loop, then
-   STMT_VINFO_VEC_STMTS of the defining stmt holds the relevant defs.
-
-   In case OP is an invariant or constant, a new stmt that creates a vector def
-   needs to be introduced.  VECTYPE may be used to specify a required type for
-   vector invariant.  */
-
-void
-vect_get_vec_defs_for_operand (vec_info *vinfo, stmt_vec_info stmt_vinfo,
-                               unsigned ncopies,
-                               tree op, vec<tree> *vec_oprnds, tree vectype)
-{
-  gimple *def_stmt;
-  enum vect_def_type dt;
-  bool is_simple_use;
-  loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
-
-  if (dump_enabled_p ())
-    dump_printf_loc (MSG_NOTE, vect_location,
-                     "vect_get_vec_defs_for_operand: %T\n", op);
-
-  stmt_vec_info def_stmt_info;
-  is_simple_use = vect_is_simple_use (op, loop_vinfo, &dt,
-                                      &def_stmt_info, &def_stmt);
-  gcc_assert (is_simple_use);
-  if (def_stmt && dump_enabled_p ())
-    dump_printf_loc (MSG_NOTE, vect_location, "  def_stmt =  %G", def_stmt);
-
-  vec_oprnds->create (ncopies);
-  if (dt == vect_constant_def || dt == vect_external_def)
-    {
-      tree stmt_vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
-      tree vector_type;
-
-      if (vectype)
-        vector_type = vectype;
-      else if (VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (op))
-               && VECTOR_BOOLEAN_TYPE_P (stmt_vectype))
-        vector_type = truth_type_for (stmt_vectype);
-      else
-        vector_type = get_vectype_for_scalar_type (loop_vinfo, TREE_TYPE (op));
-
-      gcc_assert (vector_type);
-      /* A masked load can have a default SSA definition as else operand.
-         We should "vectorize" this instead of creating a duplicate from the
-         scalar default.  */
-      tree vop;
-      if (TREE_CODE (op) == SSA_NAME
-          && SSA_NAME_IS_DEFAULT_DEF (op)
-          && VAR_P (SSA_NAME_VAR (op)))
-        vop = get_or_create_ssa_default_def (cfun,
-                                             create_tmp_var (vector_type));
-      else
-        vop = vect_init_vector (vinfo, stmt_vinfo, op, vector_type, NULL);
-      while (ncopies--)
-        vec_oprnds->quick_push (vop);
-    }
-  else
-    {
-      def_stmt_info = vect_stmt_to_vectorize (def_stmt_info);
-      gcc_assert (STMT_VINFO_VEC_STMTS (def_stmt_info).length () == ncopies);
-      for (unsigned i = 0; i < ncopies; ++i)
-        vec_oprnds->quick_push (gimple_get_lhs
-                                  (STMT_VINFO_VEC_STMTS (def_stmt_info)[i]));
-    }
-}
-
-
 /* Get vectorized definitions for OP0 and OP1.  */
 
 void
-vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
-                   unsigned ncopies,
-                   tree op0, tree vectype0, vec<tree> *vec_oprnds0,
-                   tree op1, tree vectype1, vec<tree> *vec_oprnds1,
-                   tree op2, tree vectype2, vec<tree> *vec_oprnds2,
-                   tree op3, tree vectype3, vec<tree> *vec_oprnds3)
+vect_get_vec_defs (vec_info *, stmt_vec_info, slp_tree slp_node,
+                   unsigned,
+                   tree op0, tree, vec<tree> *vec_oprnds0,
+                   tree op1, tree, vec<tree> *vec_oprnds1,
+                   tree op2, tree, vec<tree> *vec_oprnds2,
+                   tree op3, tree, vec<tree> *vec_oprnds3)
 {
-  if (slp_node)
-    {
-      if (op0)
-        vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_oprnds0);
-      if (op1)
-        vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[1], vec_oprnds1);
-      if (op2)
-        vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[2], vec_oprnds2);
-      if (op3)
-        vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[3], vec_oprnds3);
-    }
-  else
-    {
-      if (op0)
-        vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
-                                       op0, vec_oprnds0, vectype0);
-      if (op1)
-        vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
-                                       op1, vec_oprnds1, vectype1);
-      if (op2)
-        vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
-                                       op2, vec_oprnds2, vectype2);
-      if (op3)
-        vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
-                                       op3, vec_oprnds3, vectype3);
-    }
+  if (op0)
+    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_oprnds0);
+  if (op1)
+    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[1], vec_oprnds1);
+  if (op2)
+    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[2], vec_oprnds2);
+  if (op3)
+    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[3], vec_oprnds3);
 }
 
 
 void
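For readability, here is vect_get_vec_defs as it reads after this hunk, reconstructed purely from the '+' lines above; nothing outside this hunk is assumed:

void
vect_get_vec_defs (vec_info *, stmt_vec_info, slp_tree slp_node,
                   unsigned,
                   tree op0, tree, vec<tree> *vec_oprnds0,
                   tree op1, tree, vec<tree> *vec_oprnds1,
                   tree op2, tree, vec<tree> *vec_oprnds2,
                   tree op3, tree, vec<tree> *vec_oprnds3)
{
  /* Every operand now comes straight from the corresponding SLP child;
     the old vect_get_vec_defs_for_operand fallback is gone.  */
  if (op0)
    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_oprnds0);
  if (op1)
    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[1], vec_oprnds1);
  if (op2)
    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[2], vec_oprnds2);
  if (op3)
    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[3], vec_oprnds3);
}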
@@ -1566,8 +1475,7 @@ check_load_store_for_partial_vectors (loop_vec_info loop_vinfo, tree vectype,
   bool is_load = (vls_type == VLS_LOAD);
   if (memory_access_type == VMAT_LOAD_STORE_LANES)
     {
-      if (slp_node)
-        nvectors /= group_size;
+      nvectors /= group_size;
       internal_fn ifn
        = (is_load ? vect_load_lanes_supported (vectype, group_size, true,
                                                elsvals)
@@ -2768,9 +2676,8 @@ vect_check_store_rhs (vec_info *vinfo, stmt_vec_info stmt_info,
          && internal_store_fn_p (gimple_call_internal_fn (call)))
        op_no = internal_fn_stored_value_index (gimple_call_internal_fn (call));
     }
-  if (slp_node)
-    op_no = vect_slp_child_index_for_operand
-              (stmt_info->stmt, op_no, STMT_VINFO_GATHER_SCATTER_P (stmt_info));
+  op_no = vect_slp_child_index_for_operand
+            (stmt_info->stmt, op_no, STMT_VINFO_GATHER_SCATTER_P (stmt_info));
 
   enum vect_def_type rhs_dt;
   tree rhs_vectype;
@@ -3081,8 +2988,7 @@ vect_build_one_scatter_store_call (vec_info *vinfo, stmt_vec_info stmt_info,
    containing loop.  */
 
 static void
-vect_get_gather_scatter_ops (loop_vec_info loop_vinfo,
-                             class loop *loop, stmt_vec_info stmt_info,
+vect_get_gather_scatter_ops (class loop *loop,
                              slp_tree slp_node, gather_scatter_info *gs_info,
                              tree *dataref_ptr, vec<tree> *vec_offset)
 {
@@ -3095,16 +3001,7 @@ vect_get_gather_scatter_ops (loop_vec_info loop_vinfo,
       new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
       gcc_assert (!new_bb);
     }
-  if (slp_node)
-    vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_offset);
-  else
-    {
-      unsigned ncopies
-        = vect_get_num_copies (loop_vinfo, gs_info->offset_vectype);
-      vect_get_vec_defs_for_operand (loop_vinfo, stmt_info, ncopies,
-                                     gs_info->offset, vec_offset,
-                                     gs_info->offset_vectype);
-    }
+  vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_offset);
 }
 
 /* Prepare to implement a grouped or strided load or store using
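As a reference, a minimal sketch of vect_get_gather_scatter_ops after the two hunks above; the setup that inserts statements on the loop preheader edge is unchanged and elided here (see the context lines in the hunk):

static void
vect_get_gather_scatter_ops (class loop *loop,
                             slp_tree slp_node, gather_scatter_info *gs_info,
                             tree *dataref_ptr, vec<tree> *vec_offset)
{
  /* ... unchanged code that builds *dataref_ptr and emits the statement
     sequence on the preheader edge ... */
  /* The vector offsets now always come from the first SLP child.  */
  vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_offset);
}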
@@ -5025,12 +4922,8 @@ vect_create_vectorized_demotion_stmts (vec_info *vinfo, vec<tree> *vec_oprnds,
       else
        {
          /* This is the last step of the conversion sequence.  Store the
-            vectors in SLP_NODE or in vector info of the scalar statement
-            (or in STMT_VINFO_RELATED_STMT chain).  */
-         if (slp_node)
-           slp_node->push_vec_def (new_stmt);
-         else
-           STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
+            vectors in SLP_NODE.  */
+         slp_node->push_vec_def (new_stmt);
        }
     }
 
@@ -8738,8 +8631,7 @@ vectorizable_store (vec_info *vinfo,
        vect_get_slp_defs (mask_node, &vec_masks);
 
       if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
-       vect_get_gather_scatter_ops (loop_vinfo, loop, stmt_info,
-                                    slp_node, &gs_info,
+       vect_get_gather_scatter_ops (loop, slp_node, &gs_info,
                                     &dataref_ptr, &vec_offsets);
       else
        dataref_ptr
@@ -10638,8 +10530,7 @@ vectorizable_load (vec_info *vinfo,
       if (!costing_p)
        {
          if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
-           vect_get_gather_scatter_ops (loop_vinfo, loop, stmt_info,
-                                        slp_node, &gs_info, &dataref_ptr,
+           vect_get_gather_scatter_ops (loop, slp_node, &gs_info, &dataref_ptr,
                                         &vec_offsets);
          else
            dataref_ptr