Commit 67af7c4

[GPU] Fixed wrong eltwise handling as bias (#30417)
### Details:
- Fixed a case where an eltwise operation was wrongly fused into its parent primitive as a bias

### Tickets:
- CVS-166697
1 parent 071cb7f commit 67af7c4

2 files changed: 31 additions and 2 deletions
src/plugins/intel_gpu/src/graph/graph_optimizer/prepare_primitive_fusing.cpp

Lines changed: 4 additions & 2 deletions
@@ -253,10 +253,9 @@ void prepare_primitive_fusing::fuse_bias(program &p) {
             return node.as<fully_connected>().get_primitive()->input_size == 3;
         };
 
-
+        auto broadcast_type = eltw_node.get_primitive()->broadcast_spec.m_type;
         if (node->get_output_layout().is_dynamic()) {
             if (eltw_node.get_dependency(non_const_dep_idx).is_type<fully_connected>()) {
-                auto broadcast_type = eltw_node.get_primitive()->broadcast_spec.m_type;
                 if (broadcast_type != ov::op::AutoBroadcastType::NUMPY && broadcast_type != ov::op::AutoBroadcastType::NONE)
                     continue;
 
@@ -298,6 +297,9 @@ void prepare_primitive_fusing::fuse_bias(program &p) {
                 const_dep.get_output_layout().count() != static_cast<size_t>(out_features)) {
                 continue;
             }
+            // Handle eltw as a bias only when eltw's shape is to be broadcasted to parent node
+            if (const_dep.get_output_layout().count() > node->get_dependency(non_const_dep_idx).get_output_layout().count())
+                continue;
         }
         auto& bias_node = eltw_node.get_dependency(const_dep_idx);
         primitive_id bias_name = bias_node.id();

src/plugins/intel_gpu/tests/unit/fusions/convolution_fusion_test.cpp

Lines changed: 27 additions & 0 deletions
@@ -466,6 +466,7 @@ class ConvActivationTestOnednn : public BaseFusingTest<conv_activation_onednn_te
 #define CASE_CONV_FP16_14 { 1, 32, 55, 1 }, { 1, 32, 55, 1 }, { 32, 1, 1, 3, 1 }, { 1, 1 }, { 1, 1 }, { 1, 1 }, 32, data_types::f16, format::b_fs_yx_fsv16, data_types::f16, format::gs_oiyx_gsv16, data_types::f16, format::bfyx
 #define CASE_CONV_FP16_15 { 1, 39, 55, 1 }, { 1, 39, 55, 1 }, { 39, 1, 1, 3, 1 }, { 1, 1 }, { 1, 1 }, { 1, 1 }, 39, data_types::f16, format::b_fs_yx_fsv16, data_types::f16, format::gs_oiyx_gsv16, data_types::f16, format::bfyx
 #define CASE_CONV_FP16_16 { 1, 3, 112, 112, 8 }, { 1, 32, 56, 56, 8 }, { 32, 3, 3, 3, 1 }, { 2, 2, 1 }, { 1, 1, 0 }, { 1, 1, 1 }, 1, data_types::f16, format::bfzyx, data_types::f16, format::bfzyx, data_types::f16, format::bfzyx
+#define CASE_CONV_FP16_17 { 1, 512, 1, 1}, { 1, 1, 1, 1}, { 1, 512, 1, 1 }, { 1, 1 }, { 0, 0 }, { 1, 1 }, 1, data_types::f16, format::bfyx, data_types::f16, format::bfyx, data_types::f16, format::bfyx
 
 #define CASE_CONV_U8S8_1 { 1, 15, 4, 5 }, { 1, 30, 2, 3 }, { 30, 15, 3, 3 }, { 1, 1 }, { 0, 0 }, { 1, 1 }, 1, data_types::u8, format::bfyx, data_types::i8, format::bfyx, data_types::f32, format::bfyx
 #define CASE_CONV_U8S8_2 { 1, 15, 5, 5 }, { 1, 30, 3, 3 }, { 30, 15, 3, 3 }, { 1, 1 }, { 0, 0 }, { 1, 1 }, 1, data_types::u8, format::bfyx, data_types::i8, format::bfyx, data_types::f32, format::bfyx
 
@@ -790,6 +791,32 @@ INSTANTIATE_TEST_SUITE_P(fusings_gpu, conv_fp32_wrong_bias, ::testing::ValuesIn(
     convolution_test_params{ CASE_CONV_FP32_15, 3, 3, 3 },
 }));
 
+class conv_fp16_wrong_bias : public ConvFusingTest {};
+TEST_P(conv_fp16_wrong_bias, basic) {
+    // Check case when eltwise add dependency has shape [1, 1, X, Y] and X*Y == CONV_OUT_FEATURES
+    auto p = GetParam();
+    ov::PartialShape eltw_data_shape = get_input_layout(p).get_partial_shape();
+    eltw_data_shape[1] *= 128;
+
+    auto eltw_data_layout = layout{eltw_data_shape, p.default_type, format::bfyx};
+
+    create_topologies(
+        input_layout("input", get_input_layout(p)),
+        data("weights", get_mem(get_weights_layout(p))),
+        data("non-bias", get_mem(eltw_data_layout)),
+        convolution("conv_prim", input_info("input"), "weights", "", p.groups, p.stride, p.dilation, p.pad, p.pad, format::is_grouped(get_weights_layout(p).format)),
+        eltwise("add", { input_info("conv_prim"), input_info("non-bias") }, eltwise_mode::sum),
+        reorder("reorder_bfyx", input_info("add"), p.default_format, data_types::f32)
+    );
+
+    tolerance = default_tolerance(p.default_type);
+    execute(p);
+}
+
+INSTANTIATE_TEST_SUITE_P(fusings_gpu, conv_fp16_wrong_bias, ::testing::ValuesIn(std::vector<convolution_test_params>{
+    convolution_test_params{ CASE_CONV_FP16_17, 3, 3, 3 },
+}));
+
 class conv_fp32_add_per_element_planar_const : public ConvFusingTest {};
 TEST_P(conv_fp32_add_per_element_planar_const, basic) {
     auto p = GetParam();
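Usage note: assuming the GPU fusings unit tests are built locally (the binary name, e.g. `ov_gpu_unit_tests`, depends on the build and is an assumption here, not stated in this commit), the new parameterized case can be run in isolation with a Google Test filter such as `--gtest_filter=*conv_fp16_wrong_bias*`, which selects the `fusings_gpu/conv_fp16_wrong_bias.basic` instantiation added above.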
