@@ -59,10 +59,10 @@ RelativeTolerance<float> tolerance_float_sqrt(0.0001f);
 constexpr AbsoluteTolerance<int16_t> tolerance_qsymm16(1);
 
 const auto NeonActivationFunctionsDataset = concat(datasets::ActivationFunctions(),
-                                                   framework::dataset::make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::SWISH }));
+                                                   make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::SWISH }));
 
 /** Input data sets. */
-const auto ActivationDataset = combine(framework::dataset::make("InPlace", { false, true }), NeonActivationFunctionsDataset, framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto ActivationDataset = combine(make("InPlace", { false, true }), NeonActivationFunctionsDataset, make("AlphaBeta", { 0.5f, 1.f }));
 const auto ActivationDatasetForPaddingAfterConfigure = combine(
     make("InPlace", { false, true }),
     NeonActivationFunctionsDataset,
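
(Aside on the rename above: dropping the framework::dataset:: qualifier only
compiles if make is already in scope. The declaration itself sits outside this
diff, so the following is a hedged sketch of the presumed pattern; the header
path and using-declarations are assumptions based on how other ACL test files
are laid out, not lines from this change.)

    // Sketch only, not part of the diff. Assumes the ACL test framework
    // headers; the using-declarations are what let the bare make(...) and
    // combine(...) calls compile without the framework::dataset:: prefix.
    #include "tests/framework/datasets/Datasets.h"

    using arm_compute::test::framework::dataset::combine;
    using arm_compute::test::framework::dataset::make; // assumed declaration

    // Before: combine(framework::dataset::make("InPlace", { false, true }), ...)
    // After, with the names imported:
    const auto ExampleDataset = combine(make("InPlace", { false, true }),
                                        make("AlphaBeta", { 0.5f, 1.f }));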
@@ -163,19 +163,19 @@ TEST_CASE(ActivationAPI, framework::DatasetMode::ALL)
 // *INDENT-OFF*
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(
-               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
+               make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
                                    TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                    TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching shapes
                                  }),
-               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
+               make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
                                    TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                    TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                                  }),
-               framework::dataset::make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
+               make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                         ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                         ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                                       }),
-               framework::dataset::make("Expected", { false, true, false })
+               make("Expected", { false, true, false })
               ),
              input_info, output_info, act_info, expected)
 {
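
(In the Validate hunk above, zip pairs the i-th entry of each named dataset,
so each row across InputInfo/OutputInfo/ActivationInfo/Expected is one test
case: F32 input against F16 output fails, the matching F32 pair passes, and
the 27U-vs-32U shape mismatch fails. Below is a self-contained toy zip, an
illustration of that row-wise pairing rather than the framework's own
implementation.)

    // Toy zip, for illustration only: pairs equally sized columns row-wise,
    // the way the framework feeds one row per Validate test case.
    #include <cstdio>
    #include <string>
    #include <tuple>
    #include <vector>

    template <typename... Ts>
    std::vector<std::tuple<Ts...>> zip(const std::vector<Ts> &...columns)
    {
        std::vector<std::tuple<Ts...>> rows;
        const std::size_t n = std::get<0>(std::tie(columns...)).size();
        for (std::size_t i = 0; i < n; ++i)
        {
            rows.emplace_back(columns[i]...);
        }
        return rows;
    }

    int main()
    {
        // Rows mirror the cases above: type mismatch, valid, shape mismatch.
        const auto rows = zip(std::vector<std::string>{ "F32->F16", "F32->F32", "27U vs 32U" },
                              std::vector<bool>{ false, true, false });
        for (const auto &[desc, expected] : rows)
        {
            std::printf("%-11s expected=%s\n", desc.c_str(), expected ? "true" : "false");
        }
        return 0;
    }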
@@ -199,7 +199,7 @@ TEST_CASE(SqrtBoundaryValue, framework::DatasetMode::ALL)
     test_float_sqrt_boundary_value<half>();
 }
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<half>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), ActivationDataset,
-                                                                                                      framework::dataset::make("DataType",
+                                                                                                      make("DataType",
                                                                                                       DataType::F16)))
 {
     if (CPUInfo::get().has_fp16())
@@ -294,7 +294,7 @@ template <typename T>
 using NEActivationLayerWithPaddingQuantizedFixture = ActivationWithPaddingValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;
 
 /** Input data sets. */
-const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
+const auto QuantizedActivationFunctionsDataset = make("ActivationFunction",
 {
     ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
     ActivationLayerInfo::ActivationFunction::RELU,
@@ -307,9 +307,9 @@ const auto QuantizedActivationFunctionsDataset = framework::dataset::make("Activ
 #endif
 });
 
-const auto QuantizedActivationDataset = combine(framework::dataset::make("InPlace", { false }),
-                                                concat(QuantizedActivationFunctionsDataset, framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH)),
-                                                framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto QuantizedActivationDataset = combine(make("InPlace", { false }),
+                                                concat(QuantizedActivationFunctionsDataset, make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH)),
+                                                make("AlphaBeta", { 0.5f, 1.f }));
 const auto QuantizedActivationDatasetForPaddingAfterConfigure = combine(
     make("InPlace", { false }),
     concat(QuantizedActivationFunctionsDataset,
@@ -321,9 +321,9 @@ const auto QuantizedActivationDatasetForPaddingAfterConfigure = combine(
 TEST_SUITE(Quantized)
 TEST_SUITE(QASYMM8)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<uint8_t>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), QuantizedActivationDataset,
-                                                                                                                  framework::dataset::make("DataType",
+                                                                                                                  make("DataType",
                                                                                                                   DataType::QASYMM8),
-                                                                                                                  framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
+                                                                                                                  make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
@@ -343,9 +343,9 @@ TEST_SUITE_END() // QASYMM8
 
 TEST_SUITE(QASYMM8_SIGNED)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int8_t>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), QuantizedActivationDataset,
-                                                                                                                 framework::dataset::make("DataType",
+                                                                                                                 make("DataType",
                                                                                                                  DataType::QASYMM8_SIGNED),
-                                                                                                                 framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.5f, 10.0f) })))
+                                                                                                                 make("QuantizationInfo", { QuantizationInfo(0.5f, 10.0f) })))
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
@@ -364,14 +364,14 @@ FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingQuanti
 TEST_SUITE_END() // QASYMM8_SIGNED
 
 /** Input data sets. */
-const auto Int16QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
+const auto Int16QuantizedActivationFunctionsDataset = make("ActivationFunction",
 {
     ActivationLayerInfo::ActivationFunction::LOGISTIC,
     ActivationLayerInfo::ActivationFunction::TANH,
     ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
 });
-const auto Int16QuantizedActivationDataset = combine(framework::dataset::make("InPlace", { false }), Int16QuantizedActivationFunctionsDataset,
-                                                     framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto Int16QuantizedActivationDataset = combine(make("InPlace", { false }), Int16QuantizedActivationFunctionsDataset,
+                                                     make("AlphaBeta", { 0.5f, 1.f }));
 
 const auto Int16QuantizedActivationDatasetForPaddingAfterConfigure = combine(
     make("InPlace", { false }),
@@ -381,9 +381,9 @@ const auto Int16QuantizedActivationDatasetForPaddingAfterConfigure = combine(
 
 TEST_SUITE(QSYMM16)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int16_t>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), Int16QuantizedActivationDataset,
-                                                                                                                  framework::dataset::make("DataType",
+                                                                                                                  make("DataType",
                                                                                                                   DataType::QSYMM16),
-                                                                                                                  framework::dataset::make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) })))
+                                                                                                                  make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) })))
 {
     // Validate output
     validate(Accessor(_target), _reference, tolerance_qsymm16);
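
(For context on the QSYMM16 numbers above, a worked example rather than part
of the diff: QuantizationInfo(1.f / 32768.f, 0.f) maps the signed 16-bit range
onto roughly [-1, 1), so the 1-LSB tolerance_qsymm16 declared at the top of
this file corresponds to about 3.05e-05 in real-valued terms.)

    // Worked example, illustration only: what a 1-LSB QSYMM16 tolerance
    // means in real values for scale = 1/32768, offset = 0.
    #include <cstdio>

    int main()
    {
        const float scale         = 1.f / 32768.f; // QuantizationInfo scale above
        const int   tolerance_lsb = 1;             // matches tolerance_qsymm16
        // QSYMM16 is symmetric: real = scale * q, with q in [-32768, 32767].
        std::printf("representable range: [%f, %f]\n", scale * -32768, scale * 32767);
        std::printf("real tolerance:      %g\n", scale * tolerance_lsb); // ~3.05e-05
        return 0;
    }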