Commit 51fd9ac
refactor: Flatten nested concat usage in validation/NEON.

Updated NEON validation tests to use the local make alias instead of fully qualifying framework::dataset::make, streamlining dataset setup in the permute, fill-border, dequantization layer, arithmetic addition, elementwise kernel selection, direct convolution layer, and convolution layer suites. Added the missing <type_traits> include in JoinDataset.h and implemented a variadic concat overload so tests can join multiple datasets without nested calls.

Change-Id: I5b441c5f19a7fffc8f4763ed32cc2406d49cd4c5
Signed-off-by: Pablo Marquez Tello <[email protected]>
1 parent d4207ce commit 51fd9ac
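
A rough before-and-after of the test-side cleanup described above (the dataset name and values are taken from the ActivationLayer diff below, but the exact placement of the alias in each test file is an assumption):

    // Assumed: the test file pulls the helper into its local scope
    using framework::dataset::make;

    // Before: fully qualified helper
    const auto alpha_beta_old = framework::dataset::make("AlphaBeta", { 0.5f, 1.f });

    // After: short form via the local alias
    const auto alpha_beta_new = make("AlphaBeta", { 0.5f, 1.f });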

File tree: 91 files changed (+2171 additions, -1999 deletions)


tests/framework/datasets/JoinDataset.h

Lines changed: 16 additions & 4 deletions
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2017-2018 Arm Limited.
+ * Copyright (c) 2017-2018, 2025 Arm Limited.
  *
  * SPDX-License-Identifier: MIT
  *
@@ -21,13 +21,14 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
-#ifndef ARM_COMPUTE_TEST_DATASET_JOIN
-#define ARM_COMPUTE_TEST_DATASET_JOIN
+#ifndef ACL_TESTS_FRAMEWORK_DATASETS_JOINDATASET_H
+#define ACL_TESTS_FRAMEWORK_DATASETS_JOINDATASET_H
 
 #include "Dataset.h"
 
 #include <string>
 #include <tuple>
+#include <type_traits>
 #include <utility>
 
 namespace arm_compute
@@ -161,8 +162,19 @@ JoinDataset<T, U> concat(T &&dataset1, U &&dataset2)
 {
     return JoinDataset<T, U>(std::forward<T>(dataset1), std::forward<U>(dataset2));
 }
+
+template <typename T, typename U, typename V, typename... Rest>
+auto concat(T &&dataset1, U &&dataset2, V &&dataset3, Rest &&... rest)
+    -> decltype(concat(concat(std::forward<T>(dataset1), std::forward<U>(dataset2)),
+                       std::forward<V>(dataset3),
+                       std::forward<Rest>(rest)...))
+{
+    return concat(concat(std::forward<T>(dataset1), std::forward<U>(dataset2)),
+                  std::forward<V>(dataset3),
+                  std::forward<Rest>(rest)...);
+}
 } // namespace dataset
 } // namespace framework
 } // namespace test
 } // namespace arm_compute
-#endif /* ARM_COMPUTE_TEST_DATASET_JOIN */
+#endif // ACL_TESTS_FRAMEWORK_DATASETS_JOINDATASET_H
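
A short usage sketch of the new overload (illustrative names and values): a flattened call expands, through the recursive trailing-return definition above, into the same nesting that tests previously had to write by hand.

    using framework::dataset::make; // assumed local alias, as used in the test suites

    // One flattened call...
    const auto joined = concat(make("Value", { 1 }),
                               make("Value", { 2 }),
                               make("Value", { 3 }));
    // ...is equivalent to the hand-nested form required before:
    // const auto joined = concat(concat(make("Value", { 1 }), make("Value", { 2 })),
    //                            make("Value", { 3 }));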

tests/validation/NEON/ActivationLayer.cpp

Lines changed: 20 additions & 20 deletions
@@ -59,10 +59,10 @@ RelativeTolerance<float> tolerance_float_sqrt(0.0001f);
 constexpr AbsoluteTolerance<int16_t> tolerance_qsymm16(1);
 
 const auto NeonActivationFunctionsDataset = concat(datasets::ActivationFunctions(),
-    framework::dataset::make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::SWISH }));
+    make("ActivationFunction", { ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::SWISH }));
 
 /** Input data sets. */
-const auto ActivationDataset = combine(framework::dataset::make("InPlace", { false, true }), NeonActivationFunctionsDataset, framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto ActivationDataset = combine(make("InPlace", { false, true }), NeonActivationFunctionsDataset, make("AlphaBeta", { 0.5f, 1.f }));
 const auto ActivationDatasetForPaddingAfterConfigure = combine(
     make("InPlace", { false, true }),
     NeonActivationFunctionsDataset,
@@ -163,19 +163,19 @@ TEST_CASE(ActivationAPI, framework::DatasetMode::ALL)
 // *INDENT-OFF*
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(
-    framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
+    make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
                         TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                         TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching shapes
                       }),
-    framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
+    make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
                         TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                         TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
                       }),
-    framework::dataset::make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
+    make("ActivationInfo", { ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                              ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                              ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
                            }),
-    framework::dataset::make("Expected", { false, true, false})
+    make("Expected", { false, true, false})
     ),
     input_info, output_info, act_info, expected)
 {
@@ -199,7 +199,7 @@ TEST_CASE(SqrtBoundaryValue, framework::DatasetMode::ALL)
     test_float_sqrt_boundary_value<half>();
 }
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<half>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), ActivationDataset,
-    framework::dataset::make("DataType",
+    make("DataType",
     DataType::F16)))
 {
     if(CPUInfo::get().has_fp16())
@@ -294,7 +294,7 @@ template <typename T>
 using NEActivationLayerWithPaddingQuantizedFixture = ActivationWithPaddingValidationQuantizedFixture<Tensor, Accessor, NEActivationLayer, T>;
 
 /** Input data sets. */
-const auto QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
+const auto QuantizedActivationFunctionsDataset = make("ActivationFunction",
 {
     ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
     ActivationLayerInfo::ActivationFunction::RELU,
@@ -307,9 +307,9 @@ const auto QuantizedActivationFunctionsDataset = framework::dataset::make("Activ
 #endif
 });
 
-const auto QuantizedActivationDataset = combine(framework::dataset::make("InPlace", { false }),
-    concat(QuantizedActivationFunctionsDataset, framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH)),
-    framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto QuantizedActivationDataset = combine(make("InPlace", { false }),
+    concat(QuantizedActivationFunctionsDataset, make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH)),
+    make("AlphaBeta", { 0.5f, 1.f }));
 const auto QuantizedActivationDatasetForPaddingAfterConfigure = combine(
     make("InPlace", { false }),
     concat(QuantizedActivationFunctionsDataset,
@@ -321,9 +321,9 @@ const auto QuantizedActivationDatasetForPaddingAfterConfigure = combine(
 TEST_SUITE(Quantized)
 TEST_SUITE(QASYMM8)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<uint8_t>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), QuantizedActivationDataset,
-    framework::dataset::make("DataType",
+    make("DataType",
     DataType::QASYMM8),
-    framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
+    make("QuantizationInfo", { QuantizationInfo(0.1f, 128.0f) })))
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
@@ -343,9 +343,9 @@ TEST_SUITE_END() // QASYMM8
 
 TEST_SUITE(QASYMM8_SIGNED)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int8_t>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), QuantizedActivationDataset,
-    framework::dataset::make("DataType",
+    make("DataType",
     DataType::QASYMM8_SIGNED),
-    framework::dataset::make("QuantizationInfo", { QuantizationInfo(0.5f, 10.0f) })))
+    make("QuantizationInfo", { QuantizationInfo(0.5f, 10.0f) })))
 {
     // Validate output
     validate(Accessor(_target), _reference, helper::tolerance_qasymm8(_function));
@@ -364,14 +364,14 @@ FIXTURE_DATA_TEST_CASE(PaddingAfterConfigure, NEActivationLayerWithPaddingQuanti
 TEST_SUITE_END() // QASYMM8_SIGNED
 
 /** Input data sets. */
-const auto Int16QuantizedActivationFunctionsDataset = framework::dataset::make("ActivationFunction",
+const auto Int16QuantizedActivationFunctionsDataset = make("ActivationFunction",
 {
     ActivationLayerInfo::ActivationFunction::LOGISTIC,
     ActivationLayerInfo::ActivationFunction::TANH,
     ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
 });
-const auto Int16QuantizedActivationDataset = combine(framework::dataset::make("InPlace", { false }), Int16QuantizedActivationFunctionsDataset,
-    framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+const auto Int16QuantizedActivationDataset = combine(make("InPlace", { false }), Int16QuantizedActivationFunctionsDataset,
+    make("AlphaBeta", { 0.5f, 1.f }));
 
 const auto Int16QuantizedActivationDatasetForPaddingAfterConfigure = combine(
     make("InPlace", { false }),
@@ -381,9 +381,9 @@ const auto Int16QuantizedActivationDatasetForPaddingAfterConfigure = combine(
 
 TEST_SUITE(QSYMM16)
 FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerQuantizedFixture<int16_t>, framework::DatasetMode::ALL, combine(datasets::SmallShapes(), Int16QuantizedActivationDataset,
-    framework::dataset::make("DataType",
+    make("DataType",
     DataType::QSYMM16),
-    framework::dataset::make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) })))
+    make("QuantizationInfo", { QuantizationInfo(1.f / 32768.f, 0.f) })))
 {
     // Validate output
     validate(Accessor(_target), _reference, tolerance_qsymm16);
