author     Manuel Bottini <manuel.bottini@arm.com>      2018-10-02 16:41:52 +0100
committer  Georgios Pinitas <georgios.pinitas@arm.com>  2018-12-05 11:37:14 +0000
commit     0d0028ca25a47dd51260e2555b336fc9f09d1df1 (patch)
tree       968e8f126a9c7d5d7d4159fbb7d906d47ad077f2 /tests
parent     8bf622a44c70564d6a7c712473cdfac3e50ac62d (diff)
COMPMID-1298: Fuse ReLu activation in CLWinogradOutputTransform
Change-Id: I9e6e43a5839d04c2e4b4552c05446efb0a5074cf
Reviewed-on: https://review.mlplatform.org/232
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Diffstat (limited to 'tests')
-rw-r--r--  tests/validation/CL/Winograd.cpp                              99
-rw-r--r--  tests/validation/fixtures/WinogradConvolutionLayerFixture.h  16
2 files changed, 68 insertions, 47 deletions
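
The patch below threads an ActivationLayerInfo through the Winograd output-transform tests: tests/validation/CL/Winograd.cpp gains a shared ActivationFunctionsDataset and appends an ActivationInfo entry to each combined dataset, while WinogradConvolutionLayerFixture.h forwards the new argument to configure() and applies the activation to the reference result. The sketch below restates the reference-side semantics the updated fixture checks; it is a minimal illustration assuming the reference helpers the fixture already uses (reference::winograd_output_transform and reference::activation_layer), and the wrapper name and include paths are not part of the patch.

#include "arm_compute/core/Types.h"
#include "tests/SimpleTensor.h"
#include "tests/validation/reference/ActivationLayer.h"
#include "tests/validation/reference/Winograd.h"

using namespace arm_compute;
using namespace arm_compute::test;
using namespace arm_compute::test::validation;

// Illustrative wrapper (not part of the patch): Winograd output transform first,
// then the activation only when one is configured. With the default
// ActivationLayerInfo() the transform output is returned unchanged, which keeps
// the pre-existing NCHW/NHWC output-transform tests behaving as before.
template <typename T>
SimpleTensor<T> output_transform_with_fused_activation(const SimpleTensor<T>     &src,
                                                       const SimpleTensor<T>     &bias,
                                                       const TensorShape         &output_shape,
                                                       const WinogradInfo        &winograd_info,
                                                       const ActivationLayerInfo &act_info)
{
    const SimpleTensor<T> transformed = reference::winograd_output_transform<T>(src, bias, output_shape, winograd_info);
    return act_info.enabled() ? reference::activation_layer<T>(transformed, act_info) : transformed;
}

On the CL side the same act_info is handed to configure(), so the device kernel is expected to produce already-activated output, which validate() then compares against this reference.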
diff --git a/tests/validation/CL/Winograd.cpp b/tests/validation/CL/Winograd.cpp
index f7f06b7f79..efa049f5ab 100644
--- a/tests/validation/CL/Winograd.cpp
+++ b/tests/validation/CL/Winograd.cpp
@@ -139,6 +139,17 @@ const auto SmallWinogradOutputTransformDatasetNHWC = datasets::SmallWinogradOutp
const auto LargeWinogradOutputTransformDatasetNCHW = datasets::LargeWinogradOutputTransformDatasetNCHW();
const auto LargeWinogradOutputTransformDatasetNHWC = datasets::LargeWinogradOutputTransformDatasetNHWC();
+
+//Activation Functions
+const auto ActivationFunctionsDataset = framework::dataset::make("ActivationInfo",
+{
+ ActivationLayerInfo(),
+ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
+ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU),
+ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU),
+ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LEAKY_RELU),
+ ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::SOFT_RELU)
+});
} // namespace
using namespace arm_compute::misc::shape_calculator;
@@ -562,16 +573,18 @@ DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(framework::da
}
TEST_SUITE(FP16)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradOutputTransformFixtureFP16, framework::DatasetMode::ALL,
- combine(SmallWinogradOutputTransformDatasetNCHW,
- framework::dataset::make("DataType", { DataType::F16 })))
+ combine(combine(SmallWinogradOutputTransformDatasetNCHW,
+ framework::dataset::make("DataType", { DataType::F16 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f16);
}
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradOutputTransformFixtureFP16, framework::DatasetMode::NIGHTLY,
- combine(LargeWinogradOutputTransformDatasetNCHW,
- framework::dataset::make("DataType", { DataType::F16 })))
+ combine(combine(LargeWinogradOutputTransformDatasetNCHW,
+ framework::dataset::make("DataType", { DataType::F16 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f16);
@@ -579,16 +592,18 @@ FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradOutputTransformFixtureFP16, framework
TEST_SUITE_END() // FP16
TEST_SUITE(FP32)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradOutputTransformFixtureFP32, framework::DatasetMode::ALL,
- combine(SmallWinogradOutputTransformDatasetNCHW,
- framework::dataset::make("DataType", { DataType::F32 })))
+ combine(combine(SmallWinogradOutputTransformDatasetNCHW,
+ framework::dataset::make("DataType", { DataType::F32 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f32);
}
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradOutputTransformFixtureFP32, framework::DatasetMode::NIGHTLY,
- combine(LargeWinogradOutputTransformDatasetNCHW,
- framework::dataset::make("DataType", { DataType::F32 })))
+ combine(combine(LargeWinogradOutputTransformDatasetNCHW,
+ framework::dataset::make("DataType", { DataType::F32 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f32);
@@ -618,16 +633,18 @@ DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(framework::da
TEST_SUITE(FP16)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradOutputTransformFixtureFP16, framework::DatasetMode::ALL,
- combine(SmallWinogradOutputTransformDatasetNHWC,
- framework::dataset::make("DataType", { DataType::F16 })))
+ combine(combine(SmallWinogradOutputTransformDatasetNHWC,
+ framework::dataset::make("DataType", { DataType::F16 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f16);
}
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradOutputTransformFixtureFP16, framework::DatasetMode::NIGHTLY,
- combine(LargeWinogradOutputTransformDatasetNHWC,
- framework::dataset::make("DataType", { DataType::F16 })))
+ combine(combine(LargeWinogradOutputTransformDatasetNHWC,
+ framework::dataset::make("DataType", { DataType::F16 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f16);
@@ -635,16 +652,18 @@ FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradOutputTransformFixtureFP16, framework
TEST_SUITE_END() // FP16
TEST_SUITE(FP32)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradOutputTransformFixtureFP32, framework::DatasetMode::ALL,
- combine(SmallWinogradOutputTransformDatasetNHWC,
- framework::dataset::make("DataType", { DataType::F32 })))
+ combine(combine(SmallWinogradOutputTransformDatasetNHWC,
+ framework::dataset::make("DataType", { DataType::F32 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f32);
}
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradOutputTransformFixtureFP32, framework::DatasetMode::NIGHTLY,
- combine(LargeWinogradOutputTransformDatasetNHWC,
- framework::dataset::make("DataType", { DataType::F32 })))
+ combine(combine(LargeWinogradOutputTransformDatasetNHWC,
+ framework::dataset::make("DataType", { DataType::F32 })),
+ framework::dataset::make("ActivationInfo",{ ActivationLayerInfo() }) ))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f32);
@@ -702,7 +721,7 @@ TEST_SUITE(Conv3x3)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer3x3Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -712,7 +731,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, fram
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer3x3Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -724,7 +743,7 @@ TEST_SUITE(Conv3x1)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer3x1Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -734,7 +753,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, fram
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer3x1Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -746,7 +765,7 @@ TEST_SUITE(Conv1x3)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer1x3Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -756,7 +775,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, fram
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer1x3Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -768,7 +787,7 @@ TEST_SUITE(Conv5x5)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer5x5Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset ),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -779,7 +798,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, fram
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer5x5Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset ),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -792,7 +811,7 @@ TEST_SUITE(Conv5x1)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer5x1Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -803,7 +822,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, fram
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer5x1Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -816,7 +835,7 @@ TEST_SUITE(Conv1x5)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer1x5Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -827,7 +846,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture, fram
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer1x5Dataset(),
framework::dataset::make("DataType", { DataType::F32 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -845,7 +864,7 @@ TEST_SUITE(Conv3x3)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer3x3Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -855,7 +874,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, fr
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer3x3Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -867,7 +886,7 @@ TEST_SUITE(Conv3x1)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer3x1Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -877,7 +896,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, fr
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer3x1Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -889,7 +908,7 @@ TEST_SUITE(Conv1x3)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer1x3Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -899,7 +918,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, fr
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer1x3Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
// Validate output
@@ -911,7 +930,7 @@ TEST_SUITE(Conv5x5)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer5x5Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -922,7 +941,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, fr
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer5x5Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -935,7 +954,7 @@ TEST_SUITE(Conv5x1)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer5x1Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -946,7 +965,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, fr
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer5x1Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -959,7 +978,7 @@ TEST_SUITE(Conv1x5)
FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(datasets::SmallWinogradConvolutionLayer1x5Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
@@ -970,7 +989,7 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLWinogradConvolutionLayerFastMathFixture16, fr
FIXTURE_DATA_TEST_CASE(RunLarge, CLWinogradConvolutionLayerFastMathFixture16, framework::DatasetMode::NIGHTLY,
combine(combine(combine(datasets::LargeWinogradConvolutionLayer1x5Dataset(),
framework::dataset::make("DataType", { DataType::F16 })),
- framework::dataset::make("ActivationLayerInfo", { ActivationLayerInfo() })),
+ ActivationFunctionsDataset),
framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
{
diff --git a/tests/validation/fixtures/WinogradConvolutionLayerFixture.h b/tests/validation/fixtures/WinogradConvolutionLayerFixture.h
index 9c9e634205..8f34654c3a 100644
--- a/tests/validation/fixtures/WinogradConvolutionLayerFixture.h
+++ b/tests/validation/fixtures/WinogradConvolutionLayerFixture.h
@@ -494,10 +494,10 @@ class WinogradOutputTransformValidationFixture : public framework::Fixture
{
public:
template <typename...>
- void setup(TensorShape input_shape, WinogradInfo winograd_info, DataType data_type)
+ void setup(TensorShape input_shape, WinogradInfo winograd_info, DataType data_type, ActivationLayerInfo act_info = ActivationLayerInfo())
{
- _target = compute_target(input_shape, winograd_info, data_type);
- _reference = compute_reference(input_shape, winograd_info, data_type);
+ _target = compute_target(input_shape, winograd_info, data_type, act_info);
+ _reference = compute_reference(input_shape, winograd_info, data_type, act_info);
}
protected:
@@ -522,7 +522,7 @@ protected:
}
}
- TensorType compute_target(const TensorShape &input_shape, const WinogradInfo &winograd_info, DataType data_type)
+ TensorType compute_target(const TensorShape &input_shape, const WinogradInfo &winograd_info, DataType data_type, ActivationLayerInfo act_info)
{
TensorShape output_shape = compute_winograd_output_transform_shape(TensorInfo(input_shape, 1, data_type), winograd_info);
@@ -533,7 +533,7 @@ protected:
// Create and configure function
FunctionType output_transform;
- output_transform.configure(&src, &bias, &dst, winograd_info);
+ output_transform.configure(&src, &bias, &dst, winograd_info, act_info);
ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(bias.info()->is_resizable(), framework::LogLevel::ERRORS);
@@ -557,7 +557,7 @@ protected:
return dst;
}
- SimpleTensor<T> compute_reference(const TensorShape &input_shape, WinogradInfo winograd_info, DataType data_type)
+ SimpleTensor<T> compute_reference(const TensorShape &input_shape, WinogradInfo winograd_info, DataType data_type, ActivationLayerInfo act_info)
{
winograd_info.output_data_layout = DataLayout::NCHW;
TensorShape output_shape = compute_winograd_output_transform_shape(TensorInfo(input_shape, 1, data_type), winograd_info);
@@ -570,7 +570,9 @@ protected:
fill(src, 0, -1.f, 1.f);
fill(bias, 1, -1.f, 1.f);
- return reference::winograd_output_transform<T>(src, bias, output_shape, winograd_info);
+ const SimpleTensor<T> winograd_output = reference::winograd_output_transform<T>(src, bias, output_shape, winograd_info);
+
+ return (act_info.enabled()) ? reference::activation_layer<T>(winograd_output, act_info) : winograd_output;
}
TensorType _target{};
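
For reference, the activation functions listed in the new ActivationFunctionsDataset follow the conventional piecewise definitions, with a and b denoting the two optional parameters of ActivationLayerInfo. The scalar helpers below are a sketch of those definitions for illustration only; they are not the library's implementation.

#include <algorithm>
#include <cmath>

// Scalar sketch of the activations exercised by ActivationFunctionsDataset,
// using their conventional definitions; a and b stand for the optional
// parameters carried by ActivationLayerInfo. Illustration only.
float relu(float x)                              { return std::max(0.0f, x); }
float bounded_relu(float x, float a)             { return std::min(a, std::max(0.0f, x)); }
float lu_bounded_relu(float x, float a, float b) { return std::min(a, std::max(b, x)); }
float leaky_relu(float x, float a)               { return x > 0.0f ? x : a * x; }
float soft_relu(float x)                         { return std::log(1.0f + std::exp(x)); }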