about summary refs log tree commit diff
path: root/tests/validation/fixtures
diff options
context:
space:
mode:
authorAndrew Mundy <andrew.mundy@arm.com>2018-03-15 16:47:03 +0000
committerAnthony Barbier <anthony.barbier@arm.com>2018-11-02 16:49:16 +0000
commit4d9379a9d3ada794f532ce8acdc8607f4faa2b21 (patch)
tree14ba02ebcdaf6cb927e9422e45cbab6456c9a097 /tests/validation/fixtures
parent3f217ec4ff11e20fe686beb9a28d0bbd80a56cd6 (diff)
downloadComputeLibrary-4d9379a9d3ada794f532ce8acdc8607f4faa2b21.tar.gz
COMPMID-1040: Added support for nullptr bias tensor in NEWinogradLayer
Change-Id: Ie624ee17c63dede711d913a82819e128954a57c9
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/124861
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'tests/validation/fixtures')
-rw-r--r--tests/validation/fixtures/WinogradLayerFixture.h13
1 file changed, 10 insertions, 3 deletions
diff --git a/tests/validation/fixtures/WinogradLayerFixture.h b/tests/validation/fixtures/WinogradLayerFixture.h
index 5210cbf720..481eb93e80 100644
--- a/tests/validation/fixtures/WinogradLayerFixture.h
+++ b/tests/validation/fixtures/WinogradLayerFixture.h
@@ -48,7 +48,7 @@ namespace validation
{
using namespace arm_compute::misc::shape_calculator;
-template <typename TensorType, typename AccessorType, typename FunctionType, typename T>
+template <typename TensorType, typename AccessorType, typename FunctionType, typename T, bool use_bias = true>
class WinogradConvolutionLayerValidationFixture : public framework::Fixture
{
public:
@@ -93,7 +93,7 @@ protected:
// Create and configure function
FunctionType conv;
- conv.configure(&src, &weights, &bias, &dst, info, act_info);
+ conv.configure(&src, &weights, (use_bias) ? &bias : nullptr, &dst, info, act_info);
ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
ARM_COMPUTE_EXPECT(weights.info()->is_resizable(), framework::LogLevel::ERRORS);
@@ -133,7 +133,14 @@ protected:
// Fill reference
fill(src, 0, -1.f, 1.f);
fill(weights, 1, -1.f, 1.f);
- fill(bias, 2, -1.f, 1.f);
+ if(use_bias)
+ {
+ fill(bias, 2, -1.f, 1.f);
+ }
+ else
+ {
+ fill(bias, 2, 0.f, 0.f);
+ }
return (act_info.enabled()) ? reference::activation_layer<T>(reference::convolution_layer<T>(src, weights, bias, output_shape, info), act_info) : reference::convolution_layer<T>(src, weights, bias,
output_shape, info);