diff options
author | Alex Gilday <alexander.gilday@arm.com> | 2018-03-23 14:16:00 +0000 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-11-02 16:49:16 +0000 |
commit | 7da29b6b12ff319ed2b6e2c46588dfa1991556fb (patch) | |
tree | 24e766d916ae8da32deb5cd4fac4d82207cbe6ea /tests/benchmark/fixtures/ConvolutionLayerFixture.h | |
parent | f92cb23f06572fe73ec5ab9da0ec5713724c2dde (diff) | |
download | ComputeLibrary-7da29b6b12ff319ed2b6e2c46588dfa1991556fb.tar.gz |
COMPMID-1017: Implement dilated convolution in NEON, OpenCL, and GC
Change-Id: If4626ec9e215e14dffe22e80812da5bac84a52e2
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/125734
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests/benchmark/fixtures/ConvolutionLayerFixture.h')
-rw-r--r-- | tests/benchmark/fixtures/ConvolutionLayerFixture.h | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/tests/benchmark/fixtures/ConvolutionLayerFixture.h b/tests/benchmark/fixtures/ConvolutionLayerFixture.h
index 9815040d42..7558b4c9a8 100644
--- a/tests/benchmark/fixtures/ConvolutionLayerFixture.h
+++ b/tests/benchmark/fixtures/ConvolutionLayerFixture.h
@@ -42,7 +42,7 @@ class ConvolutionLayerFixture : public framework::Fixture
 {
 public:
     template <typename...>
-    void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape biases_shape, TensorShape dst_shape, PadStrideInfo info, DataType data_type, int batches)
+    void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape biases_shape, TensorShape dst_shape, PadStrideInfo info, Size2D dilation, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes
         const unsigned int fixed_point_position = 4;
@@ -57,7 +57,7 @@ public:
         dst = create_tensor<TensorType>(dst_shape, data_type, 1, fixed_point_position);

         // Create and configure function
-        conv_layer.configure(&src, &weights, &biases, &dst, info);
+        conv_layer.configure(&src, &weights, &biases, &dst, info, WeightsInfo(), dilation);

         // Allocate tensors
         src.allocator()->allocate();