From 3ada2b7a29e1ab2058ab7dc701cacff548d2aae9 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 23 Aug 2018 15:54:36 +0100
Subject: COMPMID-1534: Fix LSTM/RNN Layers for NEON and FP16

Switches the default activation function in the respective datasets from
LOGISTIC to RELU.

Change-Id: I09f1ad09922ccdd6e1dc33c28a594f7ffbfe40f4
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/145436
Reviewed-by: Anthony Barbier
Tested-by: Jenkins
---
 tests/datasets/RNNLayerDataset.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/datasets/RNNLayerDataset.h b/tests/datasets/RNNLayerDataset.h
index 40d1b934f3..5f42def676 100644
--- a/tests/datasets/RNNLayerDataset.h
+++ b/tests/datasets/RNNLayerDataset.h
@@ -131,7 +131,7 @@ class SmallRNNLayerDataset final : public RNNLayerDataset
 public:
     SmallRNNLayerDataset()
     {
-        add_config(TensorShape(128U, 16U), TensorShape(128U, 32U), TensorShape(32U, 32U), TensorShape(32U), TensorShape(32U, 16U), ActivationLayerInfo());
+        add_config(TensorShape(128U, 16U), TensorShape(128U, 32U), TensorShape(32U, 32U), TensorShape(32U), TensorShape(32U, 16U), ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
    }
 };
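For context, the switch away from LOGISTIC matters most for the FP16 variants of
these tests: the logistic function 1 / (1 + exp(-x)) pushes an exponential through
half precision's narrow range (maximum finite value ~65504), so intermediates can
overflow or round badly, whereas RELU (max(x, 0)) is exact for any representable
input. The sketch below is illustrative only and is not how the library kernels
are written; it assumes a GCC/Clang compiler with the _Float16 extension
(available on AArch64 targets) and compares a naive FP16 logistic against an
FP32 reference.

#include <cmath>
#include <cstdio>

// FP32 reference logistic.
static float logistic_f32(float x) { return 1.0f / (1.0f + std::exp(-x)); }

// Naive logistic with the intermediate exp() result rounded to FP16,
// mimicking a half-precision kernel that stores intermediates in FP16.
static _Float16 logistic_f16(_Float16 x)
{
    // exp(12) ~ 162755 exceeds the FP16 maximum (~65504), so for
    // x < ~-11.1 this intermediate saturates to +inf in half precision.
    _Float16 e = (_Float16)std::exp(-(float)x);
    return (_Float16)1.0f / ((_Float16)1.0f + e);
}

// RELU needs no transcendental and introduces no rounding: it is exact
// in FP16 for any representable input.
static float relu(float x) { return x > 0.0f ? x : 0.0f; }

int main()
{
    for (float x : { -12.0f, -4.0f, 0.5f, 4.0f, 12.0f })
    {
        std::printf("x=%6.1f  logistic f32=%.6f  f16=%.6f  relu=%4.1f\n",
                    x, logistic_f32(x),
                    (float)logistic_f16((_Float16)x), relu(x));
    }
    return 0;
}

At x = -12 the FP16 logistic returns exactly 0 because the exponential saturates
to infinity, while the FP32 reference is about 6.1e-6; a test fixture comparing
the two at a relative tolerance would flag that divergence, which is presumably
why RELU is the safer default activation for a dataset shared across FP32 and
FP16 runs.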