diff options
author | Georgios Pinitas <georgios.pinitas@arm.com> | 2018-07-17 12:28:42 +0100 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-11-02 16:54:54 +0000 |
commit | 7d66a8e3f603f2cd363f04a750847e3f9eabdfd4 (patch) | |
tree | 0d7e1ad5bf0ecd32cd919074f756d27c351d7638 /tests/networks/AlexNetNetwork.h | |
parent | ae54e026c86aec7d6819ee3ef76372c1a3c92467 (diff) | |
download | ComputeLibrary-7d66a8e3f603f2cd363f04a750847e3f9eabdfd4.tar.gz |
COMPMID-1386: Add support for converting weights for CL.
Change-Id: I62e3ead903366baeeb1488f233a9b8b0c388c9de
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/140403
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Giorgio Arena <giorgio.arena@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'tests/networks/AlexNetNetwork.h')
-rw-r--r-- | tests/networks/AlexNetNetwork.h | 9 |
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/tests/networks/AlexNetNetwork.h b/tests/networks/AlexNetNetwork.h
index e92affe954..e15db2a110 100644
--- a/tests/networks/AlexNetNetwork.h
+++ b/tests/networks/AlexNetNetwork.h
@@ -193,6 +193,9 @@ public:
     /** Build the network */
     void build()
     {
+        FullyConnectedLayerInfo fc_info;
+        fc_info.are_weights_reshaped = _reshaped_weights;
+
         input.allocator()->init(TensorInfo(TensorShape(227U, 227U, 3U, _batches), 1, _data_type));
         output.allocator()->init(TensorInfo(TensorShape(1000U, _batches), 1, _data_type));
@@ -265,13 +268,13 @@ public:
         act5.configure(&conv5_out, &act5_out, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
         pool5.configure(&act5_out, &pool5_out, PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0)));
         // Layer 6
-        fc6.configure(&pool5_out, &w[5], &b[5], &fc6_out, true, _reshaped_weights);
+        fc6.configure(&pool5_out, &w[5], &b[5], &fc6_out, fc_info);
         act6.configure(&fc6_out, &act6_out, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
         // Layer 7
-        fc7.configure(&act6_out, &w[6], &b[6], &fc7_out, true, _reshaped_weights);
+        fc7.configure(&act6_out, &w[6], &b[6], &fc7_out, fc_info);
         act7.configure(&fc7_out, &act7_out, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU));
         // Layer 8
-        fc8.configure(&act7_out, &w[7], &b[7], &fc8_out, true, _reshaped_weights);
+        fc8.configure(&act7_out, &w[7], &b[7], &fc8_out, fc_info);
         // Softmax
         smx.configure(&fc8_out, &output);
     }