diff options
author | Georgios Pinitas <georgios.pinitas@arm.com> | 2017-10-04 16:53:58 +0100 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-11-02 16:35:24 +0000 |
commit | ff421f2100e0e9e532f5fe78585300546af61690 (patch) | |
tree | 9ba5a1bfe64b5b10f70c64a965f9c5ca14de9ce3 /src/graph/nodes/SoftmaxLayer.cpp | |
parent | 925ca0f7402115da3bffb21c04fca0bc822c9b38 (diff) | |
download | ComputeLibrary-ff421f2100e0e9e532f5fe78585300546af61690.tar.gz |
COMPMID-601: Add GraphContext
GraphContext holds all the information about the hints that need to be
passed to the nodes. As these might expand, it serves as a centralized
class for such information.
Change-Id: I0b5527630fb97cc5fa500db0bac8307ff2ea36e6
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/90300
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'src/graph/nodes/SoftmaxLayer.cpp')
-rw-r--r-- | src/graph/nodes/SoftmaxLayer.cpp | 28 |
1 file changed, 14 insertions, 14 deletions
diff --git a/src/graph/nodes/SoftmaxLayer.cpp b/src/graph/nodes/SoftmaxLayer.cpp index fee88970fc..9e798ef7cc 100644 --- a/src/graph/nodes/SoftmaxLayer.cpp +++ b/src/graph/nodes/SoftmaxLayer.cpp @@ -34,7 +34,7 @@ using namespace arm_compute::graph; namespace { -template <typename SoftmaxType, typename TensorType, Hint hint> +template <typename SoftmaxType, typename TensorType, TargetHint hint> std::unique_ptr<arm_compute::IFunction> instantiate_function(ITensor *input, ITensor *output) { auto softmax = arm_compute::support::cpp14::make_unique<SoftmaxType>(); @@ -45,36 +45,36 @@ std::unique_ptr<arm_compute::IFunction> instantiate_function(ITensor *input, ITe return std::move(softmax); } -template <Hint hint> +template <TargetHint target_hint> std::unique_ptr<arm_compute::IFunction> instantiate(ITensor *input, ITensor *output); template <> -std::unique_ptr<arm_compute::IFunction> instantiate<Hint::OPENCL>(ITensor *input, ITensor *output) +std::unique_ptr<arm_compute::IFunction> instantiate<TargetHint::OPENCL>(ITensor *input, ITensor *output) { - return instantiate_function<arm_compute::CLSoftmaxLayer, arm_compute::CLTensor, Hint::OPENCL>(input, output); + return instantiate_function<arm_compute::CLSoftmaxLayer, arm_compute::CLTensor, TargetHint::OPENCL>(input, output); } template <> -std::unique_ptr<arm_compute::IFunction> instantiate<Hint::NEON>(ITensor *input, ITensor *output) +std::unique_ptr<arm_compute::IFunction> instantiate<TargetHint::NEON>(ITensor *input, ITensor *output) { - return instantiate_function<arm_compute::NESoftmaxLayer, arm_compute::Tensor, Hint::NEON>(input, output); + return instantiate_function<arm_compute::NESoftmaxLayer, arm_compute::Tensor, TargetHint::NEON>(input, output); } } // namespace -std::unique_ptr<arm_compute::IFunction> SoftmaxLayer::instantiate_node(Hint hint, ITensor *input, ITensor *output) +std::unique_ptr<arm_compute::IFunction> SoftmaxLayer::instantiate_node(GraphContext &ctx, ITensor *input, ITensor *output) { 
std::unique_ptr<arm_compute::IFunction> func; - _hint = hint; - _input = input; - _output = output; + _target_hint = ctx.hints().target_hint(); + _input = input; + _output = output; - if(_hint == Hint::OPENCL) + if(_target_hint == TargetHint::OPENCL) { - func = instantiate<Hint::OPENCL>(input, output); + func = instantiate<TargetHint::OPENCL>(input, output); } else { - func = instantiate<Hint::NEON>(input, output); + func = instantiate<TargetHint::NEON>(input, output); } return func; @@ -82,7 +82,7 @@ std::unique_ptr<arm_compute::IFunction> SoftmaxLayer::instantiate_node(Hint hint void SoftmaxLayer::print_info() { - if(_hint == Hint::OPENCL) + if(_target_hint == TargetHint::OPENCL) { std::cout << "Instantiating CLSoftmaxLayer"; } |