Diffstat (limited to 'src/graph/nodes/SoftmaxLayer.cpp')
-rw-r--r--  src/graph/nodes/SoftmaxLayer.cpp  63
1 file changed, 10 insertions(+), 53 deletions(-)
diff --git a/src/graph/nodes/SoftmaxLayer.cpp b/src/graph/nodes/SoftmaxLayer.cpp
index 3cdbc9c96a..62057c770c 100644
--- a/src/graph/nodes/SoftmaxLayer.cpp
+++ b/src/graph/nodes/SoftmaxLayer.cpp
@@ -23,70 +23,27 @@
  */
 #include "arm_compute/graph/nodes/SoftmaxLayer.h"
 
-#include "arm_compute/runtime/CL/CLTensor.h"
-#include "arm_compute/runtime/CL/functions/CLSoftmaxLayer.h"
-#include "arm_compute/runtime/NEON/functions/NESoftmaxLayer.h"
-#include "arm_compute/runtime/Tensor.h"
+#include "arm_compute/graph/NodeContext.h"
+#include "arm_compute/graph/OperationRegistry.h"
 #include "support/ToolchainSupport.h"
-#include "utils/TypePrinter.h"
 
 using namespace arm_compute::graph;
 
-namespace
-{
-template <typename SoftmaxType, typename TensorType, TargetHint hint>
-std::unique_ptr<arm_compute::IFunction> instantiate_function(arm_compute::ITensor *input, arm_compute::ITensor *output)
-{
-    auto softmax = arm_compute::support::cpp14::make_unique<SoftmaxType>();
-    softmax->configure(
-        dynamic_cast<TensorType *>(input),
-        dynamic_cast<TensorType *>(output));
-
-    return std::move(softmax);
-}
-
-template <TargetHint target_hint>
-std::unique_ptr<arm_compute::IFunction> instantiate(arm_compute::ITensor *input, arm_compute::ITensor *output);
-
-template <>
-std::unique_ptr<arm_compute::IFunction> instantiate<TargetHint::OPENCL>(arm_compute::ITensor *input, arm_compute::ITensor *output)
-{
-    return instantiate_function<arm_compute::CLSoftmaxLayer, arm_compute::ICLTensor, TargetHint::OPENCL>(input, output);
-}
-
-template <>
-std::unique_ptr<arm_compute::IFunction> instantiate<TargetHint::NEON>(arm_compute::ITensor *input, arm_compute::ITensor *output)
-{
-    return instantiate_function<arm_compute::NESoftmaxLayer, arm_compute::ITensor, TargetHint::NEON>(input, output);
-}
-} // namespace
-
 std::unique_ptr<arm_compute::IFunction> SoftmaxLayer::instantiate_node(GraphContext &ctx, ITensorObject *input, ITensorObject *output)
 {
     ARM_COMPUTE_ERROR_ON(input == nullptr || input->tensor() == nullptr);
     ARM_COMPUTE_ERROR_ON(output == nullptr || output->tensor() == nullptr);
 
-    std::unique_ptr<arm_compute::IFunction> func;
-    _target_hint = ctx.hints().target_hint();
-
     arm_compute::ITensor *in  = input->tensor();
     arm_compute::ITensor *out = output->tensor();
+    _target_hint              = ctx.hints().target_hint();
 
-    if(_target_hint == TargetHint::OPENCL)
-    {
-        func = instantiate<TargetHint::OPENCL>(in, out);
-        ARM_COMPUTE_LOG_GRAPH_INFO("Instantiating CLSoftmaxLayer");
-    }
-    else
-    {
-        func = instantiate<TargetHint::NEON>(in, out);
-        ARM_COMPUTE_LOG_GRAPH_INFO("Instantiating NESoftmaxLayer");
-    }
-
-    ARM_COMPUTE_LOG_GRAPH_INFO(" Data Type: " << in->info()->data_type()
-                               << " Input shape: " << in->info()->tensor_shape()
-                               << " Output shape: " << out->info()->tensor_shape()
-                               << std::endl);
+    // Create node context
+    NodeContext node_ctx(OperationType::SoftmaxLayer);
+    node_ctx.set_target(_target_hint);
+    node_ctx.add_input(in);
+    node_ctx.add_output(out);
 
-    return func;
+    // Get function
+    return OperationRegistry::get().find_operation(OperationType::SoftmaxLayer, _target_hint)->configure(node_ctx);
 }
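
Note: the patch replaces per-node backend selection (the removed instantiate<TargetHint::...> templates and if/else on the target hint) with a lookup in a central registry keyed by operation type and target. The standalone sketch below only models that dispatch pattern; every name in it (IOperation, SimpleRegistry, add_operation, NeonSoftmaxOperation) is illustrative and hypothetical, not the arm_compute::graph API. It is meant to show how a call shaped like OperationRegistry::get().find_operation(type, target)->configure(ctx) can resolve a backend function without the node knowing about CL or NEON classes.

// Minimal, self-contained C++14 sketch of registry-based operation dispatch.
// All types here are stand-ins, NOT the arm_compute::graph classes in the patch.
#include <iostream>
#include <map>
#include <memory>
#include <utility>

enum class OperationType { SoftmaxLayer };
enum class TargetHint { NEON, OPENCL };

// Stand-in for a configured backend function (arm_compute::IFunction in ACL).
struct IFunction
{
    virtual ~IFunction() = default;
    virtual void run() = 0;
};

// Stand-in for a registered backend operation. In the patch, configure()
// receives a NodeContext carrying target, inputs and outputs; omitted here.
struct IOperation
{
    virtual ~IOperation() = default;
    virtual std::unique_ptr<IFunction> configure() = 0;
};

// Registry keyed by (operation type, target), mirroring find_operation(type, target).
class SimpleRegistry
{
public:
    static SimpleRegistry &get()
    {
        static SimpleRegistry instance;
        return instance;
    }
    void add_operation(OperationType type, TargetHint target, std::unique_ptr<IOperation> op)
    {
        _ops[std::make_pair(type, target)] = std::move(op);
    }
    IOperation *find_operation(OperationType type, TargetHint target)
    {
        auto it = _ops.find(std::make_pair(type, target));
        return (it != _ops.end()) ? it->second.get() : nullptr;
    }

private:
    std::map<std::pair<OperationType, TargetHint>, std::unique_ptr<IOperation>> _ops;
};

// Hypothetical NEON softmax operation: builds and returns the backend function.
struct NeonSoftmaxOperation : IOperation
{
    std::unique_ptr<IFunction> configure() override
    {
        struct NeonSoftmaxFunction : IFunction
        {
            void run() override { std::cout << "running NEON softmax\n"; }
        };
        return std::make_unique<NeonSoftmaxFunction>();
    }
};

int main()
{
    // A backend registers its operation once...
    SimpleRegistry::get().add_operation(OperationType::SoftmaxLayer, TargetHint::NEON,
                                        std::make_unique<NeonSoftmaxOperation>());

    // ...and a graph node only needs a lookup plus configure(), as in the patch.
    IOperation *op   = SimpleRegistry::get().find_operation(OperationType::SoftmaxLayer, TargetHint::NEON);
    auto        func = op->configure();
    func->run();
}

Under this scheme, adding or removing a backend no longer touches the node source: SoftmaxLayer.cpp stops including CL/NEON runtime headers, and backend selection lives entirely in whatever populates the registry.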