Diffstat (limited to 'src/graph/backends/NEON/NEFunctionFactory.cpp')
-rw-r--r--  src/graph/backends/NEON/NEFunctionFactory.cpp  79
1 file changed, 79 insertions, 0 deletions
diff --git a/src/graph/backends/NEON/NEFunctionFactory.cpp b/src/graph/backends/NEON/NEFunctionFactory.cpp
index 7f97876e57..8376feb265 100644
--- a/src/graph/backends/NEON/NEFunctionFactory.cpp
+++ b/src/graph/backends/NEON/NEFunctionFactory.cpp
@@ -190,6 +190,46 @@ std::unique_ptr<IFunction> create_convolution_layer(ConvolutionLayerNode &node,
return func;
}
+/** Create a backend deconvolution layer function
+ *
+ * @param[in] node Node to create the backend function for
+ *
+ * @return Backend deconvolution layer function
+ */
+std::unique_ptr<IFunction> create_deconvolution_layer(DeconvolutionLayerNode &node, GraphContext &ctx)
+{
+ ARM_COMPUTE_LOG_GRAPH_VERBOSE("Creating NEON DeconvolutionLayer node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
+ ARM_COMPUTE_ERROR_ON(node.num_inputs() != 3);
+ ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
+
+ // Extract IO and info
+ ITensor *input = get_backing_tensor(node.input(0));
+ ITensor *weights = get_backing_tensor(node.input(1));
+ ITensor *biases = get_backing_tensor(node.input(2));
+ ITensor *output = get_backing_tensor(node.output(0));
+
+ const PadStrideInfo deconv_info = node.deconvolution_info();
+ const Size2D inner_border = node.inner_border();
+
+ // Create and configure function (we assume that functions have been validated before creation)
+ std::shared_ptr<IMemoryManager> mm = get_memory_manager(ctx, Target::NEON);
+ std::unique_ptr<IFunction> func;
+ std::string func_name;
+
+ std::tie(func, func_name) = create_named_memory_managed_function<NEDeconvolutionLayer>(std::string("NEDeconvolutionLayer"), mm,
+ input, weights, biases, output,
+ deconv_info, inner_border.x(), inner_border.y());
+
+ // Log info
+ ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << func_name
+ << " Data Type: " << input->info()->data_type()
+ << " Input shape: " << input->info()->tensor_shape()
+ << " Weights shape: " << weights->info()->tensor_shape()
+ << " Output shape: " << output->info()->tensor_shape()
+ << std::endl);
+ return func;
+}
+
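Note: both new creators go through the create_named_memory_managed_function<T> helper provided by the graph backend utilities. The following is a minimal, hypothetical sketch of that helper, assuming it simply constructs the function with the backend memory manager, forwards the remaining arguments to configure() and returns the function paired with its name; the real helper may differ in its exact signature.

#include <memory>
#include <string>
#include <tuple>
#include <utility>
#include "arm_compute/runtime/IFunction.h"
#include "arm_compute/runtime/IMemoryManager.h"
#include "support/ToolchainSupport.h"

// Hypothetical, simplified stand-in for the backend utility helper.
template <typename FunctionType, typename... ParameterType>
std::tuple<std::unique_ptr<arm_compute::IFunction>, std::string>
create_named_memory_managed_function(std::string name,
                                     std::shared_ptr<arm_compute::IMemoryManager> mm,
                                     ParameterType &&... args)
{
    // Construct the NEON function with the backend memory manager,
    // configure it with the forwarded tensors/descriptors and
    // return it together with its name for logging purposes.
    auto func = arm_compute::support::cpp14::make_unique<FunctionType>(std::move(mm));
    func->configure(std::forward<ParameterType>(args)...);
    return std::make_tuple(std::unique_ptr<arm_compute::IFunction>(std::move(func)), std::move(name));
}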
/** Create a backend layer depth concatenate function
*
* @param[in] node Node to create the backend function for
@@ -503,6 +543,41 @@ std::unique_ptr<IFunction> create_reshape_layer(ReshapeLayerNode &node)
return std::move(func);
}
+/** Create a backend resize layer function
+ *
+ * @param[in] node Node to create the backend function for
+ *
+ * @return Backend resize layer function
+ */
+std::unique_ptr<IFunction> create_resize_layer(ResizeLayerNode &node)
+{
+ ARM_COMPUTE_LOG_GRAPH_VERBOSE(
+ "Creating NEON Resize node with ID : " << node.id() << " and Name: " << node.name() << std::endl);
+ ARM_COMPUTE_ERROR_ON(node.num_inputs() != 1);
+ ARM_COMPUTE_ERROR_ON(node.num_outputs() != 1);
+
+ // Extract IO and info
+ ITensor *input = get_backing_tensor(node.input(0));
+ ITensor *output = get_backing_tensor(node.output(0));
+ ARM_COMPUTE_ERROR_ON(input == nullptr);
+ ARM_COMPUTE_ERROR_ON(output == nullptr);
+ const InterpolationPolicy policy = node.policy();
+
+ // Create and configure function
+ auto func = support::cpp14::make_unique<NEScale>();
+ func->configure(input, output, policy, BorderMode::CONSTANT);
+
+ // Log info
+ ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated NEScale"
+ << " Data Type: " << input->info()->data_type()
+ << " Input shape: " << input->info()->tensor_shape()
+ << " Output shape: " << output->info()->tensor_shape()
+ << " Interpolation: " << policy
+ << std::endl);
+
+ return std::move(func);
+}
+
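Note: outside of the graph API, the configuration that create_resize_layer() performs corresponds to a plain NEScale function. A minimal standalone sketch, with illustrative tensor shapes and a bilinear policy picked purely for the example, could look like this:

#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/NEON/functions/NEScale.h"
#include "arm_compute/runtime/Tensor.h"

void resize_example()
{
    using namespace arm_compute;

    // Illustrative shapes: upscale a 64x64x3 FP32 tensor to 128x128x3.
    Tensor src{}, dst{};
    src.allocator()->init(TensorInfo(TensorShape(64U, 64U, 3U), 1, DataType::F32));
    dst.allocator()->init(TensorInfo(TensorShape(128U, 128U, 3U), 1, DataType::F32));

    // Same call the backend creator issues: interpolation policy plus constant border handling.
    NEScale scale;
    scale.configure(&src, &dst, InterpolationPolicy::BILINEAR, BorderMode::CONSTANT);

    src.allocator()->allocate();
    dst.allocator()->allocate();
    // ... fill src with image data, then execute the function ...
    scale.run();
}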
/** Create a backend softmax layer function
*
* @param[in] node Node to create the backend function for
@@ -553,6 +628,8 @@ std::unique_ptr<IFunction> NEFunctionFactory::create(INode *node, GraphContext &
return create_batch_normalization_layer(*polymorphic_downcast<BatchNormalizationLayerNode *>(node));
case NodeType::ConvolutionLayer:
return create_convolution_layer(*polymorphic_downcast<ConvolutionLayerNode *>(node), ctx);
+ case NodeType::DeconvolutionLayer:
+ return create_deconvolution_layer(*polymorphic_downcast<DeconvolutionLayerNode *>(node), ctx);
case NodeType::DepthConcatenateLayer:
return create_depth_concatenate_layer(*polymorphic_downcast<DepthConcatenateLayerNode *>(node));
case NodeType::DepthwiseConvolutionLayer:
@@ -569,6 +646,8 @@ std::unique_ptr<IFunction> NEFunctionFactory::create(INode *node, GraphContext &
return create_pooling_layer(*polymorphic_downcast<PoolingLayerNode *>(node));
case NodeType::ReshapeLayer:
return create_reshape_layer(*polymorphic_downcast<ReshapeLayerNode *>(node));
+ case NodeType::ResizeLayer:
+ return create_resize_layer(*polymorphic_downcast<ResizeLayerNode *>(node));
case NodeType::SoftmaxLayer:
return create_softmax_layer(*polymorphic_downcast<SoftmaxLayerNode *>(node), ctx);
default: