From cd4e9abf7a165f15ccd10ac4541365d4f8a6db19 Mon Sep 17 00:00:00 2001
From: Isabella Gottardi
Date: Tue, 5 Nov 2019 17:50:27 +0000
Subject: COMPMID-2452: Add mnist example

* Add small-mnist example
* Add PrintAccessor
* Add DequantizationLayer graph node

Change-Id: I7bc8011e5a602f40fa3c47b231a2a69c804e78c2
Signed-off-by: Isabella Gottardi
Reviewed-on: https://review.mlplatform.org/c/2274
Comments-Addressed: Arm Jenkins
Tested-by: Arm Jenkins
Reviewed-by: Giorgio Arena
Reviewed-by: Georgios Pinitas
---
 arm_compute/graph/backends/FunctionHelpers.h | 38 ++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/arm_compute/graph/backends/FunctionHelpers.h b/arm_compute/graph/backends/FunctionHelpers.h
index 02bfe9dc22..960011c1e2 100644
--- a/arm_compute/graph/backends/FunctionHelpers.h
+++ b/arm_compute/graph/backends/FunctionHelpers.h
@@ -607,6 +607,44 @@ std::unique_ptr<IFunction> create_depthwise_convolution_layer(DepthwiseConvoluti
     return func;
 }
 
+/** Create a backend dequantize layer function
+ *
+ * @tparam DequantizationLayerFunction Backend dequantize function
+ * @tparam TargetInfo                  Target-specific information
+ *
+ * @param[in] node Node to create the backend function for
+ *
+ * @return Backend dequantize layer function
+ */
+template <typename DequantizationLayerFunction, typename TargetInfo>
+std::unique_ptr<IFunction> create_dequantization_layer(DequantizationLayerNode &node)
+{
+    validate_node<TargetInfo>(node, 1 /* expected inputs */, 1 /* expected outputs */);
+
+    // Extract IO and info
+    typename TargetInfo::TensorType *input  = get_backing_tensor<TargetInfo>(node.input(0));
+    typename TargetInfo::TensorType *output = get_backing_tensor<TargetInfo>(node.output(0));
+
+    ARM_COMPUTE_ERROR_ON(input == nullptr);
+    ARM_COMPUTE_ERROR_ON(output == nullptr);
+
+    // Create and configure function
+    auto func = support::cpp14::make_unique<DequantizationLayerFunction>();
+    func->configure(input, output);
+
+    // Log info
+    ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated "
+                               << node.name()
+                               << " Type: " << node.type()
+                               << " Target: " << TargetInfo::TargetType
+                               << " Data Type: " << input->info()->data_type()
+                               << " Input shape: " << input->info()->tensor_shape()
+                               << " Input quantization info: " << output->info()->quantization_info()
+                               << " Output shape: " << output->info()->tensor_shape()
+                               << std::endl);
+
+    return std::move(func);
+}
 /** Create a backend detection output layer function
  *
  * @tparam DetectionOutputLayer Function Backend detection output function
--
cgit v1.2.1
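
Editor's note: the helper above is only the templated building block; a backend function factory still has to dispatch to it for the new DequantizationLayer node type. The sketch below shows how such a dispatch could look for the NEON backend, following the pattern used for the other layers. It is not part of this patch: the names NEDequantizationLayer, NETargetInfo, NodeType::DequantizationLayer and the wrapper function itself are assumptions based on the existing backend naming conventions, and NETargetInfo is assumed to be visible in the factory's translation unit.

// Hypothetical dispatch sketch (not part of the patch). Assumes the NEON
// backend conventions: NEDequantizationLayer as the runtime function,
// NETargetInfo as the target traits, and a DequantizationLayer node type.
#include "arm_compute/graph/backends/FunctionHelpers.h"
#include "arm_compute/core/utils/misc/Cast.h"
#include "arm_compute/runtime/NEON/functions/NEDequantizationLayer.h"

namespace arm_compute
{
namespace graph
{
namespace backends
{
std::unique_ptr<IFunction> create_dequantization_node_function(INode &node)
{
    switch(node.type())
    {
        case NodeType::DequantizationLayer:
            // Downcast the generic node and forward it to the templated helper,
            // instantiated with the backend's function type and target traits.
            return detail::create_dequantization_layer<NEDequantizationLayer, NETargetInfo>(
                *arm_compute::utils::cast::polymorphic_downcast<DequantizationLayerNode *>(&node));
        default:
            return nullptr;
    }
}
} // namespace backends
} // namespace graph
} // namespace arm_compute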