path: root/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "../MobileNetSsdInferenceTest.hpp"

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include <algorithm>
#include <iostream>
#include <iterator>
#include <memory>
#include <vector>

using namespace armnnTfLiteParser;

int main(int argc, char* argv[])
{
    int retVal = EXIT_FAILURE;
    try
    {
        using DataType = float;
        using Parser   = armnnTfLiteParser::ITfLiteParser;
        using Model    = InferenceModel<Parser, DataType>;

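        // SSD MobileNet V1 takes a single NHWC input: a batch of one 300x300 RGB image.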
        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });

        std::vector<const char*> inputLayerNames  =
        {
            "normalized_input_image_tensor"
        };

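        // The custom TFLite_Detection_PostProcess operator has four outputs:
        // detection boxes, detection classes, detection scores and the number
        // of valid detections.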
        std::vector<const char*> outputLayerNames =
        {
            "TFLite_Detection_PostProcess",
            "TFLite_Detection_PostProcess:1",
            "TFLite_Detection_PostProcess:2",
            "TFLite_Detection_PostProcess:3"
        };

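        // Run the test harness; { 0 } is the default test case ID used when
        // none is specified on the command line.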
        retVal = InferenceTestMain(argc, argv, { 0 },
            [&inputTensorShape, inputLayerNames, outputLayerNames]()
            {
                return std::make_unique<MobileNetSsdTestCaseProvider<Model>>(
                    [&]
                    (const InferenceTestOptions& commonOptions,
                     typename Model::CommandLineOptions modelOptions)
                    {
                        if (!ValidateDirectory(modelOptions.m_ModelDir))
                        {
                            return std::unique_ptr<Model>();
                        }

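                        // Point the parser at the .tflite model file and wire up
                        // the input/output binding names declared above.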
                        typename Model::Params modelParams;
                        modelParams.m_ModelPath =
                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";

                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
                                  std::back_inserter(modelParams.m_InputBindings));

                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
                                  std::back_inserter(modelParams.m_OutputBindings));

                        modelParams.m_InputShapes                    = { inputTensorShape };
                        modelParams.m_IsModelBinary                  = true;
                        modelParams.m_ComputeDevices                 = modelOptions.GetComputeDevicesAsBackendIds();
                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;

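                        // Constructing the InferenceModel loads and parses the
                        // network, then optimizes it for the selected backends.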
                        return std::make_unique<Model>(modelParams, commonOptions.m_EnableProfiling);
                });
            });
    }
    catch (const std::exception& e)
    {
        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
                     "the classifier inference tests: " << e.what() << std::endl;
    }
    return retVal;
}