aboutsummaryrefslogtreecommitdiff
path: root/src/armnn/optimizations/RedirectMembersToConstantInputs.hpp
blob: 85d715c6b1ba63c2db2ca88e7a539aa68363e15c (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
//
// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "Optimization.hpp"

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

namespace armnn
{
namespace optimizations
{

class RedirectMembersToConstantInputsImpl
{
public:
    /// Search for layers with ConstantLayers as inputs. If the inputs are constant redirect the layers member
    /// variable for ConstTensors (e.g. m_weights) to the data stored in the ConstantLayer it is connected to.
    ///
    /// @param graph Unused; present to satisfy the OptimizeForType wrapper interface.
    /// @param layer The layer whose ConstTensor members may be redirected.
    void Run(Graph& graph, Layer& layer) const
    {
        IgnoreUnused(graph);

        switch (layer.GetType())
        {
            // NOTE(review): only FullyConnected is redirected today; the other layer types with
            // ConstTensor members are listed explicitly as placeholders for future support.
            case LayerType::BatchNormalization:
                break;
            case LayerType::Convolution2d:
                break;
            case LayerType::DepthwiseConvolution2d:
                break;
            case LayerType::DetectionPostProcess:
                break;
            case LayerType::FullyConnected:
                RedirectWeightsAndBiases<FullyConnectedLayer>(&layer);
                break;
            case LayerType::Lstm:
                break;
            case LayerType::TransposeConvolution2d:
                break;
            default:
                break;
        }
    }

protected:
    RedirectMembersToConstantInputsImpl()  = default;
    ~RedirectMembersToConstantInputsImpl() = default;

private:
    /// For each input slot (weights at index 1, biases at index 2) that is fed by a ConstantLayer,
    /// point the layer's m_Weight / m_Bias members at the ConstantLayer's stored tensor.
    ///
    /// @tparam LayerT A layer type exposing m_Weight and m_Bias shared ConstTensor handles.
    /// @param layer The layer to redirect; must actually be of type LayerT.
    /// @return The downcast layer pointer.
    template <typename LayerT>
    static LayerT* RedirectWeightsAndBiases(Layer* layer)
    {
        LayerT* layerPtr = PolymorphicDowncast<LayerT*>(layer);

        // Loop through input slots to check for constant weights and biases layers.
        // Weights index = 1, Biases index = 2.
        for (unsigned int inputSlotIndex = 1; inputSlotIndex != layerPtr->GetNumInputSlots(); ++inputSlotIndex)
        {
            OutputSlot* outputSlot = layerPtr->GetInputSlot(inputSlotIndex).GetConnectedOutputSlot();
            if (outputSlot->GetOwningLayer().GetType() == LayerType::Constant)
            {
                // The type was verified above, so use the project-standard checked downcast
                // (PolymorphicDowncast, consistent with the cast on layerPtr) instead of an
                // RTTI-based dynamic_cast.
                ConstantLayer* constantLayer =
                    PolymorphicDowncast<ConstantLayer*>(&outputSlot->GetOwningLayer());
                if (inputSlotIndex == 1)
                {
                    layerPtr->m_Weight = constantLayer->m_LayerOutput;
                }
                else if (inputSlotIndex == 2)
                {
                    layerPtr->m_Bias = constantLayer->m_LayerOutput;
                }
            }
        }

        return layerPtr;
    }
};

// Applies RedirectMembersToConstantInputsImpl to every FullyConnectedLayer in the graph
// via the OptimizeForType visitor wrapper.
using RedirectMembersToConstantInputs = OptimizeForType<FullyConnectedLayer, RedirectMembersToConstantInputsImpl>;

} // namespace optimizations
} // namespace armnn