aboutsummaryrefslogtreecommitdiff
path: root/src/armnn/layers/LayerWithParameters.hpp
blob: 40ade95d5c069b2e98c7064f7951266f1859ebd4 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "ConstantLayer.hpp"
#include <Layer.hpp>

namespace armnn
{

/// Intermediate base class for layers that carry a descriptor/parameter
/// struct of type @p Parameters in addition to the common Layer state.
template <typename Parameters>
class LayerWithParameters : public Layer
{
public:
    using DescriptorType = Parameters;

    /// Read-only access to this layer's parameter struct.
    const Parameters& GetParameters() const override { return m_Param; }

    /// Helper to serialize the layer parameters to string
    /// (currently used in DotSerializer and company).
    void SerializeLayerParameters(ParameterStringifyFunction& stringifyFn) const override
    {
        // Emit this layer's own parameters first, then let the base class
        // append the common Layer attributes.
        StringifyLayerParameters<Parameters>::Serialize(stringifyFn, m_Param);
        Layer::SerializeLayerParameters(stringifyFn);
    }

protected:
    /// Constructs the layer and stores a copy of the supplied parameters.
    LayerWithParameters(unsigned int numInputSlots,
                        unsigned int numOutputSlots,
                        LayerType type,
                        const Parameters& param,
                        const char* name)
        : Layer(numInputSlots, numOutputSlots, type, name)
        , m_Param(param)
    {
    }

    // Protected non-virtual destructor: instances are never deleted through
    // a LayerWithParameters pointer.
    ~LayerWithParameters() = default;

    /// Helper function to reduce duplication in *Layer::CreateWorkload.
    /// Copies the stored parameters into @p desc and forwards to the base
    /// class to build the WorkloadInfo.
    template <typename QueueDescriptor>
    WorkloadInfo PrepInfoAndDesc(QueueDescriptor& desc) const
    {
        desc.m_Parameters = m_Param;
        desc.m_AllowExpandedDims = GetAllowExpandedDims();
        return Layer::PrepInfoAndDesc(desc);
    }

    /// The parameters for the layer (not including tensor-valued weights etc.).
    Parameters m_Param;

    /// Visitor hook: hands this layer, its parameters and its name to the strategy.
    void ExecuteStrategy(IStrategy& strategy) const override
    {
        strategy.ExecuteStrategy(this, GetParameters(), {}, GetName());
    }

    /// Collects the output tensors of all Constant layers that feed this
    /// layer's input slots with constant tensor data. Logs a warning and
    /// returns an empty list when no such connection exists.
    Layer::ConstantTensors GetConnectedConstantAsInputTensors()
    {
        Layer::ConstantTensors constants;
        const unsigned int inputCount = GetNumInputSlots();
        for (unsigned int slotIdx = 0; slotIdx < inputCount; ++slotIdx)
        {
            auto* connection = GetInputSlot(slotIdx).GetConnection();
            if (!connection || !connection->GetTensorInfo().IsConstant())
            {
                continue; // unconnected, or not carrying constant data
            }
            auto& producer = GetInputSlot(slotIdx).GetConnectedOutputSlot()->GetOwningLayer();
            if (producer.GetType() == armnn::LayerType::Constant)
            {
                // Safe downcast: the type check above guarantees a ConstantLayer.
                auto& constantLayer = static_cast<ConstantLayer&>(producer);
                constants.push_back(constantLayer.m_LayerOutput);
            }
        }
        if (constants.empty())
        {
            const std::string warningMessage{"GetConnectedConstantAsInputTensors() called on Layer with no "
                                             "connected Constants as Input Tensors."};
            ARMNN_LOG(warning) << warningMessage;
        }
        return constants;
    }
};




} // namespace