ArmNN
 21.02
ParserFlatbuffersSerializeFixture Struct Reference

#include <ParserFlatbuffersSerializeFixture.hpp>

Inheritance diagram for ParserFlatbuffersSerializeFixture:
PositiveActivationFixture

Public Member Functions

 ParserFlatbuffersSerializeFixture ()
 
void Setup ()
 
void SetupSingleInputSingleOutput (const std::string &inputName, const std::string &outputName)
 
bool ReadStringToBinary ()
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType = armnn::ResolveType<ArmnnType>>
void RunTest (unsigned int layersId, const std::vector< DataType > &inputData, const std::vector< DataType > &expectedOutputData)
 Executes the network with the given input tensor and checks the result against the given output tensor. More...
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnInputType, armnn::DataType ArmnnOutputType, typename InputDataType = armnn::ResolveType<ArmnnInputType>, typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
void RunTest (unsigned int layersId, const std::vector< InputDataType > &inputData, const std::vector< OutputDataType > &expectedOutputData)
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType = armnn::ResolveType<ArmnnType>>
void RunTest (unsigned int layersId, const std::map< std::string, std::vector< DataType >> &inputData, const std::map< std::string, std::vector< DataType >> &expectedOutputData)
 Executes the network with the given input tensors and checks the results against the given output tensors. More...
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnInputType, armnn::DataType ArmnnOutputType, typename InputDataType = armnn::ResolveType<ArmnnInputType>, typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
void RunTest (unsigned int layersId, const std::map< std::string, std::vector< InputDataType >> &inputData, const std::map< std::string, std::vector< OutputDataType >> &expectedOutputData)
 
void CheckTensors (const TensorRawPtr &tensors, size_t shapeSize, const std::vector< int32_t > &shape, armnnSerializer::TensorInfo tensorType, const std::string &name, const float scale, const int64_t zeroPoint)
 

Public Attributes

std::vector< uint8_t > m_GraphBinary
 
std::string m_JsonString
 
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser
 
armnn::IRuntimePtr m_Runtime
 
armnn::NetworkId m_NetworkIdentifier
 
std::string m_SingleInputName
 If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest(). More...
 
std::string m_SingleOutputName
 

Detailed Description

Definition at line 27 of file ParserFlatbuffersSerializeFixture.hpp.

Constructor & Destructor Documentation

◆ ParserFlatbuffersSerializeFixture()

Definition at line 29 of file ParserFlatbuffersSerializeFixture.hpp.

29  :
30  m_Parser(IDeserializer::Create()),
33  {
34  }
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:37
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser

Member Function Documentation

◆ CheckTensors()

void CheckTensors ( const TensorRawPtr tensors,
size_t  shapeSize,
const std::vector< int32_t > &  shape,
armnnSerializer::TensorInfo  tensorType,
const std::string &  name,
const float  scale,
const int64_t  zeroPoint 
)
inline

Definition at line 151 of file ParserFlatbuffersSerializeFixture.hpp.

References armnn::IgnoreUnused().

154  {
155  armnn::IgnoreUnused(name);
156  BOOST_CHECK_EQUAL(shapeSize, tensors->dimensions()->size());
157  BOOST_CHECK_EQUAL_COLLECTIONS(shape.begin(), shape.end(),
158  tensors->dimensions()->begin(), tensors->dimensions()->end());
159  BOOST_CHECK_EQUAL(tensorType.dataType(), tensors->dataType());
160  BOOST_CHECK_EQUAL(scale, tensors->quantizationScale());
161  BOOST_CHECK_EQUAL(zeroPoint, tensors->quantizationOffset());
162  }
void IgnoreUnused(Ts &&...)

◆ ReadStringToBinary()

bool ReadStringToBinary ( )
inline

Definition at line 89 of file ParserFlatbuffersSerializeFixture.hpp.

References ARMNN_ASSERT_MSG, deserialize_schema_end, deserialize_schema_start, and RunTest().

Referenced by Setup().

90  {
91  std::string schemafile(&deserialize_schema_start, &deserialize_schema_end);
92 
93  // parse schema first, so we can use it to parse the data after
94  flatbuffers::Parser parser;
95 
96  bool ok = parser.Parse(schemafile.c_str());
97  ARMNN_ASSERT_MSG(ok, "Failed to parse schema file");
98 
99  ok &= parser.Parse(m_JsonString.c_str());
100  ARMNN_ASSERT_MSG(ok, "Failed to parse json input");
101 
102  if (!ok)
103  {
104  return false;
105  }
106 
107  {
108  const uint8_t* bufferPtr = parser.builder_.GetBufferPointer();
109  size_t size = static_cast<size_t>(parser.builder_.GetSize());
110  m_GraphBinary.assign(bufferPtr, bufferPtr+size);
111  }
112  return ok;
113  }
#define ARMNN_ASSERT_MSG(COND, MSG)
Definition: Assert.hpp:15
const char deserialize_schema_start
const char deserialize_schema_end

◆ RunTest() [1/4]

void RunTest ( unsigned int  layersId,
const std::vector< DataType > &  inputData,
const std::vector< DataType > &  expectedOutputData 
)

Executes the network with the given input tensor and checks the result against the given output tensor.

This overload assumes the network has a single input and a single output.

Definition at line 166 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by ReadStringToBinary().

169 {
170  RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
171 }

◆ RunTest() [2/4]

void RunTest ( unsigned int  layersId,
const std::vector< InputDataType > &  inputData,
const std::vector< OutputDataType > &  expectedOutputData 
)

Definition at line 178 of file ParserFlatbuffersSerializeFixture.hpp.

References m_SingleInputName, and m_SingleOutputName.

181 {
182  RunTest<NumOutputDimensions, ArmnnInputType, ArmnnOutputType>(layersId,
183  { { m_SingleInputName, inputData } },
184  { { m_SingleOutputName, expectedOutputData } });
185 }
std::string m_SingleInputName
If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest().

◆ RunTest() [3/4]

void RunTest ( unsigned int  layersId,
const std::map< std::string, std::vector< DataType >> &  inputData,
const std::map< std::string, std::vector< DataType >> &  expectedOutputData 
)

Executes the network with the given input tensors and checks the results against the given output tensors.

This overload supports multiple inputs and multiple outputs, identified by name.

Definition at line 188 of file ParserFlatbuffersSerializeFixture.hpp.

191 {
192  RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
193 }

◆ RunTest() [4/4]

void RunTest ( unsigned int  layersId,
const std::map< std::string, std::vector< InputDataType >> &  inputData,
const std::map< std::string, std::vector< OutputDataType >> &  expectedOutputData 
)

Definition at line 200 of file ParserFlatbuffersSerializeFixture.hpp.

References CompareTensors(), m_NetworkIdentifier, m_Parser, m_Runtime, and armnn::VerifyTensorInfoDataType().

204 {
205  auto ConvertBindingInfo = [](const armnnDeserializer::BindingPointInfo& bindingInfo)
206  {
207  return std::make_pair(bindingInfo.m_BindingId, bindingInfo.m_TensorInfo);
208  };
209 
210  // Setup the armnn input tensors from the given vectors.
211  armnn::InputTensors inputTensors;
212  for (auto&& it : inputData)
213  {
214  armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
215  m_Parser->GetNetworkInputBindingInfo(layersId, it.first));
216  armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnInputType);
217  inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
218  }
219 
220  // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
221  std::map<std::string, boost::multi_array<OutputDataType, NumOutputDimensions>> outputStorage;
222  armnn::OutputTensors outputTensors;
223  for (auto&& it : expectedOutputData)
224  {
225  armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
226  m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
227  armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnOutputType);
228  outputStorage.emplace(it.first, MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second));
229  outputTensors.push_back(
230  { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
231  }
232 
233  m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
234 
235  // Compare each output tensor to the expected values
236  for (auto&& it : expectedOutputData)
237  {
238  armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
239  m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
240  auto outputExpected = MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second, it.second);
241  BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
242  }
243 }
boost::test_tools::predicate_result CompareTensors(const boost::multi_array< T, n > &a, const boost::multi_array< T, n > &b, bool compareBoolean=false, bool isDynamic=false)
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:340
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:306
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:314
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:341
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
Definition: Tensor.hpp:261
void VerifyTensorInfoDataType(const armnn::TensorInfo &info, armnn::DataType dataType)
Definition: TypesUtils.hpp:309

◆ Setup()

void Setup ( )
inline

Definition at line 47 of file ParserFlatbuffersSerializeFixture.hpp.

References armnn::CpuRef, armnn::Optimize(), ReadStringToBinary(), and armnn::Success.

Referenced by SetupSingleInputSingleOutput().

48  {
49  bool ok = ReadStringToBinary();
50  if (!ok)
51  {
52  throw armnn::Exception("LoadNetwork failed while reading binary input");
53  }
54 
55  armnn::INetworkPtr network =
56  m_Parser->CreateNetworkFromBinary(m_GraphBinary);
57 
58  if (!network)
59  {
60  throw armnn::Exception("The parser failed to create an ArmNN network");
61  }
62 
63  auto optimized = Optimize(*network, {armnn::Compute::CpuRef},
64  m_Runtime->GetDeviceSpec());
65 
66  std::string errorMessage;
67  armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
68 
69  if (ret != armnn::Status::Success)
70  {
71  throw armnn::Exception(fmt::format("The runtime failed to load the network. "
72  "Error was: {0}. in {1} [{2}:{3}]",
73  errorMessage,
74  __func__,
75  __FILE__,
76  __LINE__));
77  }
78 
79  }
CPU Execution: Reference C++ kernels.
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1502
Status
enumeration
Definition: Types.hpp:26
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:173

◆ SetupSingleInputSingleOutput()

void SetupSingleInputSingleOutput ( const std::string &  inputName,
const std::string &  outputName 
)
inline

Definition at line 81 of file ParserFlatbuffersSerializeFixture.hpp.

References Setup().

82  {
83  // Store the input and output name so they don't need to be passed to the single-input-single-output RunTest().
84  m_SingleInputName = inputName;
85  m_SingleOutputName = outputName;
86  Setup();
87  }
std::string m_SingleInputName
If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest().

Member Data Documentation

◆ m_GraphBinary

std::vector<uint8_t> m_GraphBinary

Definition at line 36 of file ParserFlatbuffersSerializeFixture.hpp.

◆ m_JsonString

std::string m_JsonString

Definition at line 37 of file ParserFlatbuffersSerializeFixture.hpp.

◆ m_NetworkIdentifier

armnn::NetworkId m_NetworkIdentifier

Definition at line 40 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_Parser

std::unique_ptr<IDeserializer, void (*)(IDeserializer* parser)> m_Parser

Definition at line 38 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_Runtime

armnn::IRuntimePtr m_Runtime

Definition at line 39 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_SingleInputName

std::string m_SingleInputName

If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest().

Definition at line 44 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_SingleOutputName

std::string m_SingleOutputName

Definition at line 45 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().


The documentation for this struct was generated from the following file: ParserFlatbuffersSerializeFixture.hpp