ArmNN
 21.08
ParserFlatbuffersSerializeFixture Struct Reference

#include <ParserFlatbuffersSerializeFixture.hpp>

Public Member Functions

 ParserFlatbuffersSerializeFixture ()
 
void Setup ()
 
void SetupSingleInputSingleOutput (const std::string &inputName, const std::string &outputName)
 
bool ReadStringToBinary ()
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType = armnn::ResolveType<ArmnnType>>
void RunTest (unsigned int layersId, const std::vector< DataType > &inputData, const std::vector< DataType > &expectedOutputData)
 Executes the network with the given input tensor and checks the result against the given output tensor. More...
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnInputType, armnn::DataType ArmnnOutputType, typename InputDataType = armnn::ResolveType<ArmnnInputType>, typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
void RunTest (unsigned int layersId, const std::vector< InputDataType > &inputData, const std::vector< OutputDataType > &expectedOutputData)
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType = armnn::ResolveType<ArmnnType>>
void RunTest (unsigned int layersId, const std::map< std::string, std::vector< DataType >> &inputData, const std::map< std::string, std::vector< DataType >> &expectedOutputData)
 Executes the network with the given input tensors and checks the results against the given output tensors. More...
 
template<std::size_t NumOutputDimensions, armnn::DataType ArmnnInputType, armnn::DataType ArmnnOutputType, typename InputDataType = armnn::ResolveType<ArmnnInputType>, typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
void RunTest (unsigned int layersId, const std::map< std::string, std::vector< InputDataType >> &inputData, const std::map< std::string, std::vector< OutputDataType >> &expectedOutputData)
 
void CheckTensors (const TensorRawPtr &tensors, size_t shapeSize, const std::vector< int32_t > &shape, armnnSerializer::TensorInfo tensorType, const std::string &name, const float scale, const int64_t zeroPoint)
 

Public Attributes

std::vector< uint8_t > m_GraphBinary
 
std::string m_JsonString
 
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser
 
armnn::IRuntimePtr m_Runtime
 
armnn::NetworkId m_NetworkIdentifier
 
std::string m_SingleInputName
 If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest(). More...
 
std::string m_SingleOutputName
 

Detailed Description

Definition at line 29 of file ParserFlatbuffersSerializeFixture.hpp.

Constructor & Destructor Documentation

◆ ParserFlatbuffersSerializeFixture()

Definition at line 31 of file ParserFlatbuffersSerializeFixture.hpp.

31  :
32  m_Parser(IDeserializer::Create()),
33  m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())),
34  m_NetworkIdentifier(-1)
35  {
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:39
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser

Member Function Documentation

◆ CheckTensors()

void CheckTensors ( const TensorRawPtr &  tensors,
size_t  shapeSize,
const std::vector< int32_t > &  shape,
armnnSerializer::TensorInfo  tensorType,
const std::string &  name,
const float  scale,
const int64_t  zeroPoint 
)
inline

Definition at line 153 of file ParserFlatbuffersSerializeFixture.hpp.

References armnn::IgnoreUnused().

156  {
157  armnn::IgnoreUnused(name);
158  CHECK_EQ(shapeSize, tensors->dimensions()->size());
159  CHECK(std::equal(shape.begin(), shape.end(),
160  tensors->dimensions()->begin(), tensors->dimensions()->end()));
161  CHECK_EQ(tensorType.dataType(), tensors->dataType());
162  CHECK_EQ(scale, tensors->quantizationScale());
163  CHECK_EQ(zeroPoint, tensors->quantizationOffset());
164  }
void IgnoreUnused(Ts &&...)

◆ ReadStringToBinary()

bool ReadStringToBinary ( )
inline

Definition at line 91 of file ParserFlatbuffersSerializeFixture.hpp.

References ARMNN_ASSERT_MSG, deserialize_schema_start, and deserialize_schema_end.

Referenced by Setup().

92  {
93  std::string schemafile(&deserialize_schema_start, &deserialize_schema_end);
94 
95  // parse schema first, so we can use it to parse the data after
96  flatbuffers::Parser parser;
97 
98  bool ok = parser.Parse(schemafile.c_str());
99  ARMNN_ASSERT_MSG(ok, "Failed to parse schema file");
100 
101  ok &= parser.Parse(m_JsonString.c_str());
102  ARMNN_ASSERT_MSG(ok, "Failed to parse json input");
103 
104  if (!ok)
105  {
106  return false;
107  }
108 
109  {
110  const uint8_t* bufferPtr = parser.builder_.GetBufferPointer();
111  size_t size = static_cast<size_t>(parser.builder_.GetSize());
112  m_GraphBinary.assign(bufferPtr, bufferPtr+size);
113  }
114  return ok;
115  }
#define ARMNN_ASSERT_MSG(COND, MSG)
Definition: Assert.hpp:15
const char deserialize_schema_start
const char deserialize_schema_end

◆ RunTest() [1/4]

void RunTest ( unsigned int  layersId,
const std::vector< DataType > &  inputData,
const std::vector< DataType > &  expectedOutputData 
)

Executes the network with the given input tensor and checks the result against the given output tensor.

This overload assumes the network has a single input and a single output.

Definition at line 168 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by ReadStringToBinary().

171 {
172  RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
173 }

◆ RunTest() [2/4]

void RunTest ( unsigned int  layersId,
const std::vector< InputDataType > &  inputData,
const std::vector< OutputDataType > &  expectedOutputData 
)

Definition at line 180 of file ParserFlatbuffersSerializeFixture.hpp.

References m_SingleInputName, and m_SingleOutputName.

183 {
184  RunTest<NumOutputDimensions, ArmnnInputType, ArmnnOutputType>(layersId,
185  { { m_SingleInputName, inputData } },
186  { { m_SingleOutputName, expectedOutputData } });
187 }
std::string m_SingleInputName
If the single-input-single-output overload of Setup() is called, these will store the input and outpu...

◆ RunTest() [3/4]

void RunTest ( unsigned int  layersId,
const std::map< std::string, std::vector< DataType >> &  inputData,
const std::map< std::string, std::vector< DataType >> &  expectedOutputData 
)

Executes the network with the given input tensors and checks the results against the given output tensors.

This overload supports multiple inputs and multiple outputs, identified by name.

Definition at line 190 of file ParserFlatbuffersSerializeFixture.hpp.

193 {
194  RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
195 }

◆ RunTest() [4/4]

void RunTest ( unsigned int  layersId,
const std::map< std::string, std::vector< InputDataType >> &  inputData,
const std::map< std::string, std::vector< OutputDataType >> &  expectedOutputData 
)

Definition at line 202 of file ParserFlatbuffersSerializeFixture.hpp.

References CompareTensors(), m_NetworkIdentifier, m_Parser, m_Runtime, and armnn::VerifyTensorInfoDataType().

206 {
207  auto ConvertBindingInfo = [](const armnnDeserializer::BindingPointInfo& bindingInfo)
208  {
209  return std::make_pair(bindingInfo.m_BindingId, bindingInfo.m_TensorInfo);
210  };
211 
212  // Setup the armnn input tensors from the given vectors.
213  armnn::InputTensors inputTensors;
214  for (auto&& it : inputData)
215  {
216  armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
217  m_Parser->GetNetworkInputBindingInfo(layersId, it.first));
218  armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnInputType);
219  inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
220  }
221 
222  // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
223  std::map<std::string, std::vector<OutputDataType>> outputStorage;
224  armnn::OutputTensors outputTensors;
225  for (auto&& it : expectedOutputData)
226  {
227  armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
228  m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
229  armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnOutputType);
230  outputStorage.emplace(it.first, std::vector<OutputDataType>(bindingInfo.second.GetNumElements()));
231  outputTensors.push_back(
232  { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
233  }
234 
235  m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
236 
237  // Compare each output tensor to the expected values
238  for (auto&& it : expectedOutputData)
239  {
240  armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
241  m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
242  auto outputExpected = it.second;
243  auto result = CompareTensors(outputExpected, outputStorage[it.first],
244  bindingInfo.second.GetShape(), bindingInfo.second.GetShape());
245  CHECK_MESSAGE(result.m_Result, result.m_Message.str());
246  }
247 }
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:360
armnn::PredicateResult CompareTensors(const std::vector< T > &actualData, const std::vector< T > &expectedData, const armnn::TensorShape &actualShape, const armnn::TensorShape &expectedShape, bool compareBoolean=false, bool isDynamic=false)
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:319
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:361
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
Definition: Tensor.hpp:274
void VerifyTensorInfoDataType(const armnn::TensorInfo &info, armnn::DataType dataType)
Definition: TypesUtils.hpp:322

◆ Setup()

void Setup ( )
inline

Definition at line 49 of file ParserFlatbuffersSerializeFixture.hpp.

References armnn::CpuRef, armnn::Optimize(), ReadStringToBinary(), and armnn::Success.

Referenced by SetupSingleInputSingleOutput(), and TEST_SUITE().

50  {
51  bool ok = ReadStringToBinary();
52  if (!ok)
53  {
54  throw armnn::Exception("LoadNetwork failed while reading binary input");
55  }
56 
57  armnn::INetworkPtr network =
58  m_Parser->CreateNetworkFromBinary(m_GraphBinary);
59 
60  if (!network)
61  {
62  throw armnn::Exception("The parser failed to create an ArmNN network");
63  }
64 
65  auto optimized = Optimize(*network, {armnn::Compute::CpuRef},
66  m_Runtime->GetDeviceSpec());
67 
68  std::string errorMessage;
69  armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
70 
71  if (ret != armnn::Status::Success)
72  {
73  throw armnn::Exception(fmt::format("The runtime failed to load the network. "
74  "Error was: {0}. in {1} [{2}:{3}]",
75  errorMessage,
76  __func__,
77  __FILE__,
78  __LINE__));
79  }
80 
81  }
CPU Execution: Reference C++ kernels.
std::unique_ptr< IDeserializer, void(*)(IDeserializer *parser)> m_Parser
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1613
Status
enumeration
Definition: Types.hpp:29
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:172

◆ SetupSingleInputSingleOutput()

void SetupSingleInputSingleOutput ( const std::string &  inputName,
const std::string &  outputName 
)
inline

Definition at line 83 of file ParserFlatbuffersSerializeFixture.hpp.

References Setup().

Referenced by TEST_SUITE().

84  {
85  // Store the input and output name so they don't need to be passed to the single-input-single-output RunTest().
86  m_SingleInputName = inputName;
87  m_SingleOutputName = outputName;
88  Setup();
89  }
std::string m_SingleInputName
If the single-input-single-output overload of Setup() is called, these will store the input and outpu...

Member Data Documentation

◆ m_GraphBinary

std::vector<uint8_t> m_GraphBinary

Definition at line 38 of file ParserFlatbuffersSerializeFixture.hpp.

◆ m_JsonString

std::string m_JsonString

Definition at line 39 of file ParserFlatbuffersSerializeFixture.hpp.

◆ m_NetworkIdentifier

armnn::NetworkId m_NetworkIdentifier

Definition at line 42 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_Parser

std::unique_ptr<IDeserializer, void (*)(IDeserializer* parser)> m_Parser

Definition at line 40 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_Runtime

armnn::IRuntimePtr m_Runtime

Definition at line 41 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_SingleInputName

std::string m_SingleInputName

If the single-input-single-output overload of Setup() is called, these will store the input and output name so they don't need to be passed to the single-input-single-output overload of RunTest().

Definition at line 46 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().

◆ m_SingleOutputName

std::string m_SingleOutputName

Definition at line 47 of file ParserFlatbuffersSerializeFixture.hpp.

Referenced by RunTest().


The documentation for this struct was generated from the following file: