75 std::string
const filePathString{filePath.string()};
76 std::ofstream file { filePathString };
94 {{
"SaveCachedNetwork",
true}, {
"CachedNetworkFilePath", filePathString}});
96 {{
"SaveCachedNetwork",
false}, {
"CachedNetworkFilePath", filePathString}});
101 *net1, backends, runtime->GetDeviceSpec(), optimizerOptions1);
103 *net2, backends, runtime->GetDeviceSpec(), optimizerOptions2);
108 CHECK(fs::is_empty(filePathString));
115 CHECK(fs::exists(filePathString));
116 std::vector<char> dataSerialized = ReadBinaryFile(filePathString);
117 CHECK(dataSerialized.size() != 0);
124 std::vector<uint8_t> outputData1(5);
125 RunInference(netId1, runtime, outputData1);
127 std::vector<uint8_t> outputData2(5);
128 RunInference(netId2, runtime, outputData2);
131 CHECK(std::equal(outputData1.begin(), outputData1.end(), outputData2.begin(), outputData2.end()));
134 fs::remove(filePath);
ModelOptions m_ModelOptions
static IRuntimePtr Create(const CreationOptions &options)
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
GPU Execution: OpenCL: ArmCompute.
ArmNN performs an optimization on each model/network before it gets loaded for execution.
Struct for users to pass backend-specific options.
fs::path NamedTempFile(const char *fileName)
Returns a path to a file in the system temporary folder. If the file already existed, it will be deleted...
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr