ArmNN 20.05
EndToEndTest.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <boost/test/unit_test.hpp>
#include <set>


Functions

 BOOST_AUTO_TEST_CASE (ErrorOnLoadNetwork)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE()

BOOST_AUTO_TEST_CASE(ErrorOnLoadNetwork)

Definition at line 17 of file EndToEndTest.cpp.

References BOOST_AUTO_TEST_SUITE_END(), BOOST_CHECK(), IOutputSlot::Connect(), armnn::CpuAcc, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, and IOutputSlot::SetTensorInfo().

18 {
19  using namespace armnn;
20 
21  // Create runtime in which test will run
22  // Note we don't allow falling back to CpuRef if an operation (excluding inputs, outputs, etc.) isn't supported
23  IRuntime::CreationOptions options;
24  IRuntimePtr runtime(IRuntime::Create(options));
25 
26  // build up the structure of the network
27  INetworkPtr net(INetwork::Create());
28 
29  IConnectableLayer* input = net->AddInputLayer(0);
30 
31  // This layer configuration isn't supported by CpuAcc and isn't allowed to fall back, so Optimize will return null.
32  NormalizationDescriptor descriptor;
33  IConnectableLayer* pooling = net->AddNormalizationLayer(descriptor);
34 
35  IConnectableLayer* output = net->AddOutputLayer(0);
36 
37  input->GetOutputSlot(0).Connect(pooling->GetInputSlot(0));
38  pooling->GetOutputSlot(0).Connect(output->GetInputSlot(0));
39 
40  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
41  pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
42 
43  // optimize the network
44  std::vector<BackendId> backends = {Compute::CpuAcc};
45  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
46  BOOST_CHECK(!optNet);
47 }
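
For contrast, the following is a minimal sketch, not part of EndToEndTest.cpp, of the same flow with CpuRef appended as a fallback backend: because the reference backend supports the normalization layer, Optimize would be expected to return a valid network instead of null. It continues from the listing above (same namespace, net, and runtime), and it assumes the Optimize overload that also accepts an OptimizerOptions and an optional message vector; the variable names are illustrative.

    // Sketch: allow fallback by also listing the reference backend after CpuAcc.
    std::vector<BackendId> fallbackBackends = { Compute::CpuAcc, Compute::CpuRef };

    // Optionally collect optimizer diagnostics through the message parameter of Optimize.
    std::vector<std::string> errorMessages;
    IOptimizedNetworkPtr optNetWithFallback = Optimize(*net,
                                                       fallbackBackends,
                                                       runtime->GetDeviceSpec(),
                                                       OptimizerOptions(),
                                                       Optional<std::vector<std::string>&>(errorMessages));

    // With CpuRef available as a fallback, the normalization layer can be placed on the
    // reference backend, so a non-null optimized network is expected here.
    BOOST_CHECK(optNetWithFallback);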