ArmNN 21.02
EndToEndTest.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Exceptions.hpp>

#include <boost/test/unit_test.hpp>

#include <set>

BOOST_AUTO_TEST_SUITE(EndToEnd)

BOOST_AUTO_TEST_CASE(ErrorOnLoadNetwork)
{
    using namespace armnn;

    // Create the runtime in which the test will run.
    // Note we don't allow falling back to CpuRef if an operation (excluding inputs, outputs, etc.) isn't supported.
    IRuntime::CreationOptions options;
    IRuntimePtr runtime(IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc and isn't allowed to fall back, so Optimize will fail.
    NormalizationDescriptor descriptor;
    IConnectableLayer* pooling = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(pooling->GetInputSlot(0));
    pooling->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network for the CpuAcc backend only.
    std::vector<BackendId> backends = {Compute::CpuAcc};
    std::vector<std::string> errMessages;

    try
    {
        Optimize(*net, backends, runtime->GetDeviceSpec(), OptimizerOptions(), errMessages);
        BOOST_FAIL("Should have thrown an exception.");
    }
    catch (const InvalidArgumentException&)
    {
        // Expected: different exceptions are thrown on different backends.
    }
    BOOST_CHECK(errMessages.size() > 0);
}

BOOST_AUTO_TEST_SUITE_END()
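The ErrorOnLoadNetwork test deliberately forbids falling back to CpuRef, which is why Optimize reports errors for the unsupported normalization configuration. For contrast, the sketch below is not part of the original file: the test name FallbackToCpuRef is hypothetical, and it assumes the public ArmNN 21.02 API (Optimize, IRuntime::LoadNetwork). With Compute::CpuRef appended to the backend preferences, the same graph is expected to optimize and load successfully because unsupported layers may fall back to the reference backend.

#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>

#include <boost/test/unit_test.hpp>

#include <vector>

BOOST_AUTO_TEST_CASE(FallbackToCpuRef) // hypothetical test name, sketch only
{
    using namespace armnn;

    IRuntime::CreationOptions options;
    IRuntimePtr runtime(IRuntime::Create(options));

    // Same tiny input -> normalization -> output graph as in ErrorOnLoadNetwork.
    INetworkPtr net(INetwork::Create());
    IConnectableLayer* input  = net->AddInputLayer(0);
    NormalizationDescriptor descriptor;
    IConnectableLayer* norm   = net->AddNormalizationLayer(descriptor);
    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(norm->GetInputSlot(0));
    norm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    norm->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Listing CpuRef after CpuAcc allows unsupported layers to fall back to the reference backend,
    // so Optimize is expected to return a valid optimized network instead of throwing.
    std::vector<BackendId> backends = { Compute::CpuAcc, Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
    BOOST_CHECK(optNet != nullptr);

    // Loading the optimized network into the runtime should now succeed.
    NetworkId networkId = 0;
    BOOST_CHECK(runtime->LoadNetwork(networkId, std::move(optNet)) == Status::Success);
}

In practice such a case would sit inside the EndToEnd suite above rather than in the master test suite.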
Symbols referenced in this file:

armnn::IRuntime::Create(const CreationOptions& options)
    Definition: Runtime.cpp:37
armnn::IRuntimePtr: std::unique_ptr<IRuntime, void (*)(IRuntime* runtime)>
    Definition: IRuntime.hpp:26
armnn::INetwork::Create(NetworkOptions networkOptions = {})
    Definition: Network.cpp:510
armnn::INetworkPtr: std::unique_ptr<INetwork, void (*)(INetwork* network)>
    Definition: INetwork.hpp:173
armnn::IConnectableLayer
    Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
    Definition: INetwork.hpp:62
armnn::IConnectableLayer::GetInputSlot(unsigned int index)
    Get a const input slot handle by slot index.
armnn::IConnectableLayer::GetOutputSlot(unsigned int index)
    Get the const output slot handle by slot index.
armnn::IOutputSlot::SetTensorInfo(const TensorInfo& tensorInfo)
    Set the TensorInfo used by this output slot (pure virtual).
armnn::IOutputSlot::Connect(IInputSlot& destination)
    Connect this output slot to an input slot (pure virtual).
armnn::NormalizationDescriptor
    A NormalizationDescriptor for the NormalizationLayer.
armnn::Compute::CpuAcc
    CPU Execution: NEON: ArmCompute.
armnn::Optimize(const INetwork& network, const std::vector<BackendId>& backendPreferences, const IDeviceSpec& deviceSpec, const OptimizerOptions& options = OptimizerOptions(), Optional<std::vector<std::string>&> messages = EmptyOptional())
    Create an optimized version of the network.
    Definition: Network.cpp:1502
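The Optimize entry above covers only the first half of an end-to-end run: after a successful Optimize, the optimized network is loaded with IRuntime::LoadNetwork and executed with IRuntime::EnqueueWorkload. The helper below is a rough sketch of that execution step under the assumption of the ArmNN 21.02 runtime API; the function name RunSingleInference and the dummy buffers are illustrative only, reusing the 1x1x4x4 Float32 shape and binding id 0 from the test above.

#include <armnn/IRuntime.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

#include <vector>

// Sketch: run one inference on an already-loaded network (see the fallback example above).
armnn::Status RunSingleInference(armnn::IRuntime& runtime, armnn::NetworkId networkId)
{
    using namespace armnn;

    std::vector<float> inputData(1 * 1 * 4 * 4, 1.0f);   // dummy input values
    std::vector<float> outputData(1 * 1 * 4 * 4, 0.0f);  // receives the result

    // Bind the buffers to the layer binding ids used when the network was built (0 here).
    InputTensors inputTensors
    {
        { 0, ConstTensor(runtime.GetInputTensorInfo(networkId, 0), inputData.data()) }
    };
    OutputTensors outputTensors
    {
        { 0, Tensor(runtime.GetOutputTensorInfo(networkId, 0), outputData.data()) }
    };

    // Execute the workload; Status::Success indicates the inference ran.
    return runtime.EnqueueWorkload(networkId, inputTensors, outputTensors);
}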