// ---------------------------------------------------------------------------
// NOTE(review): doxygen-extracted listing of the interior of the sample's
// main() (original source lines 491-658).  The leading integers are the
// original file's line numbers, and the extraction has split several
// statements (log messages, raw string literals) across lines and dropped
// lines entirely (braces, LoadModel / MakeInputTensors call sites).  Code
// below is kept byte-identical; only review comments are added.
// ---------------------------------------------------------------------------
// Parse command-line options into the program's configuration struct.
491 ParseArgs progArgs = ParseArgs(argc, argv);
// If a dynamic-backend path was supplied, report it.
// NOTE(review): the message lacks a trailing space -- it prints
// "Loading backends from/some/path".
496 if (!progArgs.dynamicBackendPath.empty())
498 std::cout <<
"Loading backends from" << progArgs.dynamicBackendPath <<
"\n";
// Create the Arm NN runtime; failure is fatal (error log at 505).
502 auto runtime = IRuntime::Create(runtimeOptions);
505 ARMNN_LOG(fatal) <<
"Could not create runtime.";
// One TfLite parser instance loads both networks (backbone + detector).
511 auto parser = ITfLiteParser::Create(parserOptions);
// --- Backbone network ------------------------------------------------------
514 ARMNN_LOG(info) <<
"Loading backbone...";
516 const DumpToDot dumpToDot = progArgs.dumpToDot;
// (LoadModel call spans lines outside this listing; 521 is its
// backend-preference argument.)
521 progArgs.prefBackendsBackbone,
// Resolve the backbone's I/O binding points by tensor name.
524 auto inputId = parser->GetNetworkInputBindingInfo(0,
"inputs");
525 auto bbOut0Id = parser->GetNetworkOutputBindingInfo(0,
"input_to_detector_1");
526 auto bbOut1Id = parser->GetNetworkOutputBindingInfo(0,
"input_to_detector_2");
527 auto bbOut2Id = parser->GetNetworkOutputBindingInfo(0,
"input_to_detector_3");
// Enable per-layer profiling on the backbone network.
528 auto backboneProfile = runtime->GetProfiler(backboneId);
529 backboneProfile->EnableProfiling(
true);
// --- Detector network ------------------------------------------------------
533 ARMNN_LOG(info) <<
"Loading detector...";
539 progArgs.prefBackendsDetector,
// Detector inputs are wired to the backbone outputs of the same names.
542 auto detectIn0Id = parser->GetNetworkInputBindingInfo(0,
"input_to_detector_1");
543 auto detectIn1Id = parser->GetNetworkInputBindingInfo(0,
"input_to_detector_2");
544 auto detectIn2Id = parser->GetNetworkInputBindingInfo(0,
"input_to_detector_3");
545 auto outputBoxesId = parser->GetNetworkOutputBindingInfo(0,
"output_boxes");
546 auto detectorProfile = runtime->GetProfiler(detectorId);
// --- Input image -----------------------------------------------------------
549 ARMNN_LOG(info) <<
"Loading test image...";
550 auto image =
LoadImage(progArgs.imageDir.c_str());
// (The empty-image check sits outside this view; 553 is its error return.)
553 return LOAD_IMAGE_ERROR;
// Intermediate buffers sized from the binding TensorInfos: three backbone
// outputs feeding the detector, plus the detector's final box tensor.
557 std::vector<float> intermediateMem0(bbOut0Id.second.GetNumElements());
558 std::vector<float> intermediateMem1(bbOut1Id.second.GetNumElements());
559 std::vector<float> intermediateMem2(bbOut2Id.second.GetNumElements());
560 std::vector<float> intermediateMem3(outputBoxesId.second.GetNumElements());
563 using BindingInfos = std::vector<armnn::BindingPointInfo>;
564 using FloatTensors = std::vector<std::reference_wrapper<std::vector<float>>>;
// Tensor wiring (MakeInputTensors/MakeOutputTensors call sites partially
// outside this view): image -> backbone -> intermediateMem0..2 ->
// detector -> intermediateMem3.
567 FloatTensors{ image });
569 FloatTensors{ intermediateMem0,
575 FloatTensors{ intermediateMem0,
579 FloatTensors{ intermediateMem3 });
// Run the two-stage pipeline numIterations times, recording the wall-clock
// time of the CPU-side NMS pass per iteration (microseconds, double rep).
581 static const int numIterations=2;
582 using DurationUS = std::chrono::duration<double, std::micro>;
583 std::vector<DurationUS> nmsDurations(0);
584 std::vector<yolov3::Detection> filtered_boxes;
585 nmsDurations.reserve(numIterations);
586 for (
int i=0; i < numIterations; i++)
589 ARMNN_LOG(info) <<
"Running backbone...";
590 runtime->EnqueueWorkload(backboneId, bbInputTensors, bbOutputTensors);
593 ARMNN_LOG(info) <<
"Running detector...";
594 runtime->EnqueueWorkload(detectorId, detectInputTensors, detectOutputTensors);
// Time the Non-Maxima Suppression pass with a monotonic clock.
598 using clock = std::chrono::steady_clock;
599 auto nmsStartTime = clock::now();
605 filtered_boxes =
yolov3::nms(config, intermediateMem3);
606 auto nmsEndTime = clock::now();
// BUG(review): operands are reversed -- (start - end) yields a NEGATIVE
// duration; this should be DurationUS(nmsEndTime - nmsStartTime).
613 const auto nmsDuration = DurationUS(nmsStartTime - nmsEndTime);
614 nmsDurations.push_back(nmsDuration);
// NOTE(review): profiling was already enabled before the loop (529);
// re-calling EnableProfiling(true) here looks redundant -- confirm against
// the full source whether these were meant to pass false.
616 backboneProfile->EnableProfiling(
true);
617 detectorProfile->EnableProfiling(
true);
// Dump each network's Arm NN profiler output as JSON.
620 std::ofstream backboneProfileStream(
"backbone.json");
621 backboneProfile->Print(backboneProfileStream);
622 backboneProfileStream.close();
624 std::ofstream detectorProfileStream(
"detector.json");
625 detectorProfile->Print(detectorProfileStream);
626 detectorProfileStream.close();
// Hand-write nms.json: { "NmsTimings": { "raw": [ ... ], "units": "us" } }.
// NOTE(review): the R"( ... )" raw-string literals below were split across
// lines by the doxygen extraction; in the real source each R is adjacent
// to its opening quote.
629 std::ofstream nmsProfileStream(
"nms.json");
630 nmsProfileStream <<
"{" <<
"\n";
631 nmsProfileStream << R
"( "NmsTimings": {)" << "\n";
632 nmsProfileStream << R
"( "raw": [)" << "\n";
// Comma-separate entries (the leading-comma logic's branch lines fall
// outside this view).
634 for (
auto duration : nmsDurations)
638 nmsProfileStream <<
",\n";
641 nmsProfileStream <<
" " << duration.count();
644 nmsProfileStream <<
"\n";
645 nmsProfileStream << R
"( "units": "us")" << "\n";
646 nmsProfileStream <<
" ]" <<
"\n";
647 nmsProfileStream <<
" }" <<
"\n";
648 nmsProfileStream <<
"}" <<
"\n";
649 nmsProfileStream.close();
// Optionally compare outputs against reference files (CheckAccuracy call;
// its first arguments fall outside this view).
651 if (progArgs.comparisonFiles.size() > 0)
658 progArgs.comparisonFiles);
void CheckAccuracy(std::vector< float > *toDetector0, std::vector< float > *toDetector1, std::vector< float > *toDetector2, std::vector< float > *detectorOutput, const std::vector< yolov3::Detection > &nmsOut, const std::vector< std::string > &filePaths)
void SetAllLoggingSinks(bool standardOut, bool debugOut, bool coloured)
int LoadModel(const char *filename, ITfLiteParser &parser, IRuntime &runtime, NetworkId &networkId, const std::vector< BackendId > &backendPreferences, ImportMemory enableImport, DumpToDot dumpToDot)
armnn::InputTensors MakeInputTensors(const std::vector< armnn::BindingPointInfo > &inputBindings, const std::vector< std::reference_wrapper< TContainer >> &inputDataContainers)
#define ARMNN_LOG(severity)
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
unsigned int num_boxes
Number of detected boxes.
std::vector< float > LoadImage(const char *filename)
void SetLogFilter(LogSeverity level)
void print_detection(std::ostream &os, const std::vector< Detection > &detections)
Print identified YOLO detections.
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
float iou_threshold
Inclusion threshold for Intersection-Over-Union.
std::string m_DynamicBackendsPath
Setting this value will override the paths set by the DYNAMIC_BACKEND_PATHS compiler directive. Only a...
std::vector< Detection > nms(const NMSConfig &config, const std::vector< float > &detected_boxes)
Perform Non-Maxima Suppression on a list of given detections.
armnn::OutputTensors MakeOutputTensors(const std::vector< armnn::BindingPointInfo > &outputBindings, const std::vector< std::reference_wrapper< TContainer >> &outputDataContainers)
Non-Maxima Suppression configuration meta-data.
float confidence_threshold
Inclusion confidence threshold for a box.
unsigned int num_classes
Number of classes in the detected boxes.