Diffstat (limited to 'samples/ObjectDetection/src')
-rw-r--r--  samples/ObjectDetection/src/Main.cpp                    | 15
-rw-r--r--  samples/ObjectDetection/src/ObjectDetectionPipeline.cpp | 26
2 files changed, 26 insertions(+), 15 deletions(-)
diff --git a/samples/ObjectDetection/src/Main.cpp b/samples/ObjectDetection/src/Main.cpp
index e057981550..8bc2f0de38 100644
--- a/samples/ObjectDetection/src/Main.cpp
+++ b/samples/ObjectDetection/src/Main.cpp
@@ -20,6 +20,7 @@ const std::string MODEL_FILE_PATH = "--model-file-path";
const std::string OUTPUT_VIDEO_FILE_PATH = "--output-video-file-path";
const std::string LABEL_PATH = "--label-path";
const std::string PREFERRED_BACKENDS = "--preferred-backends";
+const std::string PROFILING_ENABLED = "--profiling_enabled";
const std::string HELP = "--help";
/*
@@ -29,13 +30,16 @@ static std::map<std::string, std::string> CMD_OPTIONS = {
{VIDEO_FILE_PATH, "[REQUIRED] Path to the video file to run object detection on"},
{MODEL_FILE_PATH, "[REQUIRED] Path to the Object Detection model to use"},
{LABEL_PATH, "[REQUIRED] Path to the label set for the provided model file. "
- "Label file is should just be an ordered list, seperated by new line."},
+ "Label file should be an ordered list, separated by a new line."},
{MODEL_NAME, "[REQUIRED] The name of the model being used. Accepted options: YOLO_V3_TINY, SSD_MOBILE"},
{OUTPUT_VIDEO_FILE_PATH, "[OPTIONAL] Path to the output video file with detections added in. "
"If specified will save file to disk, else displays the output to screen"},
{PREFERRED_BACKENDS, "[OPTIONAL] Takes the preferred backends in preference order, separated by comma."
" For example: CpuAcc,GpuAcc,CpuRef. Accepted options: [CpuAcc, CpuRef, GpuAcc]."
- " Defaults to CpuAcc,CpuRef"}
+ " Defaults to CpuAcc,CpuRef"},
+ {PROFILING_ENABLED, "[OPTIONAL] Enabling this option will print important ML-related milestone timing "
+ "information in microseconds. By default, this option is disabled. "
+ "Accepted options are true/false."}
};
/*
@@ -137,6 +141,10 @@ int main(int argc, char *argv[])
pipelineOptions.m_ModelFilePath = GetSpecifiedOption(options, MODEL_FILE_PATH);
pipelineOptions.m_ModelName = GetSpecifiedOption(options, MODEL_NAME);
+ if (CheckOptionSpecified(options, PROFILING_ENABLED))
+ {
+ pipelineOptions.m_ProfilingEnabled = GetSpecifiedOption(options, PROFILING_ENABLED) == "true";
+ }
if(CheckOptionSpecified(options, PREFERRED_BACKENDS))
{
pipelineOptions.m_backends = GetPreferredBackendList((GetSpecifiedOption(options, PREFERRED_BACKENDS)));
@@ -148,6 +156,8 @@ int main(int argc, char *argv[])
auto labels = AssignColourToLabel(GetSpecifiedOption(options, LABEL_PATH));
+ common::Profiling profiling(pipelineOptions.m_ProfilingEnabled);
+ profiling.ProfilingStart();
od::IPipelinePtr objectDetectionPipeline = od::CreatePipeline(pipelineOptions);
auto inputAndOutput = GetFrameSourceAndSink(options);
@@ -180,5 +190,6 @@ int main(int argc, char *argv[])
frame = reader->ReadFrame();
}
sink->Close();
+ profiling.ProfilingStopAndPrintUs("Overall compute time");
return 0;
}
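The new calls in Main.cpp rely on a common::Profiling helper from the shared sample utilities, whose implementation is not part of this diff. A minimal sketch of what that interface could look like, assuming a simple std::chrono wall-clock timer that only prints when the flag is enabled (the class body below is illustrative, not the actual utility code):

#include <chrono>
#include <iostream>
#include <string>

namespace common
{
// Sketch of a Profiling helper matching the calls made in Main.cpp:
// construct with the enable flag, start a timer, then stop and print the
// elapsed time in microseconds together with a caller-supplied label.
class Profiling
{
public:
    explicit Profiling(bool enabled) : m_Enabled(enabled) {}

    void ProfilingStart()
    {
        if (m_Enabled)
        {
            m_Start = std::chrono::steady_clock::now();
        }
    }

    void ProfilingStopAndPrintUs(const std::string& label)
    {
        if (m_Enabled)
        {
            auto elapsedUs = std::chrono::duration_cast<std::chrono::microseconds>(
                    std::chrono::steady_clock::now() - m_Start).count();
            std::cout << label << ": " << elapsedUs << " us" << std::endl;
        }
    }

private:
    bool m_Enabled;
    std::chrono::steady_clock::time_point m_Start;
};
} // namespace common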
diff --git a/samples/ObjectDetection/src/ObjectDetectionPipeline.cpp b/samples/ObjectDetection/src/ObjectDetectionPipeline.cpp
index 077caa40cb..2c4a76d35a 100644
--- a/samples/ObjectDetection/src/ObjectDetectionPipeline.cpp
+++ b/samples/ObjectDetection/src/ObjectDetectionPipeline.cpp
@@ -11,8 +11,8 @@ namespace od
ObjDetectionPipeline::ObjDetectionPipeline(std::unique_ptr<common::ArmnnNetworkExecutor<float>> executor,
std::unique_ptr<IDetectionResultDecoder> decoder) :
- m_executor(std::move(executor)),
- m_decoder(std::move(decoder)){}
+ m_executor(std::move(executor)),
+ m_decoder(std::move(decoder)){}
void od::ObjDetectionPipeline::Inference(const cv::Mat& processed, common::InferenceResults<float>& result)
{
@@ -39,8 +39,8 @@ void ObjDetectionPipeline::PreProcessing(const cv::Mat& frame, cv::Mat& processe
MobileNetSSDv1::MobileNetSSDv1(std::unique_ptr<common::ArmnnNetworkExecutor<float>> executor,
float objectThreshold) :
- ObjDetectionPipeline(std::move(executor),
- std::make_unique<SSDResultDecoder>(objectThreshold))
+ ObjDetectionPipeline(std::move(executor),
+ std::make_unique<SSDResultDecoder>(objectThreshold))
{}
void MobileNetSSDv1::PreProcessing(const cv::Mat& frame, cv::Mat& processed)
@@ -52,13 +52,12 @@ void MobileNetSSDv1::PreProcessing(const cv::Mat& frame, cv::Mat& processed)
processed.convertTo(processed, CV_32FC3, 1 / 127.5, -1);
}
}
-
YoloV3Tiny::YoloV3Tiny(std::unique_ptr<common::ArmnnNetworkExecutor<float>> executor,
float NMSThreshold, float ClsThreshold, float ObjectThreshold) :
- ObjDetectionPipeline(std::move(executor),
- std::move(std::make_unique<YoloResultDecoder>(NMSThreshold,
- ClsThreshold,
- ObjectThreshold)))
+ ObjDetectionPipeline(std::move(executor),
+ std::move(std::make_unique<YoloResultDecoder>(NMSThreshold,
+ ClsThreshold,
+ ObjectThreshold)))
{}
void YoloV3Tiny::PreProcessing(const cv::Mat& frame, cv::Mat& processed)
@@ -72,11 +71,12 @@ void YoloV3Tiny::PreProcessing(const cv::Mat& frame, cv::Mat& processed)
IPipelinePtr CreatePipeline(common::PipelineOptions& config)
{
- auto executor = std::make_unique<common::ArmnnNetworkExecutor<float>>(config.m_ModelFilePath, config.m_backends);
-
+ auto executor = std::make_unique<common::ArmnnNetworkExecutor<float>>(config.m_ModelFilePath,
+ config.m_backends,
+ config.m_ProfilingEnabled);
if (config.m_ModelName == "SSD_MOBILE")
{
- float detectionThreshold = 0.6;
+ float detectionThreshold = 0.5;
return std::make_unique<od::MobileNetSSDv1>(std::move(executor),
detectionThreshold
@@ -99,4 +99,4 @@ IPipelinePtr CreatePipeline(common::PipelineOptions& config)
}
}
-}// namespace od
\ No newline at end of file
+}// namespace od
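The third constructor argument forwards config.m_ProfilingEnabled into ArmnnNetworkExecutor, whose implementation is also outside this diff. One plausible way the executor could consume the flag is to switch on Arm NN's per-network profiler; a hypothetical fragment under that assumption (the function name and the runtime/networkId parameters are assumptions, the armnn::IRuntime and armnn::IProfiler calls are the public Arm NN API):

#include <armnn/IRuntime.hpp>
#include <iostream>
#include <memory>

// Hypothetical fragment: enable Arm NN's built-in profiler for a loaded
// network when profiling was requested on the command line. 'runtime' and
// 'networkId' are assumed to come from the executor's normal network-loading
// path (armnn::IRuntime::Create and LoadNetwork).
void EnableNetworkProfiling(armnn::IRuntime* runtime,
                            armnn::NetworkId networkId,
                            bool profilingEnabled)
{
    std::shared_ptr<armnn::IProfiler> profiler = runtime->GetProfiler(networkId);
    if (profiler)
    {
        profiler->EnableProfiling(profilingEnabled);
    }
}

// After inference, the collected per-layer timings could then be dumped with:
//     profiler->Print(std::cout);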