Diffstat (limited to 'shim/sl/canonical')
-rw-r--r--  shim/sl/canonical/ArmnnDriver.hpp         20
-rw-r--r--  shim/sl/canonical/ArmnnDriverImpl.cpp     22
-rw-r--r--  shim/sl/canonical/ArmnnPreparedModel.cpp  12
-rw-r--r--  shim/sl/canonical/ArmnnPreparedModel.hpp  12
4 files changed, 39 insertions(+), 27 deletions(-)
diff --git a/shim/sl/canonical/ArmnnDriver.hpp b/shim/sl/canonical/ArmnnDriver.hpp
index 6c8acd60c5..c42ffa20b5 100644
--- a/shim/sl/canonical/ArmnnDriver.hpp
+++ b/shim/sl/canonical/ArmnnDriver.hpp
@@ -22,7 +22,6 @@
#include "ModelToINetworkTransformer.hpp"
#include <log/log.h>
-
namespace armnn_driver
{
@@ -67,8 +66,7 @@ public:
Version getFeatureLevel() const override
{
VLOG(DRIVER) << "ArmnnDriver::getFeatureLevel()";
- // return kVersionFeatureLevel5;
- return Version::ANDROID_S;
+ return kVersionFeatureLevel5;
}
DeviceType getType() const override
@@ -175,12 +173,14 @@ public:
}
GeneralResult<SharedPreparedModel> prepareModel(const Model& model,
- ExecutionPreference preference,
- Priority priority,
- OptionalTimePoint deadline,
- const std::vector<SharedHandle>& modelCache,
- const std::vector<SharedHandle>& dataCache,
- const CacheToken& token) const override
+ ExecutionPreference preference,
+ Priority priority,
+ OptionalTimePoint deadline,
+ const std::vector<SharedHandle>& modelCache,
+ const std::vector<SharedHandle>& dataCache,
+ const CacheToken& token,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override
{
VLOG(DRIVER) << "ArmnnDriver::prepareModel()";
@@ -240,7 +240,7 @@ public:
const std::vector<BufferRole>&) const override
{
VLOG(DRIVER) << "ArmnnDriver::allocate()";
- return NN_ERROR(ErrorStatus::GENERAL_FAILURE) << "ArmnnDriver::allocate -- does not support allocate.";
+ return NN_ERROR(ErrorStatus::INVALID_ARGUMENT) << "ArmnnDriver::allocate -- does not support allocate.";
}
};
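With the new hints and extension-prefix parameters on prepareModel(), a caller that has nothing extra to pass simply supplies empty vectors. A minimal caller-side sketch, assuming the canonical android::nn types named in the diff; the device and model variables are hypothetical and not part of this patch:

    // Sketch only: no execution hints and no vendor extensions are used.
    const std::vector<android::nn::TokenValuePair> hints;                       // empty
    const std::vector<android::nn::ExtensionNameAndPrefix> extensionPrefixes;   // empty

    auto prepared = device->prepareModel(model,
                                         android::nn::ExecutionPreference::FAST_SINGLE_ANSWER,
                                         android::nn::Priority::MEDIUM,
                                         {},                          // no deadline
                                         {},                          // no model cache handles
                                         {},                          // no data cache handles
                                         android::nn::CacheToken{},   // zeroed cache token
                                         hints,
                                         extensionPrefixes);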
diff --git a/shim/sl/canonical/ArmnnDriverImpl.cpp b/shim/sl/canonical/ArmnnDriverImpl.cpp
index d3b4f23e12..3223d9e8bf 100644
--- a/shim/sl/canonical/ArmnnDriverImpl.cpp
+++ b/shim/sl/canonical/ArmnnDriverImpl.cpp
@@ -73,12 +73,12 @@ bool ArmnnDriverImpl::ValidateSharedHandle(const SharedHandle& sharedHandle)
{
bool valid = true;
- if (sharedHandle->fds[0] < 0)
+ if (*sharedHandle < 0)
{
return !valid;
}
- int dataCacheFileAccessMode = fcntl(sharedHandle->fds[0], F_GETFL) & O_ACCMODE;
+ int dataCacheFileAccessMode = fcntl(*sharedHandle, F_GETFL) & O_ACCMODE;
if (dataCacheFileAccessMode != O_RDWR)
{
return !valid;
@@ -102,7 +102,7 @@ bool ArmnnDriverImpl::ValidateDataCacheHandle(const std::vector<SharedHandle>& d
}
struct stat statBuffer;
- if (fstat(dataCacheHandle[0]->fds[0], &statBuffer) == 0)
+ if (fstat(*dataCacheHandle[0], &statBuffer) == 0)
{
unsigned long bufferSize = statBuffer.st_size;
if (bufferSize != dataSize)
@@ -189,7 +189,7 @@ GeneralResult<SharedPreparedModel> ArmnnDriverImpl::PrepareArmnnModel(
// For GpuAcc numberOfCachedFiles is 1
if (backend == armnn::Compute::GpuAcc)
{
- cachedFd = modelCacheHandle[index]->fds[0];
+ cachedFd = *modelCacheHandle[index];
saveCachedNetwork = true;
}
index += numberOfCachedModelFiles;
@@ -279,7 +279,7 @@ GeneralResult<SharedPreparedModel> ArmnnDriverImpl::PrepareArmnnModel(
size_t hashValue = 0;
if (dataCacheHandle.size() == 1 )
{
- write(dataCacheHandle[0]->fds[0], dataCacheData.data(), dataCacheData.size());
+ write(*dataCacheHandle[0], dataCacheData.data(), dataCacheData.size());
hashValue = CacheDataHandlerInstance().Hash(dataCacheData);
}
@@ -290,17 +290,17 @@ GeneralResult<SharedPreparedModel> ArmnnDriverImpl::PrepareArmnnModel(
{
for (uint32_t i = 0; i < modelCacheHandle.size(); ++i)
{
- int modelCacheFileAccessMode = fcntl(modelCacheHandle[i]->fds[0], F_GETFL) & O_ACCMODE;
+ int modelCacheFileAccessMode = fcntl(*modelCacheHandle[i], F_GETFL) & O_ACCMODE;
if (modelCacheFileAccessMode != O_RDONLY)
{
struct stat statBuffer;
- if (fstat(modelCacheHandle[i]->fds[0], &statBuffer) == 0)
+ if (fstat(*modelCacheHandle[i], &statBuffer) == 0)
{
long modelDataSize = statBuffer.st_size;
if (modelDataSize > 0)
{
std::vector<uint8_t> modelData(modelDataSize);
- pread(modelCacheHandle[i]->fds[0], modelData.data(), modelData.size(), 0);
+ pread(*modelCacheHandle[i], modelData.data(), modelData.size(), 0);
hashValue ^= CacheDataHandlerInstance().Hash(modelData);
}
}
@@ -403,7 +403,7 @@ GeneralResult<SharedPreparedModel> ArmnnDriverImpl::PrepareArmnnModelFromCache(
// Read the model
std::vector<uint8_t> dataCacheData(dataSize);
- pread(dataCacheHandle[0]->fds[0], dataCacheData.data(), dataCacheData.size(), 0);
+ pread(*dataCacheHandle[0], dataCacheData.data(), dataCacheData.size(), 0);
auto hashValue = CacheDataHandlerInstance().Hash(dataCacheData);
int gpuAccCachedFd = -1;
@@ -422,7 +422,7 @@ GeneralResult<SharedPreparedModel> ArmnnDriverImpl::PrepareArmnnModelFromCache(
return NN_ERROR(ErrorStatus::GENERAL_FAILURE)
<< "ArmnnDriverImpl::prepareModelFromCache(): Invalid model cache handle!";
}
- int cachedFd = modelCacheHandle[index]->fds[0];
+ int cachedFd = *modelCacheHandle[index];
struct stat statBuffer;
if (fstat(cachedFd, &statBuffer) == 0)
{
@@ -558,4 +558,4 @@ void ArmnnDriverImpl::ClearNetworks()
m_NetworkIDs.clear();
}
-} // namespace armnn_driver
\ No newline at end of file
+} // namespace armnn_driver
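The changes in ArmnnDriverImpl.cpp are mechanical: SharedHandle no longer exposes a native_handle-style fds[] array, so the cache file descriptor is now obtained by dereferencing the handle. A minimal sketch of the new access pattern, assuming a handle type whose dereference converts to an int file descriptor; IsWritableCacheFd is a hypothetical helper, not part of the driver:

    #include <fcntl.h>

    // Sketch only: checks that a cache handle refers to a valid read/write descriptor,
    // mirroring the validation done in ArmnnDriverImpl::ValidateSharedHandle().
    template <typename SharedHandleT>
    bool IsWritableCacheFd(const SharedHandleT& handle)
    {
        const int fd = *handle;                                 // was: handle->fds[0]
        if (fd < 0)
        {
            return false;                                       // invalid descriptor
        }
        const int accessMode = fcntl(fd, F_GETFL) & O_ACCMODE;
        return accessMode == O_RDWR;                            // cache files must be read/write
    }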
diff --git a/shim/sl/canonical/ArmnnPreparedModel.cpp b/shim/sl/canonical/ArmnnPreparedModel.cpp
index 35edfb7d99..927d5bfad6 100644
--- a/shim/sl/canonical/ArmnnPreparedModel.cpp
+++ b/shim/sl/canonical/ArmnnPreparedModel.cpp
@@ -275,7 +275,9 @@ ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> ArmnnPreparedModel:
const Request& request,
MeasureTiming measureTiming,
const OptionalTimePoint& deadline,
- const OptionalDuration&) const
+ const OptionalDuration&,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const
{
VLOG(DRIVER) << "CanonicalDriver::PreparedModel::execute()";
@@ -443,7 +445,9 @@ GeneralResult<std::pair<SyncFence, ExecuteFencedInfoCallback>> ArmnnPreparedMode
MeasureTiming measureTiming,
const OptionalTimePoint& deadline,
const OptionalDuration&,
- const OptionalDuration&) const
+ const OptionalDuration&,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const
{
VLOG(DRIVER) << "ArmnnPreparedModel::executeFenced()";
@@ -562,7 +566,9 @@ GeneralResult<std::pair<SyncFence, ExecuteFencedInfoCallback>> ArmnnPreparedMode
GeneralResult<SharedExecution> ArmnnPreparedModel::createReusableExecution(
const Request& request,
MeasureTiming measureTiming,
- const OptionalDuration& loopTimeoutDuration) const
+ const OptionalDuration& loopTimeoutDuration,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const
{
VLOG(DRIVER) << "ArmnnPreparedModel::createReusableExecution()";
return std::make_shared<DefaultExecution>(shared_from_this(),
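execute(), executeFenced() and createReusableExecution() gain the same two trailing parameters. A caller with no hints passes empty vectors, as in this sketch; preparedModel and request are hypothetical placeholders for an android::nn::SharedPreparedModel and a Request built elsewhere:

    // Sketch only: synchronous execution without hints or vendor extensions.
    const std::vector<android::nn::TokenValuePair> noHints;
    const std::vector<android::nn::ExtensionNameAndPrefix> noPrefixes;

    auto result = preparedModel->execute(request,
                                         android::nn::MeasureTiming::NO,
                                         {},          // no deadline
                                         {},          // no loop timeout duration
                                         noHints,
                                         noPrefixes);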
diff --git a/shim/sl/canonical/ArmnnPreparedModel.hpp b/shim/sl/canonical/ArmnnPreparedModel.hpp
index 4f0f456312..920c7fb314 100644
--- a/shim/sl/canonical/ArmnnPreparedModel.hpp
+++ b/shim/sl/canonical/ArmnnPreparedModel.hpp
@@ -59,7 +59,9 @@ public:
const Request& request,
MeasureTiming measureTiming,
const OptionalTimePoint& deadline,
- const OptionalDuration& loopTimeoutDuration) const override;
+ const OptionalDuration& loopTimeoutDuration,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
GeneralResult<std::pair<SyncFence, ExecuteFencedInfoCallback>> executeFenced(
const Request& request,
@@ -67,12 +69,16 @@ public:
MeasureTiming measureTiming,
const OptionalTimePoint& deadline,
const OptionalDuration& loopTimeoutDuration,
- const OptionalDuration& timeoutDurationAfterFence) const override;
+ const OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
GeneralResult<android::nn::SharedExecution> createReusableExecution(
const Request& request,
MeasureTiming measureTiming,
- const OptionalDuration& loopTimeoutDuration) const override;
+ const OptionalDuration& loopTimeoutDuration,
+ const std::vector<android::nn::TokenValuePair>& hints,
+ const std::vector<android::nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
GeneralResult<SharedBurst> configureExecutionBurst() const override;