Diffstat (limited to 'python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i')
-rw-r--r--  python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i  7
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i b/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i
index 55b6795c90..a2f57a3aa9 100644
--- a/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i
+++ b/python/pyarmnn/src/pyarmnn/swig/modules/armnn_network.i
@@ -29,7 +29,7 @@ Contains:
that can not be reduced will be left in Fp32.
m_ReduceFp32ToFp16 (bool): Reduces Fp32 network to Fp16 for faster processing. Layers
that can not be reduced will be left in Fp32.
- m_ImportEnabled (bool): Enable memory import of inport tensors.
+ m_ImportEnabled (bool): Enable memory import.
m_shapeInferenceMethod: The ShapeInferenceMethod modifies how the output shapes are treated.
When ValidateOnly is selected, the output shapes are inferred from the input parameters
of the layer and any mismatch is reported.
@@ -38,7 +38,6 @@ Contains:
with tensors which rank or dimension sizes are not specified explicitly, however this
information can be calculated from the inputs.
m_ModelOptions: List of backends optimisation options.
- m_ExportEnabled (bool): Enable memory export of output tensors.
") OptimizerOptions;
@@ -52,8 +51,7 @@ struct OptimizerOptions
bool reduceFp32ToBf16 = false,
ShapeInferenceMethod shapeInferenceMethod = armnn::ShapeInferenceMethod::ValidateOnly,
bool importEnabled = false,
- std::vector<armnn::BackendOptions> modelOptions = {},
- bool exportEnabled = false);
+ std::vector<armnn::BackendOptions> modelOptions = {});
bool m_ReduceFp32ToBf16;
bool m_ReduceFp32ToFp16;
@@ -61,7 +59,6 @@ struct OptimizerOptions
ShapeInferenceMethod m_shapeInferenceMethod;
bool m_ImportEnabled;
std::vector<armnn::BackendOptions> m_ModelOptions;
- bool m_ExportEnabled;
};
%model_options_clear;
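
Usage note (not part of the diff): the sketch below illustrates how OptimizerOptions is typically constructed from PyArmNN after this change, i.e. without an exportEnabled constructor argument or m_ExportEnabled member. It is a minimal sketch that assumes the usual pyarmnn module layout (ann.OptimizerOptions, the SWIG-generated ShapeInferenceMethod_ValidateOnly constant, ann.Optimize, ann.BackendId); 'network' and 'runtime' are hypothetical objects created elsewhere, so the Optimize() call is left commented out.

import pyarmnn as ann

# Default construction; exportEnabled is no longer accepted by the constructor.
opts = ann.OptimizerOptions()

# Members exposed by the struct above can be set on the wrapper object.
opts.m_ReduceFp32ToFp16 = False
opts.m_ImportEnabled = False          # memory import (see the docstring change above)
opts.m_shapeInferenceMethod = ann.ShapeInferenceMethod_ValidateOnly
# opts.m_ExportEnabled                # no longer a member after this change

# The options are then passed to Optimize() together with the preferred backends.
# Commented out because 'network' and 'runtime' are placeholders created elsewhere:
# opt_network, messages = ann.Optimize(
#     network, [ann.BackendId('CpuRef')], runtime.GetDeviceSpec(), opts)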