return CloneBase<DequantizeLayer>(graph, GetName());
BOOST_ASSERT(inferredShapes.size() == 1);
ConditionalThrowIfNotEqual<LayerValidationException>(
    "DequantizeLayer: TensorShape set on OutputSlot[0] does not match the inferred shape.",
const char * GetName() const override
std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override
virtual const TensorInfo& GetTensorInfo() const = 0
WorkloadInfo PrepInfoAndDesc(QueueDescriptor &descriptor) const
Helper function to reduce duplication in *LayerCreateWorkload.
float Dequantize(QuantizedType value, float scale, int32_t offset)
void ValidateTensorShapesFromInputs() override
DequantizeLayer * Clone(Graph &graph) const override
void Accept(ILayerVisitor &visitor) const override
virtual void VisitDequantizeLayer(const IConnectableLayer* layer, const char* name = nullptr) = 0
void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation &location) const
virtual std::unique_ptr< IWorkload > CreateWorkload(const IWorkloadFactory &factory) const override
DequantizeLayer(const char *name)
const TensorShape & GetShape() const
const TensorInfo & GetTensorInfo() const override
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
virtual std::unique_ptr< IWorkload > CreateDequantize(const DequantizeQueueDescriptor &descriptor, const WorkloadInfo &info) const
This layer dequantizes the input tensor.
const InputSlot & GetInputSlot(unsigned int index) const override