#include <OpenVINOEngine.hpp>
Public member functions:

void run() override
void load() override
ImageOrdering getPreferredImageOrdering() const override
std::string getName() const override
std::vector<ModelFormat> getSupportedModelFormats() const
ModelFormat getPreferredModelFormat() const
std::vector<InferenceDeviceInfo> getDeviceList()
~OpenVINOEngine()
virtual void setFilename(std::string filename)
virtual void setModelAndWeights(std::vector<uint8_t> model, std::vector<uint8_t> weights)
virtual std::string getFilename() const
virtual void addInputNode(uint portID, std::string name, NodeType type=NodeType::IMAGE, TensorShape shape={})
virtual void addOutputNode(uint portID, std::string name, NodeType type=NodeType::IMAGE, TensorShape shape={})
virtual void setInputNodeShape(std::string name, TensorShape shape)
virtual void setOutputNodeShape(std::string name, TensorShape shape)
virtual NetworkNode getInputNode(std::string name) const
virtual NetworkNode getOutputNode(std::string name) const
virtual std::unordered_map<std::string, NetworkNode> getOutputNodes() const
virtual std::unordered_map<std::string, NetworkNode> getInputNodes() const
virtual void setInputData(std::string inputNodeName, std::shared_ptr<Tensor> tensor)
virtual std::shared_ptr<Tensor> getOutputData(std::string inputNodeName)
virtual bool isLoaded() const
virtual bool isModelFormatSupported(ModelFormat format)
virtual void setDeviceType(InferenceDeviceType type)
virtual void setDevice(int index=-1, InferenceDeviceType type=InferenceDeviceType::ANY)
virtual int getMaxBatchSize()
virtual void setMaxBatchSize(int size)
Object()
virtual ~Object()
Reporter& getReporter()
|
◆ ~OpenVINOEngine()
fast::OpenVINOEngine::~OpenVINOEngine()
|
◆ getDeviceList()
Get a list of devices available for this inference engine.

Returns: a vector with info on each device.
Reimplemented from fast::InferenceEngine.
◆ getName()
std::string fast::OpenVINOEngine::getName() const

(override, virtual)
◆ getPreferredImageOrdering()
ImageOrdering fast::OpenVINOEngine::getPreferredImageOrdering() const

(override, virtual)
◆ getPreferredModelFormat()
ModelFormat fast::OpenVINOEngine::getPreferredModelFormat() const

(inline, virtual)
◆ getSupportedModelFormats()
std::vector<ModelFormat> fast::OpenVINOEngine::getSupportedModelFormats() const

(inline, virtual)
◆ load()
void fast::OpenVINOEngine::load()

(override, virtual)
◆ run()
void fast::OpenVINOEngine::run()

(override, virtual)
The documentation for this class was generated from the following file: