FAST  3.2.0
Framework for Heterogeneous Medical Image Computing and Visualization
NeuralNetwork.hpp
Go to the documentation of this file.
1 #pragma once
2 
3 #include <FAST/ProcessObject.hpp>
4 #include <FAST/Data/Tensor.hpp>
6 #include "InferenceEngine.hpp"
7 
8 namespace fast {
9 
10 class Image;
11 class Tensor;
12 
16 class FAST_EXPORT InferenceDataList {
17  public:
18  explicit InferenceDataList(std::vector<std::shared_ptr<Image>> images) {
19  m_images = images;
20  }
21  explicit InferenceDataList(std::vector<std::shared_ptr<Tensor>> tensors) {
22  m_tensors = tensors;
23  }
25 
26  }
27  std::vector<std::shared_ptr<Image>> getImages() const {
28  if(!isImages())
29  throw Exception("The inference that list contains tensors, not images");
30 
31  return m_images;
32  }
33  std::vector<std::shared_ptr<Tensor>> getTensors() const {
34  if(!isTensors())
35  throw Exception("The inference that list contains images, not tensors");
36 
37  return m_tensors;
38  }
39  bool isTensors() const { return !m_tensors.empty(); };
40  bool isImages() const { return !m_images.empty(); };
41  int getSize() const {
42  return isImages() ? m_images.size() : m_tensors.size();
43  }
44  private:
45  std::vector<std::shared_ptr<Image>> m_images;
46  std::vector<std::shared_ptr<Tensor>> m_tensors;
47 };
48 
/**
 * Data object wrapping an InferenceDataList, representing a sequence
 * (presumably temporal, e.g. video frames — confirm upstream) of images
 * or tensors to feed to a NeuralNetwork.
 *
 * NOTE(review): recovered from a generated listing that omits source lines
 * 50 and 58 inside this class; the cross-reference index suggests an
 * `access` typedef (DataAccess<InferenceDataList>::pointer) and possibly a
 * FAST_OBJECT macro were declared there — confirm against the upstream header.
 */
class Sequence : public SimpleDataObject<InferenceDataList> {
    public:
        /** Populate this sequence from a list of images. */
        void create(std::vector<std::shared_ptr<Image>> images) {
            mData = InferenceDataList(images);
        };
        /** Populate this sequence from a list of tensors. */
        void create(std::vector<std::shared_ptr<Tensor>> tensors) {
            mData = InferenceDataList(tensors);
        };
    private:
        // Private constructor: instances are created through the framework's
        // factory mechanism rather than directly.
        Sequence() {};
};
62 
/**
 * Data object wrapping an InferenceDataList, representing a batch of
 * images or tensors to run through a NeuralNetwork in one inference call.
 *
 * NOTE(review): recovered from a generated listing that omits source lines
 * 64 and 72 inside this class; the cross-reference index suggests an
 * `access` typedef (DataAccess<InferenceDataList>::pointer) and possibly a
 * FAST_OBJECT macro were declared there — confirm against the upstream header.
 */
class Batch : public SimpleDataObject<InferenceDataList> {
    public:
        /** Populate this batch from a list of images. */
        void create(std::vector<std::shared_ptr<Image>> images) {
            mData = InferenceDataList(images);
        };
        /** Populate this batch from a list of tensors. */
        void create(std::vector<std::shared_ptr<Tensor>> tensors) {
            mData = InferenceDataList(tensors);
        };
    private:
        // Private constructor: instances are created through the framework's
        // factory mechanism rather than directly.
        Batch() {};
};
76 
/**
 * Process object that runs neural-network inference on images or tensors
 * through a pluggable InferenceEngine backend.
 *
 * NOTE(review): recovered from a generated listing that omits several source
 * lines inside this class (78, 150-151, 155, 158). The cross-reference index
 * confirms the full header also declares `bool mPreserveAspectRatio`,
 * `int m_batchSize` and `Vector3f mNewInputSpacing` among the omitted lines —
 * confirm against the upstream header before relying on this copy.
 */
class FAST_EXPORT NeuralNetwork : public ProcessObject {
    public:
        /**
         * Load a neural network model from a file.
         * @param filename path to the model file; supported formats depend on
         *        the active inference engine
         */
        void load(std::string filename);
        /**
         * Load a neural network model from memory.
         * @param model serialized model definition
         * @param weights serialized model weights
         */
        void load(std::vector<uint8_t> model, std::vector<uint8_t> weights);
        /** Select the inference engine backend by instance. */
        void setInferenceEngine(InferenceEngine::pointer engine);
        /** Select the inference engine backend by name. */
        void setInferenceEngine(std::string engine);
        /** @return the currently selected inference engine */
        InferenceEngine::pointer getInferenceEngine() const;
        /**
         * Declare an input node of the network on the given port.
         * An empty shape presumably means "use the model's own shape" — confirm upstream.
         */
        void setInputNode(uint portID, std::string name, NodeType type = NodeType::IMAGE, TensorShape shape = {});
        /** Declare an output node of the network on the given port. */
        void setOutputNode(uint portID, std::string name, NodeType type = NodeType::IMAGE, TensorShape shape = {});
        /** Set the intensity scale factor applied to input data (see mScaleFactor). */
        void setScaleFactor(float scale);
        /** Set mean and standard deviation used for input normalization (see mMean/mStd). */
        void setMeanAndStandardDeviation(float mean, float std);
        /** Set min/max intensity range for input normalization (see mMinAndMaxIntensitySet). */
        void setMinAndMaxIntensity(float min, float max);
        void setSignedInputNormalization(bool signedInputNormalization);
        void setPreserveAspectRatio(bool preserve);
        /** Enable/disable horizontal flipping of input images (see mHorizontalImageFlipping). */
        void setHorizontalFlipping(bool flip);

        /** Set the temporal window size for sequence input (see mTemporalWindow). */
        void setTemporalWindow(uint window);

        /** Override the expected size of the named input node (see mInputSizes). */
        virtual void setInputSize(std::string name, std::vector<int> size);

        void loadAttributes();

        virtual ~NeuralNetwork();
    protected:
        // Protected constructor: instantiated through the framework's factory
        // mechanism rather than directly.
        NeuralNetwork();
        bool mHorizontalImageFlipping = false;
        bool mSignedInputNormalization = false;
        int mTemporalWindow = 0;
        // Normalization parameters; note these have no in-class initializers,
        // so they are presumably set in the constructor — confirm in the .cpp.
        float mScaleFactor, mMean, mStd, mMinIntensity, mMaxIntensity;
        bool mMinAndMaxIntensitySet = false;
        // Per-input-node size overrides, keyed by node name.
        std::unordered_map<std::string, std::vector<int>> mInputSizes;
        // Output data keyed by output port ID.
        std::unordered_map<int, DataObject::pointer> m_processedOutputData;

        virtual void run();

        // Active inference backend.
        std::shared_ptr<InferenceEngine> m_engine;

        // Queued input data per input-node name; a vector per node suggests
        // several frames may be kept (temporal window) — confirm in the .cpp.
        std::unordered_map<std::string, std::vector<std::shared_ptr<Image>>> mInputImages;
        std::unordered_map<std::string, std::vector<std::shared_ptr<Tensor>>> mInputTensors;

        /** Convert the queued input data into tensors keyed by input-node name. */
        std::unordered_map<std::string, Tensor::pointer> processInputData();
        /** Resize a list of images to the given width/height/depth. */
        std::vector<std::shared_ptr<Image>> resizeImages(const std::vector<std::shared_ptr<Image>>& images, int width, int height, int depth);
        /** Convert a list of images into a single tensor of the given shape. */
        Tensor::pointer convertImagesToTensor(std::vector<std::shared_ptr<Image>> image, const TensorShape& shape, bool temporal);

        /** Normalize an output tensor for downstream use; `sample` selects an element when batched. */
        Tensor::pointer standardizeOutputTensorData(Tensor::pointer tensor, int sample = 0);

    private:
        void execute();
};
183 
184 }
fast::NodeType
NodeType
Definition: InferenceEngine.hpp:26
fast::Sequence::access
DataAccess< InferenceDataList >::pointer access
Definition: NeuralNetwork.hpp:57
fast::Sequence
Definition: NeuralNetwork.hpp:49
fast::InferenceEngine::pointer
std::shared_ptr< InferenceEngine > pointer
Definition: InferenceEngine.hpp:78
fast::NeuralNetwork::mPreserveAspectRatio
bool mPreserveAspectRatio
Definition: NeuralNetwork.hpp:151
fast::NeuralNetwork::mNewInputSpacing
Vector3f mNewInputSpacing
Definition: NeuralNetwork.hpp:158
fast::NeuralNetwork::m_processedOutputData
std::unordered_map< int, DataObject::pointer > m_processedOutputData
Definition: NeuralNetwork.hpp:160
fast::Exception
Definition: Exception.hpp:15
fast::TensorShape
Definition: TensorShape.hpp:9
fast::SimpleDataObject
Definition: SimpleDataObject.hpp:24
fast
Definition: AffineTransformation.hpp:7
fast::NeuralNetwork::m_engine
std::shared_ptr< InferenceEngine > m_engine
Definition: NeuralNetwork.hpp:164
fast::InferenceDataList::getImages
std::vector< std::shared_ptr< Image > > getImages() const
Definition: NeuralNetwork.hpp:27
fast::InferenceDataList
Definition: NeuralNetwork.hpp:16
fast::DataAccess::pointer
std::unique_ptr< DataAccess< DataType > > pointer
Definition: SimpleDataObject.hpp:36
fast::InferenceDataList::InferenceDataList
InferenceDataList(std::vector< std::shared_ptr< Image >> images)
Definition: NeuralNetwork.hpp:18
fast::InferenceDataList::getSize
int getSize() const
Definition: NeuralNetwork.hpp:41
FAST_OBJECT
#define FAST_OBJECT(className)
Definition: Object.hpp:9
fast::NeuralNetwork::m_batchSize
int m_batchSize
Definition: NeuralNetwork.hpp:155
fast::InferenceDataList::isImages
bool isImages() const
Definition: NeuralNetwork.hpp:40
fast::Batch::access
DataAccess< InferenceDataList >::pointer access
Definition: NeuralNetwork.hpp:71
fast::NeuralNetwork::mInputImages
std::unordered_map< std::string, std::vector< std::shared_ptr< Image > > > mInputImages
Definition: NeuralNetwork.hpp:166
fast::InferenceDataList::InferenceDataList
InferenceDataList(std::vector< std::shared_ptr< Tensor >> tensors)
Definition: NeuralNetwork.hpp:21
fast::max
T max(T a, T b)
Definition: Utility.hpp:46
fast::NeuralNetwork::mInputSizes
std::unordered_map< std::string, std::vector< int > > mInputSizes
Definition: NeuralNetwork.hpp:159
fast::Sequence::create
void create(std::vector< std::shared_ptr< Tensor >> tensors)
Definition: NeuralNetwork.hpp:55
fast::InferenceDataList::InferenceDataList
InferenceDataList()
Definition: NeuralNetwork.hpp:24
fast::InferenceDataList::getTensors
std::vector< std::shared_ptr< Tensor > > getTensors() const
Definition: NeuralNetwork.hpp:33
fast::min
T min(T a, T b)
Definition: Utility.hpp:41
SimpleDataObject.hpp
ProcessObject.hpp
InferenceEngine.hpp
fast::NodeType::IMAGE
@ IMAGE
Tensor.hpp
fast::DataObject::pointer
std::shared_ptr< DataObject > pointer
Definition: DataObject.hpp:16
fast::Sequence::create
void create(std::vector< std::shared_ptr< Image >> images)
Definition: NeuralNetwork.hpp:52
fast::NeuralNetwork::mStd
float mStd
Definition: NeuralNetwork.hpp:156
fast::Batch
Definition: NeuralNetwork.hpp:63
fast::Batch::create
void create(std::vector< std::shared_ptr< Tensor >> tensors)
Definition: NeuralNetwork.hpp:69
uint
unsigned int uint
Definition: DataTypes.hpp:16
fast::NeuralNetwork
Definition: NeuralNetwork.hpp:77
fast::SimpleDataObject< InferenceDataList >::mData
InferenceDataList mData
Definition: SimpleDataObject.hpp:83
fast::Batch::create
void create(std::vector< std::shared_ptr< Image >> images)
Definition: NeuralNetwork.hpp:66
fast::NeuralNetwork::mInputTensors
std::unordered_map< std::string, std::vector< std::shared_ptr< Tensor > > > mInputTensors
Definition: NeuralNetwork.hpp:167
fast::ProcessObject
Definition: ProcessObject.hpp:22
fast::InferenceDataList::isTensors
bool isTensors() const
Definition: NeuralNetwork.hpp:39