// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#ifndef __OPENCV_DNN_IE_NGRAPH_HPP__
#define __OPENCV_DNN_IE_NGRAPH_HPP__

#include "op_inf_engine.hpp"

#ifdef HAVE_DNN_NGRAPH

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable : 4245)
#pragma warning(disable : 4268)
#endif
#include <ngraph/ngraph.hpp>
#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif  // HAVE_DNN_NGRAPH

namespace cv { namespace dnn {

#ifdef HAVE_DNN_NGRAPH

class InfEngineNgraphNode;

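// Wraps an ngraph::Function assembled from OpenCV DNN layers and compiled into
// an InferenceEngine::ExecutableNetwork for the selected target.
//
// Rough usage sketch (names come from the declarations below; the exact call
// order inside OpenCV's Net implementation may differ):
//
//   InfEngineNgraphNet ieNet(netImpl);
//   ieNet.addOutput(outputName);              // mark a requested output
//   ieNet.setUnconnectedNodes(lastNode);      // register terminal nodes
//   ieNet.createNet(targetId);                // build the ngraph::Function
//   ieNet.init(targetId);                     // compile via the IE plugin
//   ieNet.addBlobs(blobWrappers);             // bind input/output memory
//   ieNet.forward(outBlobWrappers, /*isAsync=*/false);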
class InfEngineNgraphNet
{
public:
    InfEngineNgraphNet(detail::NetImplBase& netImpl);
    InfEngineNgraphNet(detail::NetImplBase& netImpl, InferenceEngine::CNNNetwork& net);

    void addOutput(const std::string& name);

    bool isInitialized();
    void init(Target targetId);

    void forward(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers, bool isAsync);

    void initPlugin(InferenceEngine::CNNNetwork& net);
    ngraph::ParameterVector setInputs(const std::vector<cv::Mat>& inputs, const std::vector<std::string>& names);

    void setUnconnectedNodes(Ptr<InfEngineNgraphNode>& node);
    void addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs);

    void createNet(Target targetId);
    void setNodePtr(std::shared_ptr<ngraph::Node>* ptr);

    void reset();
private:
    detail::NetImplBase& netImpl_;

    void release();
    int getNumComponents();
    void dfs(std::shared_ptr<ngraph::Node>& node, std::vector<std::shared_ptr<ngraph::Node>>& comp,
             std::unordered_map<std::string, bool>& used);

    ngraph::ParameterVector inputs_vec;
    std::shared_ptr<ngraph::Function> ngraph_function;
    std::vector<std::vector<std::shared_ptr<ngraph::Node>>> components;
    std::unordered_map<std::string, std::shared_ptr<ngraph::Node>* > all_nodes;

    InferenceEngine::ExecutableNetwork netExec;
    InferenceEngine::BlobMap allBlobs;
    std::string device_name;
    bool isInit = false;

    struct NgraphReqWrapper
    {
        NgraphReqWrapper() : isReady(true) {}

        void makePromises(const std::vector<Ptr<BackendWrapper> >& outs);

        InferenceEngine::InferRequest req;
        std::vector<cv::AsyncPromise> outProms;
        std::vector<std::string> outsNames;
        bool isReady;
    };
    std::vector<Ptr<NgraphReqWrapper> > infRequests;

    InferenceEngine::CNNNetwork cnn;
    bool hasNetOwner;
    std::vector<std::string> requestedOutputs;
    std::unordered_set<std::shared_ptr<ngraph::Node>> unconnectedNodes;
};

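// A BackendNode that ties an OpenCV layer to the ngraph::Node created for it.
// The first constructor is used for layers without a native nGraph
// implementation (presumably wrapping the OpenCV layer as a custom nGraph
// operation); the other two wrap an already constructed ngraph::Node.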
class InfEngineNgraphNode : public BackendNode
{
public:
    InfEngineNgraphNode(const std::vector<Ptr<BackendNode> >& nodes, Ptr<Layer>& layer,
                        std::vector<Mat*>& inputs, std::vector<Mat>& outputs,
                        std::vector<Mat>& internals);

    InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node);
    InfEngineNgraphNode(std::shared_ptr<ngraph::Node>& _node);

    void setName(const std::string& name);

    // nGraph node that provides access to the outputs of this layer.
    std::shared_ptr<ngraph::Node> node;
    Ptr<InfEngineNgraphNet> net;
    Ptr<dnn::Layer> cvLayer;
};

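// Wraps a cv::Mat as an Inference Engine blob so the nGraph backend can share
// memory with OpenCV. copyToHost()/setHostDirty() follow the usual
// BackendWrapper contract for synchronizing data between host and backend;
// futureMat carries the result of an asynchronous inference request.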
class NgraphBackendWrapper : public BackendWrapper
{
public:
    NgraphBackendWrapper(int targetId, const Mat& m);
    NgraphBackendWrapper(Ptr<BackendWrapper> wrapper);
    ~NgraphBackendWrapper();

    static Ptr<BackendWrapper> create(Ptr<BackendWrapper> wrapper);

    virtual void copyToHost() CV_OVERRIDE;
    virtual void setHostDirty() CV_OVERRIDE;

    InferenceEngine::DataPtr dataPtr;
    InferenceEngine::Blob::Ptr blob;
    AsyncArray futureMat;
};

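// Returns the InferenceEngine::DataPtr held by a NgraphBackendWrapper
// (see NgraphBackendWrapper::dataPtr above).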
InferenceEngine::DataPtr ngraphDataNode(const Ptr<BackendWrapper>& ptr);

// This is a fake class to run networks from Model Optimizer. Objects of this
// class simulate responses of layers that are imported by OpenCV and supported by
// Inference Engine. The main difference is that they do not perform the forward pass.
class NgraphBackendLayer : public Layer
{
public:
    NgraphBackendLayer(const InferenceEngine::CNNNetwork &t_net_) : t_net(t_net_) {}

    virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
                                 const int requiredOutputs,
                                 std::vector<MatShape> &outputs,
                                 std::vector<MatShape> &internals) const CV_OVERRIDE;

    virtual void forward(InputArrayOfArrays inputs, OutputArrayOfArrays outputs,
                         OutputArrayOfArrays internals) CV_OVERRIDE;

    virtual bool supportBackend(int backendId) CV_OVERRIDE;

private:
    InferenceEngine::CNNNetwork t_net;
};

#endif  // HAVE_DNN_NGRAPH

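// Entry point used by the DNN module to run a network through the nGraph
// backend. It is declared outside the HAVE_DNN_NGRAPH guard so callers always
// compile; without nGraph support the implementation is expected to report an
// error instead of performing inference.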
void forwardNgraph(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
                   Ptr<BackendNode>& node, bool isAsync);

}}  // namespace cv::dnn


#endif  // __OPENCV_DNN_IE_NGRAPH_HPP__