Skip to content

Commit

Permalink
Flexible inputs for OpenVINO IR models
Browse files Browse the repository at this point in the history
  • Loading branch information
dkurt committed Apr 18, 2020
1 parent e59e978 commit 908bf93
Show file tree
Hide file tree
Showing 6 changed files with 62 additions and 0 deletions.
2 changes: 2 additions & 0 deletions modules/dnn/src/dnn.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1629,6 +1629,7 @@ struct Net::Impl

Ptr<InfEngineBackendNode> ieNode = node.dynamicCast<InfEngineBackendNode>();
CV_Assert(!ieNode.empty());
ieNode->net->reset();

for (it = layers.begin(); it != layers.end(); ++it)
{
Expand Down Expand Up @@ -1930,6 +1931,7 @@ struct Net::Impl

Ptr<InfEngineNgraphNode> ieNode = node.dynamicCast<InfEngineNgraphNode>();
CV_Assert(!ieNode.empty());
ieNode->net->reset();

for (it = layers.begin(); it != layers.end(); ++it)
{
Expand Down
7 changes: 7 additions & 0 deletions modules/dnn/src/ie_ngraph.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -780,6 +780,13 @@ void forwardNgraph(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
ieNode->net->forward(outBlobsWrappers, isAsync);
}

// Drops all per-initialization state (cached blobs, inference requests,
// and the init flag) so the network can be rebuilt from scratch — e.g.
// when the input blob shape changes between forward() calls.
// NOTE(review): callers in dnn.cpp invoke this right before re-walking
// the layer list during backend setup; presumably this is what enables
// the "flexible inputs" behavior — confirm against Net::Impl.
void InfEngineNgraphNet::reset()
{
    allBlobs.clear();     // cached input/output blob bindings
    infRequests.clear();  // outstanding InferenceEngine requests
    isInit = false;       // forces re-initialization on next use
}

void InfEngineNgraphNet::addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs)
{
auto wrappers = ngraphWrappers(ptrs);
Expand Down
2 changes: 2 additions & 0 deletions modules/dnn/src/ie_ngraph.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ class InfEngineNgraphNet

void createNet(Target targetId);
void setNodePtr(std::shared_ptr<ngraph::Node>* ptr);

void reset();
private:
void release();
int getNumComponents();
Expand Down
7 changes: 7 additions & 0 deletions modules/dnn/src/op_inf_engine.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -891,6 +891,13 @@ bool InfEngineBackendNet::isInitialized()
#endif
}

// Clears cached blobs, pending inference requests, and the init flag,
// returning the backend network to an uninitialized state. Mirrors
// InfEngineNgraphNet::reset() for the NN Builder API backend.
// NOTE(review): invoked from dnn.cpp before layers are re-registered,
// apparently to allow re-running the net with a new input shape —
// verify against Net::Impl's backend-init path.
void InfEngineBackendNet::reset()
{
    allBlobs.clear();     // cached input/output blob bindings
    infRequests.clear();  // outstanding InferenceEngine requests
    isInit = false;       // forces re-initialization on next use
}

void InfEngineBackendNet::addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs)
{
auto wrappers = infEngineWrappers(ptrs);
Expand Down
2 changes: 2 additions & 0 deletions modules/dnn/src/op_inf_engine.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,8 @@ class InfEngineBackendNet

void addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs);

void reset();

private:
InferenceEngine::Builder::Network netBuilder;

Expand Down
42 changes: 42 additions & 0 deletions modules/dnn/test/test_misc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -760,6 +760,48 @@ TEST_P(Test_Model_Optimizer, readFromBuffer)
normAssert(ref, actual, "", 0, 0);
}

// Checks that a Net built from an OpenVINO IR model accepts inputs whose
// shape differs from the shape baked into the model, and that switching
// back to the original shape reproduces the reference output exactly.
TEST_P(Test_Model_Optimizer, flexible_inputs)
{
    const Backend backendId = get<0>(GetParam());
    const Target targetId = get<1>(GetParam());

    const std::string& model = findDataFile("dnn/layers/layer_convolution_fp16.bin");
    const std::string& proto = findDataFile("dnn/layers/layer_convolution_fp16.xml");

    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    else
        FAIL() << "Unknown backendId";

    // net0 produces the reference output; net1 is the net under test.
    Net net0 = readNet(model, proto);
    net0.setPreferableTarget(targetId);

    Net net1 = readNet(model, proto);
    net1.setPreferableTarget(targetId);

    // Reference run with the "canonical" input shape.
    int blobSize0[] = {2, 6, 75, 113};
    Mat input0(4, &blobSize0[0], CV_32F);
    randu(input0, 0, 255);

    net0.setInput(input0);
    // clone(): forward() returns a header over internal memory that the
    // next forward() would overwrite.
    Mat ref = net0.forward().clone();

    // Run net1 first with a different (smaller) shape ...
    int blobSize1[] = {1, 6, 10, 9};
    Mat input1(4, &blobSize1[0], CV_32F);
    randu(input1, 0, 255);

    net1.setInput(input1);
    Mat out = net1.forward();
    EXPECT_NE(out.size, ref.size);

    // ... then with the reference shape: output must match bit-exactly.
    net1.setInput(input0);
    out = net1.forward();
    // Fix: pass "" for the comment argument. The original called
    // normAssert(ref, out, 0, 0), which binds 0 (a null const char*) to
    // `comment` and 0 to `l1`, silently leaving `lInf` at its non-zero
    // default — inconsistent with the sibling test at readFromBuffer,
    // which correctly demands exact equality via ("", 0, 0).
    normAssert(ref, out, "", 0, 0);
}

INSTANTIATE_TEST_CASE_P(/**/, Test_Model_Optimizer,
dnnBackendsAndTargetsIE()
);
Expand Down

0 comments on commit 908bf93

Please sign in to comment.