Merge remote-tracking branch 'upstream/3.4' into merge-3.4
alalek committed Apr 16, 2020
2 parents fecebea + 78e4fbd commit 2cef100
Showing 12 changed files with 58 additions and 16 deletions.
4 changes: 2 additions & 2 deletions cmake/OpenCVDetectInferenceEngine.cmake
@@ -129,9 +129,9 @@ endif()

if(INF_ENGINE_TARGET)
if(NOT INF_ENGINE_RELEASE)
-message(WARNING "InferenceEngine version has not been set, 2020.1 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
+message(WARNING "InferenceEngine version has not been set, 2020.2 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
endif()
-set(INF_ENGINE_RELEASE "2020010000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2020.1.0.2 -> 2020010002)")
+set(INF_ENGINE_RELEASE "2020020000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2020.1.0.2 -> 2020010002)")
set_target_properties(${INF_ENGINE_TARGET} PROPERTIES
INTERFACE_COMPILE_DEFINITIONS "HAVE_INF_ENGINE=1;INF_ENGINE_RELEASE=${INF_ENGINE_RELEASE}"
)
@@ -83,7 +83,7 @@ Let us define a kernel function \f$K(p,q)\f$ which does a dot product between tw
\begin{aligned}
K(p,q) = \phi(p).\phi(q) &= \phi(p)^T \phi(q) \\
&= (p_{1}^2,p_{2}^2,\sqrt{2} p_1 p_2).(q_{1}^2,q_{2}^2,\sqrt{2} q_1 q_2) \\
-&= p_1 q_1 + p_2 q_2 + 2 p_1 q_1 p_2 q_2 \\
+&= p_{1}^2 q_{1}^2 + p_{2}^2 q_{2}^2 + 2 p_1 q_1 p_2 q_2 \\
&= (p_1 q_1 + p_2 q_2)^2 \\
\phi(p).\phi(q) &= (p.q)^2
\end{aligned}
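For example (illustrative values, not part of the commit), with \f$p = (1, 2)\f$ and \f$q = (3, 4)\f$ the corrected line checks out numerically:
\f[
\phi(p) \cdot \phi(q) = p_{1}^2 q_{1}^2 + p_{2}^2 q_{2}^2 + 2 p_1 q_1 p_2 q_2 = 9 + 64 + 48 = 121 = (3 + 8)^2 = (p \cdot q)^2
\f]
whereas the removed line \f$p_1 q_1 + p_2 q_2 + 2 p_1 q_1 p_2 q_2\f$ would give \f$3 + 8 + 48 = 59 \neq 121\f$.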
8 changes: 7 additions & 1 deletion modules/dnn/CMakeLists.txt
@@ -129,7 +129,13 @@ endif()

set(dnn_runtime_libs "")
if(INF_ENGINE_TARGET)
-ocv_option(OPENCV_DNN_IE_NN_BUILDER_2019 "Build with Inference Engine NN Builder API support" ON) # future: NOT HAVE_NGRAPH
+set(use_nn_builder OFF)
+if(TARGET inference_engine_nn_builder OR # custom imported target
+   TARGET IE::inference_engine_nn_builder OR # default imported target via InferenceEngineConfig.cmake
+   INF_ENGINE_RELEASE VERSION_LESS "2020000000") # compatibility with older versions of IE
+  set(use_nn_builder ON)
+endif()
+ocv_option(OPENCV_DNN_IE_NN_BUILDER_2019 "Build with Inference Engine NN Builder API support" ${use_nn_builder}) # future: NOT HAVE_NGRAPH
if(OPENCV_DNN_IE_NN_BUILDER_2019)
message(STATUS "DNN: Enabling Inference Engine NN Builder API support")
add_definitions(-DHAVE_DNN_IE_NN_BUILDER_2019=1)
12 changes: 12 additions & 0 deletions modules/dnn/src/ie_ngraph.cpp
@@ -77,7 +77,11 @@ class NgraphCustomOp: public ngraph::op::Op {
return type_info;
}

+#if INF_ENGINE_VER_MAJOR_GT(2020020000)
+NgraphCustomOp(const ngraph::OutputVector& inputs,
+#else
NgraphCustomOp(const ngraph::NodeVector& inputs,
+#endif
const std::map<std::string, InferenceEngine::Parameter>& params = {}):
Op(inputs), params(params)
{
@@ -103,7 +107,11 @@ class NgraphCustomOp: public ngraph::op::Op {

std::shared_ptr<ngraph::Node> copy_with_new_args(const ngraph::NodeVector& new_args) const override
{
+#if INF_ENGINE_VER_MAJOR_GT(2020020000)
+return std::make_shared<NgraphCustomOp>(ngraph::as_output_vector(new_args), params);
+#else
return std::make_shared<NgraphCustomOp>(new_args, params);
+#endif
}

bool visit_attributes(ngraph::AttributeVisitor& visitor) override
@@ -270,7 +278,11 @@ InfEngineNgraphNode::InfEngineNgraphNode(const std::vector<Ptr<BackendNode> >& n
{"internals", shapesToStr(internals)}
};

+#if INF_ENGINE_VER_MAJOR_GT(2020020000)
+ngraph::OutputVector inp_nodes;
+#else
ngraph::NodeVector inp_nodes;
+#endif
for (const auto& node : nodes)
inp_nodes.emplace_back(node.dynamicCast<InfEngineNgraphNode>()->node);
node = std::make_shared<NgraphCustomOp>(inp_nodes, params);
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/blank_layer.cpp
@@ -147,7 +147,7 @@ class BlankLayerImpl CV_FINAL : public BlankLayer
const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
-ngraph::NodeVector inp{ieInpNode};
+ngraph::OutputVector inp{ieInpNode};
auto blank = std::make_shared<ngraph::op::Concat>(inp, 0);
return Ptr<BackendNode>(new InfEngineNgraphNode(blank));
}
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/concat_layer.cpp
@@ -358,7 +358,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
std::vector<size_t> maxDims(numDims, 0);

CV_Assert(inputs.size() == nodes.size());
-ngraph::NodeVector inp_nodes;
+ngraph::OutputVector inp_nodes;
for (int i = 0; i < nodes.size(); ++i)
{
inp_nodes.push_back(nodes[i].dynamicCast<InfEngineNgraphNode>()->node);
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/pooling_layer.cpp
@@ -473,7 +473,7 @@ class PoolingLayerImpl CV_FINAL : public PoolingLayer
ieLayer.setRoundingType(ceilMode ?
InferenceEngine::Builder::PoolingLayer::RoundingType::CEIL :
InferenceEngine::Builder::PoolingLayer::RoundingType::FLOOR);
-ieLayer.setExcludePad(type == AVE && padMode == "SAME");
+ieLayer.setExcludePad(!avePoolPaddedArea);

InferenceEngine::Builder::Layer l = ieLayer;
if (!padMode.empty())
7 changes: 5 additions & 2 deletions modules/dnn/src/op_inf_engine.hpp
@@ -24,10 +24,11 @@
#define INF_ENGINE_RELEASE_2019R2 2019020000
#define INF_ENGINE_RELEASE_2019R3 2019030000
#define INF_ENGINE_RELEASE_2020_1 2020010000
+#define INF_ENGINE_RELEASE_2020_2 2020020000

#ifndef INF_ENGINE_RELEASE
#warning("IE version have not been provided via command-line. Using 2019.1 by default")
#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2020_1
#warning("IE version have not been provided via command-line. Using 2020.2 by default")
#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2020_2
#endif

#define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
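The INF_ENGINE_VER_MAJOR_GT macro above (used by the new guards in ie_ngraph.cpp) compares only the YYYYAA part of INF_ENGINE_RELEASE, because the integer division by 10000 drops the BBCC patch digits. A minimal standalone C++ sketch, not part of this commit; the pretend release value 2020010002 is just an example:

// Standalone sketch: INF_ENGINE_RELEASE is YYYYAABBCC, so dividing by 10000
// keeps only YYYYAA and the patch digits never affect the comparison.
#define INF_ENGINE_RELEASE 2020010002  // e.g. building against IE 2020.1.0.2
#define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))

int main()
{
    static_assert(!INF_ENGINE_VER_MAJOR_GT(2020010000), "202001 is not greater than 202001");
    static_assert(INF_ENGINE_VER_MAJOR_GT(2019030000), "2020.1.x is newer than 2019 R3 (202001 > 201903)");
    return 0;
}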
@@ -58,7 +59,9 @@

#include <inference_engine.hpp>

+#ifdef HAVE_DNN_IE_NN_BUILDER_2019
#include <ie_builders.hpp>
+#endif

#if defined(__GNUC__) && INF_ENGINE_VER_MAJOR_LT(INF_ENGINE_RELEASE_2020_1)
#pragma GCC visibility pop
2 changes: 1 addition & 1 deletion modules/objdetect/src/qrcode.cpp
@@ -32,7 +32,7 @@ static bool checkQRInputImage(InputArray img, Mat& gray)
return false; // image data is not enough for providing reliable results
}
int incn = img.channels();
-CV_Check(incn, incn == 1 || incn == 3 || incn == 3, "");
+CV_Check(incn, incn == 1 || incn == 3 || incn == 4, "");
if (incn == 3 || incn == 4)
{
cvtColor(img, gray, COLOR_BGR2GRAY);
4 changes: 2 additions & 2 deletions modules/objdetect/test/test_cascadeandhog.cpp
@@ -1140,7 +1140,7 @@ void HOGDescriptorTester::compute(InputArray _img, vector<float>& descriptors,
actual_hog->compute(img, actual_descriptors, winStride, padding, locations);

double diff_norm = cvtest::norm(actual_descriptors, descriptors, NORM_L2 + NORM_RELATIVE);
-const double eps = FLT_EPSILON * 100;
+const double eps = 2.0e-3;
if (diff_norm > eps)
{
ts->printf(cvtest::TS::SUMMARY, "Norm of the difference: %lf\n", diff_norm);
@@ -1289,7 +1289,7 @@ void HOGDescriptorTester::computeGradient(InputArray _img, InputOutputArray _gra
const char* args[] = { "Gradient's", "Qangles's" };
actual_hog->computeGradient(img, actual_mats[0], actual_mats[1], paddingTL, paddingBR);

-const double eps = FLT_EPSILON * 100;
+const double eps = 8.0e-3;
for (i = 0; i < 2; ++i)
{
double diff_norm = cvtest::norm(actual_mats[i], reference_mats[i], NORM_L2 + NORM_RELATIVE);
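Both checks compare HOG outputs with a relative L2 norm, so the relaxed tolerances (2.0e-3 and 8.0e-3) bound ||actual - reference|| / ||reference|| rather than an absolute difference. A rough standalone sketch of that kind of check, assuming the public cv::norm rather than the cvtest:: helper used in the test, with illustrative vectors:

// Sketch of a relative-L2 tolerance check; the vectors and the 2.0e-3
// threshold are illustrative, mirroring HOGDescriptorTester::compute above.
#include <opencv2/core.hpp>
#include <iostream>
#include <vector>

int main()
{
    std::vector<float> reference = {1.0f, 2.0f, 3.0f};
    std::vector<float> actual    = {1.001f, 1.999f, 3.002f};

    // NORM_L2 | NORM_RELATIVE returns ||actual - reference||_2 / ||reference||_2
    double diff_norm = cv::norm(actual, reference, cv::NORM_L2 | cv::NORM_RELATIVE);

    const double eps = 2.0e-3;
    std::cout << (diff_norm > eps ? "norm of the difference too large" : "ok") << std::endl;
    return 0;
}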
7 changes: 4 additions & 3 deletions modules/python/src2/hdr_parser.py
@@ -430,9 +430,10 @@ def parse_func_decl(self, decl_str, mat="Mat", docstring=""):
# filter off some common prefixes, which are meaningless for Python wrappers.
# note that we do not strip "static" prefix, which does matter;
# it means class methods, not instance methods
-decl_str = self.batch_replace(decl_str, [("static inline", ""), ("inline", ""),\
-("CV_EXPORTS_W", ""), ("CV_EXPORTS", ""), ("CV_CDECL", ""), ("CV_WRAP ", " "), ("CV_INLINE", ""),
-("CV_DEPRECATED", ""), ("CV_DEPRECATED_EXTERNAL", "")]).strip()
+decl_str = self.batch_replace(decl_str, [("static inline", ""), ("inline", ""), ("explicit ", ""),
+("CV_EXPORTS_W", ""), ("CV_EXPORTS", ""), ("CV_CDECL", ""),
+("CV_WRAP ", " "), ("CV_INLINE", ""),
+("CV_DEPRECATED", ""), ("CV_DEPRECATED_EXTERNAL", "")]).strip()


if decl_str.strip().startswith('virtual'):
22 changes: 21 additions & 1 deletion modules/videoio/misc/java/test/VideoCaptureTest.java
@@ -35,10 +35,30 @@ public void testIsOpened() {
assertFalse(capture.isOpened());
}

-public void testVideoCapture() {
+public void testDefaultConstructor() {
capture = new VideoCapture();
assertNotNull(capture);
assertFalse(capture.isOpened());
}
+
+public void testConstructorWithFilename() {
+capture = new VideoCapture("some_file.avi");
+assertNotNull(capture);
+}
+
+public void testConstructorWithFilenameAndExplicitlySpecifiedAPI() {
+capture = new VideoCapture("some_file.avi", Videoio.CAP_ANY);
+assertNotNull(capture);
+}
+
+public void testConstructorWithIndex() {
+capture = new VideoCapture(0);
+assertNotNull(capture);
+}
+
+public void testConstructorWithIndexAndExplicitlySpecifiedAPI() {
+capture = new VideoCapture(0, Videoio.CAP_ANY);
+assertNotNull(capture);
+}

}
