diff --git a/3rdparty/libtiff/tif_config.h.cmakein b/3rdparty/libtiff/tif_config.h.cmakein index 182f2833d111..24f58119ba1e 100644 --- a/3rdparty/libtiff/tif_config.h.cmakein +++ b/3rdparty/libtiff/tif_config.h.cmakein @@ -54,7 +54,7 @@ /* Native cpu byte order: 1 if big-endian (Motorola) or 0 if little-endian (Intel) */ -#define HOST_BIGENDIAN 0 +#define HOST_BIGENDIAN @WORDS_BIGENDIAN@ /* Set the native cpu bit order (FILLORDER_LSB2MSB or FILLORDER_MSB2LSB) */ #define HOST_FILLORDER FILLORDER_LSB2MSB @@ -156,15 +156,7 @@ /* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most significant byte first (like Motorola and SPARC, unlike Intel). */ -#if defined AC_APPLE_UNIVERSAL_BUILD -# if defined __BIG_ENDIAN__ -# define WORDS_BIGENDIAN 1 -# endif -#else -# ifndef WORDS_BIGENDIAN -/* # undef WORDS_BIGENDIAN */ -# endif -#endif +#cmakedefine WORDS_BIGENDIAN 1 /* Support Deflate compression */ #define ZIP_SUPPORT 1 diff --git a/CMakeLists.txt b/CMakeLists.txt index 04158f753485..1e1abd943d92 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -132,7 +132,7 @@ OCV_OPTION(WITH_JASPER "Include JPEG2K support" ON OCV_OPTION(WITH_JPEG "Include JPEG support" ON) OCV_OPTION(WITH_WEBP "Include WebP support" ON IF (NOT IOS) ) OCV_OPTION(WITH_OPENEXR "Include ILM support via OpenEXR" ON IF (NOT IOS) ) -OCV_OPTION(WITH_OPENGL "Include OpenGL support" OFF IF (NOT ANDROID AND NOT APPLE) ) +OCV_OPTION(WITH_OPENGL "Include OpenGL support" OFF IF (NOT ANDROID) ) OCV_OPTION(WITH_OPENNI "Include OpenNI support" OFF IF (NOT ANDROID AND NOT IOS) ) OCV_OPTION(WITH_PNG "Include PNG support" ON) OCV_OPTION(WITH_PVAPI "Include Prosilica GigE support" ON IF (NOT ANDROID AND NOT IOS) ) @@ -415,6 +415,12 @@ endif() include(cmake/OpenCVPCHSupport.cmake) include(cmake/OpenCVModule.cmake) +# ---------------------------------------------------------------------------- +# Detect endianness of build platform +# ---------------------------------------------------------------------------- +include(TestBigEndian) +test_big_endian(WORDS_BIGENDIAN) + # ---------------------------------------------------------------------------- # Detect 3rd-party libraries # ---------------------------------------------------------------------------- @@ -553,16 +559,46 @@ include(cmake/OpenCVGenConfig.cmake) include(cmake/OpenCVGenInfoPlist.cmake) # Generate environment setup file -if(INSTALL_TESTS AND OPENCV_TEST_DATA_PATH AND UNIX AND NOT ANDROID) - configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/opencv_testing.sh.in" - "${CMAKE_BINARY_DIR}/unix-install/opencv_testing.sh" @ONLY) - install(FILES "${CMAKE_BINARY_DIR}/unix-install/opencv_testing.sh" - DESTINATION /etc/profile.d/ COMPONENT tests) - configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/opencv_run_all_tests.sh.in" - "${CMAKE_BINARY_DIR}/unix-install/opencv_run_all_tests.sh" @ONLY) - install(FILES "${CMAKE_BINARY_DIR}/unix-install/opencv_run_all_tests.sh" - PERMISSIONS OWNER_READ OWNER_WRITE GROUP_READ WORLD_READ OWNER_EXECUTE GROUP_EXECUTE WORLD_EXECUTE - DESTINATION ${OPENCV_TEST_INSTALL_PATH} COMPONENT tests) +if(INSTALL_TESTS AND OPENCV_TEST_DATA_PATH AND UNIX) + if(ANDROID) + get_filename_component(TEST_PATH ${OPENCV_TEST_INSTALL_PATH} DIRECTORY) + configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/opencv_run_all_tests_android.sh.in" + "${CMAKE_BINARY_DIR}/unix-install/opencv_run_all_tests.sh" @ONLY) + install(PROGRAMS "${CMAKE_BINARY_DIR}/unix-install/opencv_run_all_tests.sh" + DESTINATION ${CMAKE_INSTALL_PREFIX} COMPONENT tests) + 
else() + configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/opencv_testing.sh.in" + "${CMAKE_BINARY_DIR}/unix-install/opencv_testing.sh" @ONLY) + install(FILES "${CMAKE_BINARY_DIR}/unix-install/opencv_testing.sh" + DESTINATION /etc/profile.d/ COMPONENT tests) + configure_file("${CMAKE_CURRENT_SOURCE_DIR}/cmake/templates/opencv_run_all_tests_unix.sh.in" + "${CMAKE_BINARY_DIR}/unix-install/opencv_run_all_tests.sh" @ONLY) + install(PROGRAMS "${CMAKE_BINARY_DIR}/unix-install/opencv_run_all_tests.sh" + DESTINATION ${OPENCV_TEST_INSTALL_PATH} COMPONENT tests) + + endif() +endif() + +if(NOT OPENCV_README_FILE) + if(ANDROID) + set(OPENCV_README_FILE ${CMAKE_CURRENT_SOURCE_DIR}/platforms/android/README.android) + endif() +endif() + +if(NOT OPENCV_LICENSE_FILE) + set(OPENCV_LICENSE_FILE ${CMAKE_CURRENT_SOURCE_DIR}/LICENSE) +endif() + +# for UNIX it does not make sense as LICENSE and readme will be part of the package automatically +if(ANDROID OR NOT UNIX) + install(FILES ${OPENCV_LICENSE_FILE} + PERMISSIONS OWNER_READ GROUP_READ WORLD_READ + DESTINATION ${CMAKE_INSTALL_PREFIX} COMPONENT libs) + if(OPENCV_README_FILE) + install(FILES ${OPENCV_README_FILE} + PERMISSIONS OWNER_READ GROUP_READ WORLD_READ + DESTINATION ${CMAKE_INSTALL_PREFIX} COMPONENT libs) + endif() endif() # ---------------------------------------------------------------------------- diff --git a/README.md b/README.md index 403f118eed59..3a26ad855521 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ ### OpenCV: Open Source Computer Vision Library +[![Gittip](http://img.shields.io/gittip/OpenCV.png)](https://www.gittip.com/OpenCV/) + #### Resources * Homepage: @@ -18,6 +20,3 @@ Summary of guidelines: * Include tests and documentation; * Clean up "oops" commits before submitting; * Follow the coding style guide. 
- -[![Donate OpenCV project](http://opencv.org/wp-content/uploads/2013/07/gittip1.png)](https://www.gittip.com/OpenCV/) -[![Donate OpenCV project](http://opencv.org/wp-content/uploads/2013/07/paypal-donate-button.png)](https://www.paypal.com/cgi-bin/webscr?item_name=Donation+to+OpenCV&cmd=_donations&business=accountant%40opencv.org) \ No newline at end of file diff --git a/cmake/OpenCVModule.cmake b/cmake/OpenCVModule.cmake index 3fc37c4ed8a8..19c28579454a 100644 --- a/cmake/OpenCVModule.cmake +++ b/cmake/OpenCVModule.cmake @@ -135,13 +135,13 @@ macro(ocv_add_module _name) # parse list of dependencies if("${ARGV1}" STREQUAL "INTERNAL" OR "${ARGV1}" STREQUAL "BINDINGS") - set(OPENCV_MODULE_${the_module}_CLASS "${ARGV1}" CACHE INTERNAL "The cathegory of the module") + set(OPENCV_MODULE_${the_module}_CLASS "${ARGV1}" CACHE INTERNAL "The category of the module") set(__ocv_argn__ ${ARGN}) list(REMOVE_AT __ocv_argn__ 0) ocv_add_dependencies(${the_module} ${__ocv_argn__}) unset(__ocv_argn__) else() - set(OPENCV_MODULE_${the_module}_CLASS "PUBLIC" CACHE INTERNAL "The cathegory of the module") + set(OPENCV_MODULE_${the_module}_CLASS "PUBLIC" CACHE INTERNAL "The category of the module") ocv_add_dependencies(${the_module} ${ARGN}) if(BUILD_${the_module}) set(OPENCV_MODULES_PUBLIC ${OPENCV_MODULES_PUBLIC} "${the_module}" CACHE INTERNAL "List of OpenCV modules marked for export") diff --git a/cmake/templates/cvconfig.h.in b/cmake/templates/cvconfig.h.in index e1beaada7cbc..3f316da46efc 100644 --- a/cmake/templates/cvconfig.h.in +++ b/cmake/templates/cvconfig.h.in @@ -167,6 +167,6 @@ /* Xine video library */ #cmakedefine HAVE_XINE -/* Define to 1 if your processor stores words with the most significant byte +/* Define if your processor stores words with the most significant byte first (like Motorola and SPARC, unlike Intel and VAX). 
*/ #cmakedefine WORDS_BIGENDIAN diff --git a/cmake/templates/opencv_run_all_tests_android.sh.in b/cmake/templates/opencv_run_all_tests_android.sh.in new file mode 100644 index 000000000000..93373fa964ba --- /dev/null +++ b/cmake/templates/opencv_run_all_tests_android.sh.in @@ -0,0 +1,51 @@ +#!/bin/sh + +BASE_DIR=`dirname $0` +OPENCV_TEST_PATH=$BASE_DIR/@TEST_PATH@ +OPENCV_TEST_DATA_PATH=$BASE_DIR/sdk/etc/testdata/ + +if [ $# -ne 1 ]; then + echo "Device architecture is not preset in command line" + echo "Tests are available for architectures: `ls -m ${OPENCV_TEST_PATH}`" + echo "Usage: $0 " + return 1 +else + TARGET_ARCH=$1 +fi + +if [ -z `which adb` ]; then + echo "adb command was not found in PATH" + return 1 +fi + +adb push $OPENCV_TEST_DATA_PATH /sdcard/opencv_testdata + +adb shell "mkdir -p /data/local/tmp/opencv_test" +SUMMARY_STATUS=0 +for t in "$OPENCV_TEST_PATH/$TARGET_ARCH/"opencv_test_* "$OPENCV_TEST_PATH/$TARGET_ARCH/"opencv_perf_*; +do + test_name=`basename "$t"` + report="$test_name-`date --rfc-3339=date`.xml" + adb push $t /data/local/tmp/opencv_test/ + adb shell "export OPENCV_TEST_DATA_PATH=/sdcard/opencv_testdata && /data/local/tmp/opencv_test/$test_name --perf_min_samples=1 --perf_force_samples=1 --gtest_output=xml:/data/local/tmp/opencv_test/$report" + adb pull "/data/local/tmp/opencv_test/$report" $report + TEST_STATUS=0 + if [ -e $report ]; then + if [ `grep -c " 1500) const double* ksi_ptr = reinterpret_cast<const double*>(ksi.ptr(0)); Eigen::Matrix<double,4,4> twist, g; twist << 0., -ksi_ptr[2], ksi_ptr[1], ksi_ptr[3], diff --git a/modules/core/include/opencv2/core/types_c.h b/modules/core/include/opencv2/core/types_c.h index 38b0f340a872..161a4bd100da 100644 --- a/modules/core/include/opencv2/core/types_c.h +++ b/modules/core/include/opencv2/core/types_c.h @@ -158,7 +158,7 @@ enum { CV_StsVecLengthErr= -28, /* incorrect vector length */ CV_StsFilterStructContentErr= -29, /* incorr. filter structure content */ CV_StsKernelStructContentErr= -30, /* incorr. transform kernel content */ - CV_StsFilterOffsetErr= -31, /* incorrect filter ofset value */ + CV_StsFilterOffsetErr= -31, /* incorrect filter offset value */ CV_StsBadSize= -201, /* the input/output structure size is incorrect */ CV_StsDivByZero= -202, /* division by zero */ CV_StsInplaceNotSupported= -203, /* in-place operation is not supported */ diff --git a/modules/core/src/gl_core_3_1.cpp b/modules/core/src/gl_core_3_1.cpp index 48201b4b76af..318eb50e13c5 100644 --- a/modules/core/src/gl_core_3_1.cpp +++ b/modules/core/src/gl_core_3_1.cpp @@ -44,22 +44,27 @@ #include "gl_core_3_1.hpp" #ifdef HAVE_OPENGL - #if defined(__APPLE__) - #include <mach-o/dyld.h> + + #ifdef __APPLE__ + #include <dlfcn.h> static void* AppleGLGetProcAddress (const char* name) { - static const struct mach_header* image = 0; - if (!image) - image = NSAddImage("/System/Library/Frameworks/OpenGL.framework/Versions/Current/OpenGL", NSADDIMAGE_OPTION_RETURN_ON_ERROR); - - // prepend a '_' for the Unix C symbol mangling convention - String symbolName = "_"; - symbolName += String(name); - - NSSymbol symbol = image ? NSLookupSymbolInImage(image, &symbolName[0], NSLOOKUPSYMBOLINIMAGE_OPTION_BIND | NSLOOKUPSYMBOLINIMAGE_OPTION_RETURN_ON_ERROR) : 0; - - return symbol ?
NSAddressOfSymbol(symbol) : 0; + static bool initialized = false; + static void * handle = NULL; + if (!handle) + { + if (!initialized) + { + initialized = true; + const char * const path = "/System/Library/Frameworks/OpenGL.framework/Versions/Current/OpenGL"; + + handle = dlopen(path, RTLD_LAZY | RTLD_GLOBAL); + } + if (!handle) + return NULL; + } + return dlsym(handle, name); } #endif // __APPLE__ diff --git a/modules/cudaimgproc/perf/perf_color.cpp b/modules/cudaimgproc/perf/perf_color.cpp index 2ff0f1ff60d7..099e0f9ebe47 100644 --- a/modules/cudaimgproc/perf/perf_color.cpp +++ b/modules/cudaimgproc/perf/perf_color.cpp @@ -243,7 +243,14 @@ PERF_TEST_P(Sz_Type_Op, AlphaComp, TEST_CYCLE() cv::cuda::alphaComp(d_img1, d_img2, dst, alpha_op); - CUDA_SANITY_CHECK(dst, 1e-3, ERROR_RELATIVE); + if (CV_MAT_DEPTH(type) < CV_32F) + { + CUDA_SANITY_CHECK(dst, 1); + } + else + { + CUDA_SANITY_CHECK(dst, 1e-3, ERROR_RELATIVE); + } } else { diff --git a/modules/cudaimgproc/src/cuda/canny.cu b/modules/cudaimgproc/src/cuda/canny.cu index 043d6e5ff764..36cccb0e33ae 100644 --- a/modules/cudaimgproc/src/cuda/canny.cu +++ b/modules/cudaimgproc/src/cuda/canny.cu @@ -42,8 +42,6 @@ #if !defined CUDA_DISABLER -#include -#include #include "opencv2/core/cuda/common.hpp" #include "opencv2/core/cuda/emulation.hpp" #include "opencv2/core/cuda/transform.hpp" @@ -463,7 +461,10 @@ namespace canny count = min(count, map.cols * map.rows); - std::swap(st1, st2); + //std::swap(st1, st2); + short2* tmp = st1; + st1 = st2; + st2 = tmp; } } } diff --git a/modules/cudaoptflow/test/test_optflow.cpp b/modules/cudaoptflow/test/test_optflow.cpp index e80116a75f29..110fed0339c0 100644 --- a/modules/cudaoptflow/test/test_optflow.cpp +++ b/modules/cudaoptflow/test/test_optflow.cpp @@ -405,13 +405,15 @@ CUDA_TEST_P(OpticalFlowBM, Accuracy) cv::Mat frame0 = readImage("opticalflow/rubberwhale1.png", cv::IMREAD_GRAYSCALE); ASSERT_FALSE(frame0.empty()); + cv::resize(frame0, frame0, cv::Size(), 0.5, 0.5); cv::Mat frame1 = readImage("opticalflow/rubberwhale2.png", cv::IMREAD_GRAYSCALE); ASSERT_FALSE(frame1.empty()); + cv::resize(frame1, frame1, cv::Size(), 0.5, 0.5); - cv::Size block_size(16, 16); + cv::Size block_size(8, 8); cv::Size shift_size(1, 1); - cv::Size max_range(16, 16); + cv::Size max_range(8, 8); cv::cuda::GpuMat d_velx, d_vely, buf; cv::cuda::calcOpticalFlowBM(loadMat(frame0), loadMat(frame1), diff --git a/modules/highgui/src/grfmt_tiff.cpp b/modules/highgui/src/grfmt_tiff.cpp index efabf76fdfb3..03e30c73ceea 100644 --- a/modules/highgui/src/grfmt_tiff.cpp +++ b/modules/highgui/src/grfmt_tiff.cpp @@ -118,18 +118,21 @@ bool TiffDecoder::readHeader() bool result = false; close(); - TIFF* tif = TIFFOpen( m_filename.c_str(), "rb" ); + // TIFFOpen() mode flags are different to fopen(). A 'b' in mode "rb" has no effect when reading. + // http://www.remotesensing.org/libtiff/man/TIFFOpen.3tiff.html + TIFF* tif = TIFFOpen( m_filename.c_str(), "r" ); if( tif ) { - int wdth = 0, hght = 0, photometric = 0; + uint32 wdth = 0, hght = 0; + uint16 photometric = 0; m_tif = tif; if( TIFFGetField( tif, TIFFTAG_IMAGEWIDTH, &wdth ) && TIFFGetField( tif, TIFFTAG_IMAGELENGTH, &hght ) && TIFFGetField( tif, TIFFTAG_PHOTOMETRIC, &photometric )) { - int bpp=8, ncn = photometric > 1 ? 3 : 1; + uint16 bpp=8, ncn = photometric > 1 ? 
3 : 1; TIFFGetField( tif, TIFFTAG_BITSPERSAMPLE, &bpp ); TIFFGetField( tif, TIFFTAG_SAMPLESPERPIXEL, &ncn ); @@ -195,12 +198,12 @@ bool TiffDecoder::readData( Mat& img ) if( m_tif && m_width && m_height ) { TIFF* tif = (TIFF*)m_tif; - int tile_width0 = m_width, tile_height0 = 0; + uint32 tile_width0 = m_width, tile_height0 = 0; int x, y, i; int is_tiled = TIFFIsTiled(tif); - int photometric; + uint16 photometric; TIFFGetField( tif, TIFFTAG_PHOTOMETRIC, &photometric ); - int bpp = 8, ncn = photometric > 1 ? 3 : 1; + uint16 bpp = 8, ncn = photometric > 1 ? 3 : 1; TIFFGetField( tif, TIFFTAG_BITSPERSAMPLE, &bpp ); TIFFGetField( tif, TIFFTAG_SAMPLESPERPIXEL, &ncn ); const int bitsPerByte = 8; diff --git a/modules/highgui/test/test_grfmt.cpp b/modules/highgui/test/test_grfmt.cpp index d670fa3da6b2..0343ba154e09 100644 --- a/modules/highgui/test/test_grfmt.cpp +++ b/modules/highgui/test/test_grfmt.cpp @@ -412,8 +412,8 @@ TEST(Highgui_Tiff, decode_tile16384x16384) try { - cv::imread(file3); - EXPECT_NO_THROW(cv::imread(file4)); + cv::imread(file3, IMREAD_UNCHANGED); + EXPECT_NO_THROW(cv::imread(file4, IMREAD_UNCHANGED)); } catch(const std::bad_alloc&) { @@ -423,6 +423,54 @@ TEST(Highgui_Tiff, decode_tile16384x16384) remove(file3.c_str()); remove(file4.c_str()); } + +TEST(Highgui_Tiff, write_read_16bit_big_little_endian) +{ + // see issue #2601 "16-bit Grayscale TIFF Load Failures Due to Buffer Underflow and Endianness" + + // Setup data for two minimal 16-bit grayscale TIFF files in both endian formats + uchar tiff_sample_data[2][86] = { { + // Little endian + 0x49, 0x49, 0x2a, 0x00, 0x0c, 0x00, 0x00, 0x00, 0xad, 0xde, 0xef, 0xbe, 0x06, 0x00, 0x00, 0x01, + 0x03, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x01, 0x03, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x01, 0x03, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, + 0x00, 0x00, 0x06, 0x01, 0x03, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x11, 0x01, + 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x17, 0x01, 0x04, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x04, 0x00, 0x00, 0x00 }, { + // Big endian + 0x4d, 0x4d, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x0c, 0xde, 0xad, 0xbe, 0xef, 0x00, 0x06, 0x01, 0x00, + 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x00, 0x01, 0x01, 0x00, 0x03, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x10, + 0x00, 0x00, 0x01, 0x06, 0x00, 0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x01, 0x11, + 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x01, 0x17, 0x00, 0x04, 0x00, 0x00, + 0x00, 0x01, 0x00, 0x00, 0x00, 0x04 } + }; + + // Test imread() for both a little endian TIFF and big endian TIFF + for (int i = 0; i < 2; i++) + { + string filename = cv::tempfile(".tiff"); + + // Write sample TIFF file + FILE* fp = fopen(filename.c_str(), "wb"); + ASSERT_TRUE(fp != NULL); + ASSERT_EQ((size_t)1, fwrite(tiff_sample_data[i], 86, 1, fp)); + fclose(fp); + + Mat img = imread(filename, IMREAD_UNCHANGED); + + EXPECT_EQ(1, img.rows); + EXPECT_EQ(2, img.cols); + EXPECT_EQ(CV_16U, img.type()); + EXPECT_EQ(sizeof(ushort), img.elemSize()); + EXPECT_EQ(1, img.channels()); + EXPECT_EQ(0xDEAD, img.at<ushort>(0,0)); + EXPECT_EQ(0xBEEF, img.at<ushort>(0,1)); + + remove(filename.c_str()); + } +} + #endif #ifdef HAVE_WEBP diff --git a/modules/java/generator/src/java/core+TermCriteria.java b/modules/java/generator/src/java/core+TermCriteria.java index 98a5e3c394e1..c67e51ea8dcc 100644 --- a/modules/java/generator/src/java/core+TermCriteria.java
+++ b/modules/java/generator/src/java/core+TermCriteria.java @@ -87,7 +87,6 @@ public boolean equals(Object obj) { @Override public String toString() { - if (this == null) return "null"; return "{ type: " + type + ", maxCount: " + maxCount + ", epsilon: " + epsilon + "}"; } } diff --git a/modules/photo/test/test_denoising.cuda.cpp b/modules/photo/test/test_denoising.cuda.cpp index 2051368a046d..dce20b9f51ef 100644 --- a/modules/photo/test/test_denoising.cuda.cpp +++ b/modules/photo/test/test_denoising.cuda.cpp @@ -61,6 +61,7 @@ TEST(CUDA_BruteForceNonLocalMeans, Regression) cv::Mat bgr = readImage("../gpu/denoising/lena_noised_gaussian_sigma=20_multi_0.png", cv::IMREAD_COLOR); ASSERT_FALSE(bgr.empty()); + cv::resize(bgr, bgr, cv::Size(256, 256)); cv::Mat gray; cv::cvtColor(bgr, gray, cv::COLOR_BGR2GRAY); @@ -77,6 +78,8 @@ TEST(CUDA_BruteForceNonLocalMeans, Regression) cv::Mat bgr_gold = readImage("../gpu/denoising/nlm_denoised_lena_bgr.png", cv::IMREAD_COLOR); cv::Mat gray_gold = readImage("../gpu/denoising/nlm_denoised_lena_gray.png", cv::IMREAD_GRAYSCALE); ASSERT_FALSE(bgr_gold.empty() || gray_gold.empty()); + cv::resize(bgr_gold, bgr_gold, cv::Size(256, 256)); + cv::resize(gray_gold, gray_gold, cv::Size(256, 256)); EXPECT_MAT_NEAR(bgr_gold, dbgr, 1e-4); EXPECT_MAT_NEAR(gray_gold, dgray, 1e-4); diff --git a/samples/android/native-activity/src/org/opencv/samples/NativeActivity/CvNativeActivity.java b/samples/android/native-activity/src/org/opencv/samples/NativeActivity/CvNativeActivity.java index 04da9a9496d6..b9db22de1f7c 100644 --- a/samples/android/native-activity/src/org/opencv/samples/NativeActivity/CvNativeActivity.java +++ b/samples/android/native-activity/src/org/opencv/samples/NativeActivity/CvNativeActivity.java @@ -21,6 +21,7 @@ public void onManagerConnected(int status) { System.loadLibrary("native_activity"); Intent intent = new Intent(CvNativeActivity.this, android.app.NativeActivity.class); CvNativeActivity.this.startActivity(intent); + CvNativeActivity.this.finish(); } break; default: { @@ -34,7 +35,7 @@ public CvNativeActivity() { Log.i(TAG, "Instantiated new " + this.getClass()); } - @Override + @Override public void onResume() { super.onResume(); diff --git a/samples/cpp/Qt_sample/CMakeLists.txt b/samples/cpp/Qt_sample/CMakeLists.txt index e831f752f647..f465947dbce5 100644 --- a/samples/cpp/Qt_sample/CMakeLists.txt +++ b/samples/cpp/Qt_sample/CMakeLists.txt @@ -7,6 +7,6 @@ FIND_PACKAGE( OpenCV REQUIRED ) find_package (OpenGL REQUIRED) -ADD_EXECUTABLE(OpenGL_Qt_Binding main.cpp) +ADD_EXECUTABLE(OpenGL_Qt_Binding qt_opengl.cpp) TARGET_LINK_LIBRARIES(OpenGL_Qt_Binding ${OpenCV_LIBS} ${OPENGL_LIBRARIES} ) configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cube4.avi ${CMAKE_CURRENT_BINARY_DIR}/cube4.avi COPYONLY) diff --git a/samples/cpp/Qt_sample/main.cpp b/samples/cpp/Qt_sample/main.cpp deleted file mode 100644 index 92bc2b549e65..000000000000 --- a/samples/cpp/Qt_sample/main.cpp +++ /dev/null @@ -1,271 +0,0 @@ -//Yannick Verdie 2010 - -//--- Please read help() below: --- - -#include -#include -#include -#include -#include -#include -#include - -#if defined WIN32 || defined _WIN32 || defined WINCE - #include - #undef small - #undef min - #undef max - #undef abs -#endif - -#ifdef __APPLE__ - #include -#else - #include -#endif - -using namespace std; -using namespace cv; - -static void help() -{ - cout << "\nThis demo demonstrates the use of the Qt enhanced version of the highgui GUI interface\n" - " and dang if it doesn't throw in the use of of the POSIT 3D tracking algorithm 
too\n" - "It works off of the video: cube4.avi\n" - "Using OpenCV version %s\n" << CV_VERSION << "\n\n" -" 1). This demo is mainly based on work from Javier Barandiaran Martirena\n" -" See this page http://code.opencv.org/projects/opencv/wiki/Posit.\n" -" 2). This is a demo to illustrate how to use **OpenGL Callback**.\n" -" 3). You need Qt binding to compile this sample with OpenGL support enabled.\n" -" 4). The features' detection is very basic and could highly be improved \n" -" (basic thresholding tuned for the specific video) but 2).\n" -" 5) THANKS TO Google Summer of Code 2010 for supporting this work!\n" << endl; -} - -#define FOCAL_LENGTH 600 -#define CUBE_SIZE 10 - -static void renderCube(float size) -{ - glBegin(GL_QUADS); - // Front Face - glNormal3f( 0.0f, 0.0f, 1.0f); - glVertex3f( 0.0f, 0.0f, 0.0f); - glVertex3f( size, 0.0f, 0.0f); - glVertex3f( size, size, 0.0f); - glVertex3f( 0.0f, size, 0.0f); - // Back Face - glNormal3f( 0.0f, 0.0f,-1.0f); - glVertex3f( 0.0f, 0.0f, size); - glVertex3f( 0.0f, size, size); - glVertex3f( size, size, size); - glVertex3f( size, 0.0f, size); - // Top Face - glNormal3f( 0.0f, 1.0f, 0.0f); - glVertex3f( 0.0f, size, 0.0f); - glVertex3f( size, size, 0.0f); - glVertex3f( size, size, size); - glVertex3f( 0.0f, size, size); - // Bottom Face - glNormal3f( 0.0f,-1.0f, 0.0f); - glVertex3f( 0.0f, 0.0f, 0.0f); - glVertex3f( 0.0f, 0.0f, size); - glVertex3f( size, 0.0f, size); - glVertex3f( size, 0.0f, 0.0f); - // Right face - glNormal3f( 1.0f, 0.0f, 0.0f); - glVertex3f( size, 0.0f, 0.0f); - glVertex3f( size, 0.0f, size); - glVertex3f( size, size, size); - glVertex3f( size, size, 0.0f); - // Left Face - glNormal3f(-1.0f, 0.0f, 0.0f); - glVertex3f( 0.0f, 0.0f, 0.0f); - glVertex3f( 0.0f, size, 0.0f); - glVertex3f( 0.0f, size, size); - glVertex3f( 0.0f, 0.0f, size); - glEnd(); -} - - -static void on_opengl(void* param) -{ - //Draw the object with the estimated pose - glLoadIdentity(); - glScalef( 1.0f, 1.0f, -1.0f); - glMultMatrixf( (float*)param ); - glEnable( GL_LIGHTING ); - glEnable( GL_LIGHT0 ); - glEnable( GL_BLEND ); - glBlendFunc(GL_SRC_ALPHA, GL_ONE); - renderCube( CUBE_SIZE ); - glDisable(GL_BLEND); - glDisable( GL_LIGHTING ); -} - -static void initPOSIT(std::vector *modelPoints) -{ - //Create the model pointss - modelPoints->push_back(cvPoint3D32f(0.0f, 0.0f, 0.0f)); //The first must be (0,0,0) - modelPoints->push_back(cvPoint3D32f(0.0f, 0.0f, CUBE_SIZE)); - modelPoints->push_back(cvPoint3D32f(CUBE_SIZE, 0.0f, 0.0f)); - modelPoints->push_back(cvPoint3D32f(0.0f, CUBE_SIZE, 0.0f)); -} - -static void foundCorners(vector *srcImagePoints, const Mat& source, Mat& grayImage) -{ - cvtColor(source, grayImage, COLOR_RGB2GRAY); - GaussianBlur(grayImage, grayImage, Size(11,11), 0, 0); - normalize(grayImage, grayImage, 0, 255, NORM_MINMAX); - threshold(grayImage, grayImage, 26, 255, THRESH_BINARY_INV); //25 - - vector > contours; - vector hierarchy; - findContours(grayImage, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_NONE); - - Point p; - vector srcImagePoints_temp(4,cvPoint2D32f(0,0)); - - if (contours.size() == srcImagePoints_temp.size()) - { - - for(size_t i = 0 ; i y = 0 - //> x = 1 - //< x = 2 - //< y = 3 - - //get point 0; - size_t index = 0; - for(size_t i = 1 ; i srcImagePoints_temp.at(index).y) - index = i; - } - srcImagePoints->at(0) = srcImagePoints_temp.at(index); - - //get point 1; - index = 0; - for(size_t i = 1 ; i srcImagePoints_temp.at(index).x) - index = i; - } - srcImagePoints->at(1) = srcImagePoints_temp.at(index); - - //get point 2; - 
index = 0; - for(size_t i = 1 ; iat(2) = srcImagePoints_temp.at(index); - - //get point 3; - index = 0; - for(size_t i = 1 ; iat(3) = srcImagePoints_temp.at(index); - - Mat Msource = source; - stringstream ss; - for(size_t i = 0 ; iat(i),5,Scalar(0,0,255)); - putText(Msource,ss.str(),srcImagePoints->at(i),FONT_HERSHEY_SIMPLEX,1,Scalar(0,0,255)); - ss.str(""); - - //new coordinate system in the middle of the frame and reversed (camera coordinate system) - srcImagePoints->at(i) = cvPoint2D32f(srcImagePoints_temp.at(i).x-source.cols/2,source.rows/2-srcImagePoints_temp.at(i).y); - } - } - -} - -static void createOpenGLMatrixFrom(float *posePOSIT,const CvMatr32f &rotationMatrix, const CvVect32f &translationVector) -{ - - - //coordinate system returned is relative to the first 3D input point - for (int f=0; f<3; f++) - { - for (int c=0; c<3; c++) - { - posePOSIT[c*4+f] = rotationMatrix[f*3+c]; //transposed - } - } - posePOSIT[3] = 0.0; - posePOSIT[7] = 0.0; - posePOSIT[11] = 0.0; - posePOSIT[12] = translationVector[0]; - posePOSIT[13] = translationVector[1]; - posePOSIT[14] = translationVector[2]; - posePOSIT[15] = 1.0; -} - -int main(void) -{ - help(); - VideoCapture video("cube4.avi"); - CV_Assert(video.isOpened()); - - Mat source, grayImage; - - video >> source; - - namedWindow("original", WINDOW_AUTOSIZE | WINDOW_FREERATIO); - namedWindow("POSIT", WINDOW_AUTOSIZE | WINDOW_FREERATIO); - displayOverlay("POSIT", "We lost the 4 corners' detection quite often (the red circles disappear). This demo is only to illustrate how to use OpenGL callback.\n -- Press ESC to exit.", 10000); - - float OpenGLMatrix[]={0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}; - setOpenGlDrawCallback("POSIT",on_opengl,OpenGLMatrix); - - vector modelPoints; - initPOSIT(&modelPoints); - - //Create the POSIT object with the model points - CvPOSITObject* positObject = cvCreatePOSITObject( &modelPoints[0], (int)modelPoints.size() ); - - CvMatr32f rotation_matrix = new float[9]; - CvVect32f translation_vector = new float[3]; - CvTermCriteria criteria = cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 100, 1.0e-4f); - - vector srcImagePoints(4,cvPoint2D32f(0,0)); - - - while(waitKey(33) != 27) - { - video >> source; - imshow("original",source); - - foundCorners(&srcImagePoints, source, grayImage); - cvPOSIT( positObject, &srcImagePoints[0], FOCAL_LENGTH, criteria, rotation_matrix, translation_vector ); - createOpenGLMatrixFrom(OpenGLMatrix,rotation_matrix,translation_vector); - - imshow("POSIT",source); - - if (video.get(CAP_PROP_POS_AVI_RATIO) > 0.99) - video.set(CAP_PROP_POS_AVI_RATIO, 0); - } - - destroyAllWindows(); - cvReleasePOSITObject(&positObject); - - return 0; -} diff --git a/samples/cpp/Qt_sample/qt_opengl.cpp b/samples/cpp/Qt_sample/qt_opengl.cpp new file mode 100644 index 000000000000..2878da4c00ed --- /dev/null +++ b/samples/cpp/Qt_sample/qt_opengl.cpp @@ -0,0 +1,268 @@ +// Yannick Verdie 2010 +// --- Please read help() below: --- + +#include +#include + +#include +#include +#include +#include +#include + +#ifdef __APPLE__ +#include +#else +#include +#endif + +using namespace std; +using namespace cv; + +static void help() +{ + cout << "This demo demonstrates the use of the Qt enhanced version of the highgui GUI interface\n" + "and dang if it doesn't throw in the use of of the POSIT 3D tracking algorithm too\n" + "It works off of the video: cube4.avi\n" + "Using OpenCV version " << CV_VERSION << "\n\n" + + " 1) This demo is mainly based on work from Javier Barandiaran Martirena\n" + " See this page 
http://code.opencv.org/projects/opencv/wiki/Posit.\n" + " 2) This is a demo to illustrate how to use **OpenGL Callback**.\n" + " 3) You need Qt binding to compile this sample with OpenGL support enabled.\n" + " 4) The features' detection is very basic and could highly be improved\n" + " (basic thresholding tuned for the specific video) but 2).\n" + " 5) Thanks to Google Summer of Code 2010 for supporting this work!\n" << endl; +} + +#define FOCAL_LENGTH 600 +#define CUBE_SIZE 0.5 + +static void renderCube(float size) +{ + glBegin(GL_QUADS); + // Front Face + glNormal3f( 0.0f, 0.0f, 1.0f); + glVertex3f( 0.0f, 0.0f, 0.0f); + glVertex3f( size, 0.0f, 0.0f); + glVertex3f( size, size, 0.0f); + glVertex3f( 0.0f, size, 0.0f); + // Back Face + glNormal3f( 0.0f, 0.0f,-1.0f); + glVertex3f( 0.0f, 0.0f, size); + glVertex3f( 0.0f, size, size); + glVertex3f( size, size, size); + glVertex3f( size, 0.0f, size); + // Top Face + glNormal3f( 0.0f, 1.0f, 0.0f); + glVertex3f( 0.0f, size, 0.0f); + glVertex3f( size, size, 0.0f); + glVertex3f( size, size, size); + glVertex3f( 0.0f, size, size); + // Bottom Face + glNormal3f( 0.0f,-1.0f, 0.0f); + glVertex3f( 0.0f, 0.0f, 0.0f); + glVertex3f( 0.0f, 0.0f, size); + glVertex3f( size, 0.0f, size); + glVertex3f( size, 0.0f, 0.0f); + // Right face + glNormal3f( 1.0f, 0.0f, 0.0f); + glVertex3f( size, 0.0f, 0.0f); + glVertex3f( size, 0.0f, size); + glVertex3f( size, size, size); + glVertex3f( size, size, 0.0f); + // Left Face + glNormal3f(-1.0f, 0.0f, 0.0f); + glVertex3f( 0.0f, 0.0f, 0.0f); + glVertex3f( 0.0f, size, 0.0f); + glVertex3f( 0.0f, size, size); + glVertex3f( 0.0f, 0.0f, size); + glEnd(); +} + +static void on_opengl(void* param) +{ + //Draw the object with the estimated pose + glLoadIdentity(); + glScalef( 1.0f, 1.0f, -1.0f); + glMultMatrixf( (float*)param ); + glEnable( GL_LIGHTING ); + glEnable( GL_LIGHT0 ); + glEnable( GL_BLEND ); + glBlendFunc(GL_SRC_ALPHA, GL_ONE); + renderCube( CUBE_SIZE ); + glDisable(GL_BLEND); + glDisable( GL_LIGHTING ); +} + +static void initPOSIT(std::vector<CvPoint3D32f> * modelPoints) +{ + // Create the model pointss + modelPoints->push_back(cvPoint3D32f(0.0f, 0.0f, 0.0f)); // The first must be (0, 0, 0) + modelPoints->push_back(cvPoint3D32f(0.0f, 0.0f, CUBE_SIZE)); + modelPoints->push_back(cvPoint3D32f(CUBE_SIZE, 0.0f, 0.0f)); + modelPoints->push_back(cvPoint3D32f(0.0f, CUBE_SIZE, 0.0f)); +} + +static void foundCorners(vector<CvPoint2D32f> * srcImagePoints, const Mat & source, Mat & grayImage) +{ + cvtColor(source, grayImage, COLOR_RGB2GRAY); + GaussianBlur(grayImage, grayImage, Size(11, 11), 0, 0); + normalize(grayImage, grayImage, 0, 255, NORM_MINMAX); + threshold(grayImage, grayImage, 26, 255, THRESH_BINARY_INV); //25 + + Mat MgrayImage = grayImage; + vector<vector<Point> > contours; + vector<Vec4i> hierarchy; + findContours(MgrayImage, contours, hierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE); + + Point p; + vector<CvPoint2D32f> srcImagePoints_temp(4, cvPoint2D32f(0, 0)); + + if (contours.size() == srcImagePoints_temp.size()) + { + for (size_t i = 0; i < contours.size(); i++ ) + { + p.x = p.y = 0; + + for (size_t j = 0 ; j < contours[i].size(); j++) + p += contours[i][j]; + + srcImagePoints_temp.at(i) = cvPoint2D32f(float(p.x) / contours[i].size(), float(p.y) / contours[i].size()); + } + + // Need to keep the same order + // > y = 0 + // > x = 1 + // < x = 2 + // < y = 3 + + // get point 0; + size_t index = 0; + for (size_t i = 1 ; i<srcImagePoints_temp.size() ; i++) + if (srcImagePoints_temp.at(i).y > srcImagePoints_temp.at(index).y) + index = i; + srcImagePoints->at(0) = srcImagePoints_temp.at(index); + + // get point 1; + index = 0; + for (size_t i = 1 ; 
i<srcImagePoints_temp.size() ; i++) + if (srcImagePoints_temp.at(i).x > srcImagePoints_temp.at(index).x) + index = i; + srcImagePoints->at(1) = srcImagePoints_temp.at(index); + + // get point 2; + index = 0; + for (size_t i = 1 ; i<srcImagePoints_temp.size() ; i++) + if (srcImagePoints_temp.at(i).x < srcImagePoints_temp.at(index).x) + index = i; + srcImagePoints->at(2) = srcImagePoints_temp.at(index); + + // get point 3; + index = 0; + for (size_t i = 1 ; i<srcImagePoints_temp.size() ; i++) + if (srcImagePoints_temp.at(i).y < srcImagePoints_temp.at(index).y) + index = i; + srcImagePoints->at(3) = srcImagePoints_temp.at(index); + + Mat Msource = source; + stringstream ss; + for (size_t i = 0; i<srcImagePoints_temp.size(); i++ ) + { + ss << i; + circle(Msource, srcImagePoints->at(i), 5, Scalar(0, 0, 255)); + putText(Msource, ss.str(), srcImagePoints->at(i), FONT_HERSHEY_SIMPLEX, 1, Scalar(0, 0, 255)); + ss.str(""); + + // new coordinate system in the middle of the frame and reversed (camera coordinate system) + srcImagePoints->at(i) = cvPoint2D32f(srcImagePoints_temp.at(i).x - source.cols / 2, + source.rows / 2 - srcImagePoints_temp.at(i).y); + } + } +} + +static void createOpenGLMatrixFrom(float * posePOSIT, const CvMatr32f & rotationMatrix, + const CvVect32f & translationVector) +{ + // coordinate system returned is relative to the first 3D input point + for (int f = 0; f < 3; f++) + for (int c = 0; c < 3; c++) + posePOSIT[c * 4 + f] = rotationMatrix[f * 3 + c]; // transposed + + posePOSIT[3] = translationVector[0]; + posePOSIT[7] = translationVector[1]; + posePOSIT[11] = translationVector[2]; + posePOSIT[12] = 0.0f; + posePOSIT[13] = 0.0f; + posePOSIT[14] = 0.0f; + posePOSIT[15] = 1.0f; +} + +int main(void) +{ + help(); + + string fileName = "cube4.avi"; + VideoCapture video(fileName); + if (!video.isOpened()) + { + cerr << "Video file " << fileName << " could not be opened" << endl; + return EXIT_FAILURE; + } + + Mat source, grayImage; + video >> source; + + namedWindow("Original", WINDOW_AUTOSIZE | CV_WINDOW_FREERATIO); + namedWindow("POSIT", WINDOW_OPENGL | CV_WINDOW_FREERATIO); + resizeWindow("POSIT", source.cols, source.rows); + + displayOverlay("POSIT", "We lost the 4 corners' detection quite often (the red circles disappear).\n" + "This demo is only to illustrate how to use OpenGL callback.\n" + " -- Press ESC to exit.", 10000); + + float OpenGLMatrix[] = { 0, 0, 0, 0, + 0, 0, 0, 0, + 0, 0, 0, 0, + 0, 0, 0, 0 }; + setOpenGlContext("POSIT"); + setOpenGlDrawCallback("POSIT", on_opengl, OpenGLMatrix); + + vector<CvPoint3D32f> modelPoints; + initPOSIT(&modelPoints); + + // Create the POSIT object with the model points + CvPOSITObject* positObject = cvCreatePOSITObject( &modelPoints[0], (int)modelPoints.size()); + + CvMatr32f rotation_matrix = new float[9]; + CvVect32f translation_vector = new float[3]; + CvTermCriteria criteria = cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 100, 1e-4f); + vector<CvPoint2D32f> srcImagePoints(4, cvPoint2D32f(0, 0)); + + while (waitKey(33) != 27) + { + video >> source; + if (source.empty()) + break; + + imshow("Original", source); + + foundCorners(&srcImagePoints, source, grayImage); + cvPOSIT(positObject, &srcImagePoints[0], FOCAL_LENGTH, criteria, rotation_matrix, translation_vector); + createOpenGLMatrixFrom(OpenGLMatrix, rotation_matrix, translation_vector); + + updateWindow("POSIT"); + + if (video.get(CV_CAP_PROP_POS_AVI_RATIO) > 0.99) + video.set(CV_CAP_PROP_POS_AVI_RATIO, 0); + } + + setOpenGlDrawCallback("POSIT", NULL, NULL); + destroyAllWindows(); + cvReleasePOSITObject(&positObject); + + delete[]rotation_matrix; + delete[]translation_vector; + + return EXIT_SUCCESS; +}