// Targeted by JavaCPP version 1.4.4: DO NOT EDIT THIS FILE

package org.bytedeco.javacpp;

import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;

import static org.bytedeco.javacpp.opencv_core.*;
import static org.bytedeco.javacpp.opencv_imgproc.*;
import static org.bytedeco.javacpp.opencv_phase_unwrapping.*;
import static org.bytedeco.javacpp.opencv_imgcodecs.*;
import static org.bytedeco.javacpp.opencv_videoio.*;
import static org.bytedeco.javacpp.opencv_highgui.*;
import static org.bytedeco.javacpp.opencv_flann.*;
import static org.bytedeco.javacpp.opencv_features2d.*;
import static org.bytedeco.javacpp.opencv_calib3d.*;

public class opencv_structured_light extends org.bytedeco.javacpp.presets.opencv_structured_light {
    static { Loader.load(); }

// Parsed from <opencv2/structured_light.hpp>

/*M///////////////////////////////////////////////////////////////////////////////////////
 //
 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
 //
 //  By downloading, copying, installing or using the software you agree to this license.
 //  If you do not agree to this license, do not download, install,
 //  copy or use the software.
 //
 //
 //                           License Agreement
 //                For Open Source Computer Vision Library
 //
 // Copyright (C) 2015, OpenCV Foundation, all rights reserved.
 // Third party copyrights are property of their respective owners.
 //
 // Redistribution and use in source and binary forms, with or without modification,
 // are permitted provided that the following conditions are met:
 //
 //   * Redistribution's of source code must retain the above copyright notice,
 //     this list of conditions and the following disclaimer.
 //
 //   * Redistribution's in binary form must reproduce the above copyright notice,
 //     this list of conditions and the following disclaimer in the documentation
 //     and/or other materials provided with the distribution.
 //
 //   * The name of the copyright holders may not be used to endorse or promote products
 //     derived from this software without specific prior written permission.
 //
 // This software is provided by the copyright holders and contributors "as is" and
 // any express or implied warranties, including, but not limited to, the implied
 // warranties of merchantability and fitness for a particular purpose are disclaimed.
 // In no event shall the Intel Corporation or contributors be liable for any direct,
 // indirect, incidental, special, exemplary, or consequential damages
 // (including, but not limited to, procurement of substitute goods or services;
 // loss of use, data, or profits; or business interruption) however caused
 // and on any theory of liability, whether in contract, strict liability,
 // or tort (including negligence or otherwise) arising in any way out of
 // the use of this software, even if advised of the possibility of such damage.
 //
 //M*/

/*#ifdef __OPENCV_BUILD
 #error this is a compatibility header which should not be used inside the OpenCV library
 #endif*/

// #include "opencv2/structured_light/structured_light.hpp"
// #include "opencv2/structured_light/graycodepattern.hpp"
// #include "opencv2/structured_light/sinusoidalpattern.hpp"

/** \defgroup structured_light Structured Light API
 

Structured light is considered one of the most effective techniques to acquire 3D models. This technique is based on projecting a light pattern and capturing the illuminated scene from one or more points of view. Since the pattern is coded, correspondences between image points and points of the projected pattern can be quickly found and 3D information easily retrieved.

One of the most commonly exploited coding strategies is based on time-multiplexing. In this case, a set of patterns is successively projected onto the measuring surface. The codeword for a given pixel is usually formed by the sequence of illuminance values for that pixel across the projected patterns. Thus, the codification is called temporal because the bits of the codewords are multiplexed in time \cite pattern .

In this module a time-multiplexing coding strategy based on Gray encoding is implemented following the (stereo) approach described in 3DUNDERWORLD algorithm \cite UNDERWORLD . For more details, see \ref tutorial_structured_light.
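
As a rough sketch of the temporal coding idea (illustrative only; the exact pattern layout is an implementation detail of the classes below), a projector column index can be turned into a binary-reflected Gray codeword whose bits are projected one pattern at a time:

<pre>{@code
int column = 613;                    // projector column to encode (arbitrary example value)
int gray = column ^ (column >>> 1);  // binary-reflected Gray code of the column index
// Bit k of 'gray' decides whether this column is white or black in pattern k, so the sequence of
// intensities observed at a camera pixel across the captured images spells out that pixel's codeword.
}</pre>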

*/

// Parsed from <opencv2/structured_light/structured_light.hpp>

// #ifndef __OPENCV_STRUCTURED_LIGHT_HPP__
// #define __OPENCV_STRUCTURED_LIGHT_HPP__

// #include "opencv2/core.hpp"

/** \addtogroup structured_light
 *  \{
 */

/** Type of the decoding algorithm */
// other algorithms can be implemented
/** enum cv::structured_light:: */
public static final int
    /** Kyriakos Herakleous, Charalambos Poullis. "3DUNDERWORLD-SLS: An Open-Source Structured-Light
     *  Scanning System for Rapid Geometry Acquisition", arXiv preprint arXiv:1406.6595 (2014). */
    DECODE_3D_UNDERWORLD = 0;

/** \brief Abstract base class for generating and decoding structured light patterns. */
@Namespace("cv::structured_light") public static class StructuredLightPattern extends Algorithm {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public StructuredLightPattern(Pointer p) { super(p); }

    /** \brief Generates the structured light pattern to project.
     *
     *  @param patternImages The generated pattern: a vector, in which each image is a CV_8U Mat at projector's resolution.
     */
    public native @Cast("bool") boolean generate( @ByVal MatVector patternImages );
    public native @Cast("bool") boolean generate( @ByVal UMatVector patternImages );
    public native @Cast("bool") boolean generate( @ByVal GpuMatVector patternImages );

    /** \brief Decodes the structured light pattern, generating a disparity map.
     *
     *  @param patternImages The acquired pattern images to decode (a vector of image vectors, one per camera), loaded as grayscale and previously rectified.
     *  @param disparityMap The decoding result: a CV_64F Mat at image resolution, storing the computed disparity map.
     *  @param blackImages The all-black images needed for shadowMasks computation.
     *  @param whiteImages The all-white images needed for shadowMasks computation.
     *  @param flags Flags setting decoding algorithms. Default: DECODE_3D_UNDERWORLD.
     *  \note All the images must be at the same resolution.
     */
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal Mat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") MatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") MatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal Mat disparityMap );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal Mat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") UMatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") UMatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal Mat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") GpuMatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") GpuMatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal UMat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") MatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") MatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal UMat disparityMap );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal UMat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") UMatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") UMatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal UMat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") GpuMatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") GpuMatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal GpuMat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") MatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") MatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal GpuMat disparityMap );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal GpuMat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") UMatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") UMatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
    public native @Cast("bool") boolean decode( @Const @ByRef MatVectorVector patternImages, @ByVal GpuMat disparityMap,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") GpuMatVector blackImages,
            @ByVal(nullValue = "cv::InputArrayOfArrays(cv::noArray())") GpuMatVector whiteImages,
            int flags/*=cv::structured_light::DECODE_3D_UNDERWORLD*/ );
}

/** \} */

// #endif
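
// A minimal usage sketch of the generate/decode flow declared above (illustrative only, not part of the
// generated preset). It assumes the pattern images have already been captured by two rectified grayscale
// cameras and loaded into the MatVector arguments; the MatVectorVector varargs constructor used here is
// assumed from JavaCPP's generated std::vector adapters.
private static Mat decodeSketch(MatVector camera1Images, MatVector camera2Images,
                                MatVector blackImages, MatVector whiteImages) {
    // The GrayCodePattern subclass defined below serves as the concrete StructuredLightPattern.
    StructuredLightPattern pattern = GrayCodePattern.create(1024, 768);   // projector resolution

    // Patterns to project: one CV_8U Mat per pattern image, at projector resolution.
    MatVector patternImages = new MatVector();
    pattern.generate(patternImages);

    // Group the acquired images per camera and decode them into a CV_64F disparity map.
    MatVectorVector acquired = new MatVectorVector(camera1Images, camera2Images);
    Mat disparityMap = new Mat();
    pattern.decode(acquired, disparityMap, blackImages, whiteImages, DECODE_3D_UNDERWORLD);
    return disparityMap;
}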

// Parsed from <opencv2/structured_light/graycodepattern.hpp>

// #ifndef __OPENCV_GRAY_CODE_PATTERN_HPP__
// #define __OPENCV_GRAY_CODE_PATTERN_HPP__

// #include "opencv2/core.hpp"
// #include "opencv2/structured_light/structured_light.hpp"

/** \addtogroup structured_light
 *  \{
 */

/** \brief Class implementing the Gray-code pattern, based on \cite UNDERWORLD.
 *
 *  The generation of the pattern images is performed with Gray encoding using the traditional white and black colors.
 *
 *  The information about the two image axes x, y is encoded separately into two different pattern sequences.
 *  A projector P with resolution (P_res_x, P_res_y) will result in Ncols = log2(P_res_x) encoded pattern images
 *  representing the columns, and in Nrows = log2(P_res_y) encoded pattern images representing the rows.
 *  For example a projector with resolution 1024x768 will result in Ncols = 10 and Nrows = 10.
 *
 *  However, the generated pattern sequence consists of both regular color and color-inverted images: inverted pattern
 *  images are images with the same structure as the original but with inverted colors.
 *  This provides an effective method for easily determining the intensity value of each pixel when it is lit (highest
 *  value) and when it is not lit (lowest value). So for a projector with resolution 1024x768, the number of pattern
 *  images will be Ncols * 2 + Nrows * 2 = 40.
 */
@Namespace("cv::structured_light") public static class GrayCodePattern extends StructuredLightPattern {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public GrayCodePattern(Pointer p) { super(p); }

    /** \brief Parameters of StructuredLightPattern constructor.
     *  @param width Projector's width. Default value is 1024.
     *  @param height Projector's height. Default value is 768.
     */
    @NoOffset public static class Params extends Pointer {
        static { Loader.load(); }
        /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
        public Params(Pointer p) { super(p); }
        /** Native array allocator. Access with {@link Pointer#position(long)}. */
        public Params(long size) { super((Pointer)null); allocateArray(size); }
        private native void allocateArray(long size);
        @Override public Params position(long position) { return (Params)super.position(position); }

        public Params() { super((Pointer)null); allocate(); }
        private native void allocate();
        public native int width(); public native Params width(int width);
        public native int height(); public native Params height(int height);
    }

    /** \brief Constructor
     *  @param parameters GrayCodePattern parameters GrayCodePattern::Params: the width and the height of the projector.
     */
    public static native @Ptr GrayCodePattern create( @Const @ByRef(nullValue = "cv::structured_light::GrayCodePattern::Params()") Params parameters );
    public static native @Ptr GrayCodePattern create( );
    // alias for scripting
    public static native @Ptr GrayCodePattern create( int width, int height );

    /** \brief Get the number of pattern images needed for the graycode pattern.
     *  @return The number of pattern images needed for the graycode pattern.
     */
    public native @Cast("size_t") long getNumberOfPatternImages();

    /** \brief Sets the value for white threshold, needed for decoding.
     *
     *  White threshold is a number between 0-255 that represents the minimum brightness difference required for valid
     *  pixels, between the graycode pattern and its inverse images; used in getProjPixel method.
     *  @param value The desired white threshold value.
     */
    public native void setWhiteThreshold( @Cast("size_t") long value );

    /** \brief Sets the value for black threshold, needed for decoding (shadow masks computation).
     *
     *  Black threshold is a number between 0-255 that represents the minimum brightness difference required for valid
     *  pixels, between the fully illuminated (white) and the not illuminated images (black); used in computeShadowMasks method.
     *  @param value The desired black threshold value.
     */
    public native void setBlackThreshold( @Cast("size_t") long value );

    /** \brief Generates the all-black and all-white images needed for shadowMasks computation.
     *
     *  To identify shadow regions, the regions of two images where the pixels are not lit by projector's light and thus
     *  where there is no coded information, the 3DUNDERWORLD algorithm computes a shadow mask for the two cameras' views,
     *  starting from a white and a black image captured by each camera.
     *  This method generates these two additional images to project.
     *  @param blackImage The generated all-black CV_8U image, at projector's resolution.
     *  @param whiteImage The generated all-white CV_8U image, at projector's resolution.
     */
    public native void getImagesForShadowMasks( @ByVal Mat blackImage, @ByVal Mat whiteImage );
    public native void getImagesForShadowMasks( @ByVal UMat blackImage, @ByVal UMat whiteImage );
    public native void getImagesForShadowMasks( @ByVal GpuMat blackImage, @ByVal GpuMat whiteImage );

    /** \brief For a (x,y) pixel of a camera returns the corresponding projector pixel.
     *
     *  The function decodes each pixel in the pattern images acquired by a camera into their corresponding decimal
     *  numbers representing the projector's column and row, providing a mapping between camera's and projector's pixel.
     *  @param patternImages The pattern images acquired by the camera, stored in a grayscale vector < Mat >.
     *  @param x x coordinate of the image pixel.
     *  @param y y coordinate of the image pixel.
     *  @param projPix Projector's pixel corresponding to the camera's pixel: projPix.x and projPix.y are the image
     *         coordinates of the projector's pixel corresponding to the pixel being decoded in a camera.
     */
    public native @Cast("bool") boolean getProjPixel( @ByVal MatVector patternImages, int x, int y, @ByRef Point projPix );
    public native @Cast("bool") boolean getProjPixel( @ByVal UMatVector patternImages, int x, int y, @ByRef Point projPix );
    public native @Cast("bool") boolean getProjPixel( @ByVal GpuMatVector patternImages, int x, int y, @ByRef Point projPix );
}

/** \} */

// #endif
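
// A small, illustrative sketch of the GrayCodePattern members above (not part of the generated preset).
// For a hypothetical 1024x768 projector it shows how many pattern images are needed, how the extra
// all-black/all-white images are obtained, and how a camera pixel is mapped back to a projector pixel.
// The threshold values are arbitrary placeholders.
private static void grayCodeSketch(MatVector capturedPatternImages) {
    GrayCodePattern grayCode = GrayCodePattern.create(1024, 768);

    long numPatterns = grayCode.getNumberOfPatternImages();   // pattern images to project and capture
    MatVector projectorPatterns = new MatVector();
    grayCode.generate(projectorPatterns);

    // Extra all-black/all-white images used for the shadow masks.
    Mat black = new Mat(), white = new Mat();
    grayCode.getImagesForShadowMasks(black, white);

    // Decoding thresholds (0-255); placeholder values.
    grayCode.setWhiteThreshold(5);
    grayCode.setBlackThreshold(40);

    // Map one camera pixel to its projector pixel; returns false for shadowed/undecodable pixels.
    Point projPix = new Point();
    boolean decoded = grayCode.getProjPixel(capturedPatternImages, 320, 240, projPix);
}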

// Parsed from <opencv2/structured_light/sinusoidalpattern.hpp>

// #ifndef __OPENCV_SINUSOIDAL_PATTERN_HPP__
// #define __OPENCV_SINUSOIDAL_PATTERN_HPP__

// #include "opencv2/core.hpp"
// #include "opencv2/imgproc.hpp"
// #include "opencv2/structured_light/structured_light.hpp"
// #include
// #include

/** \addtogroup structured_light
 *  \{
 */

/** Type of sinusoidal pattern profilometry methods. */
/** enum cv::structured_light:: */
public static final int FTP = 0, PSP = 1, FAPS = 2;

/**
 * \brief Class implementing Fourier transform profilometry (FTP), phase-shifting profilometry (PSP)
 * and Fourier-assisted phase-shifting profilometry (FAPS) based on \cite faps.
 *
 * This class generates sinusoidal patterns that can be used with FTP, PSP and FAPS.
 */
@Namespace("cv::structured_light") public static class SinusoidalPattern extends StructuredLightPattern {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public SinusoidalPattern(Pointer p) { super(p); }

    /**
     * \brief Parameters of SinusoidalPattern constructor
     * @param width Projector's width.
     * @param height Projector's height.
     * @param nbrOfPeriods Number of periods along the pattern direction.
     * @param shiftValue Phase shift between two consecutive patterns.
     * @param methodId Allows choosing between FTP, PSP and FAPS.
     * @param nbrOfPixelsBetweenMarkers Number of pixels between two consecutive markers on the same row.
     * @param setMarkers Allows placing markers on the patterns.
     * @param markersLocation Vector used to store marker locations on the patterns.
     */
    @NoOffset public static class Params extends Pointer {
        static { Loader.load(); }
        /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
        public Params(Pointer p) { super(p); }
        /** Native array allocator. Access with {@link Pointer#position(long)}. */
        public Params(long size) { super((Pointer)null); allocateArray(size); }
        private native void allocateArray(long size);
        @Override public Params position(long position) { return (Params)super.position(position); }

        public Params() { super((Pointer)null); allocate(); }
        private native void allocate();
        public native int width(); public native Params width(int width);
        public native int height(); public native Params height(int height);
        public native int nbrOfPeriods(); public native Params nbrOfPeriods(int nbrOfPeriods);
        public native float shiftValue(); public native Params shiftValue(float shiftValue);
        public native int methodId(); public native Params methodId(int methodId);
        public native int nbrOfPixelsBetweenMarkers(); public native Params nbrOfPixelsBetweenMarkers(int nbrOfPixelsBetweenMarkers);
        public native @Cast("bool") boolean horizontal(); public native Params horizontal(boolean horizontal);
        public native @Cast("bool") boolean setMarkers(); public native Params setMarkers(boolean setMarkers);
        public native @ByRef Point2fVector markersLocation(); public native Params markersLocation(Point2fVector markersLocation);
    }

    /**
     * \brief Constructor.
     * @param parameters SinusoidalPattern parameters SinusoidalPattern::Params: width, height of the projector and patterns parameters.
     */
    public static native @Ptr SinusoidalPattern create( @Ptr Params parameters/*=makePtr()*/ );
    public static native @Ptr SinusoidalPattern create( );

    /**
     * \brief Compute a wrapped phase map from sinusoidal patterns.
     * @param patternImages Input data to compute the wrapped phase map.
     * @param wrappedPhaseMap Wrapped phase map obtained through one of the three methods.
     * @param shadowMask Mask used to discard shadow regions.
     * @param fundamental Fundamental matrix used to compute epipolar lines and ease the matching step.
     */
    public native void computePhaseMap( @ByVal MatVector patternImages, @ByVal Mat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") Mat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat fundamental);
    public native void computePhaseMap( @ByVal MatVector patternImages, @ByVal Mat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal UMatVector patternImages, @ByVal Mat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") Mat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat fundamental);
    public native void computePhaseMap( @ByVal UMatVector patternImages, @ByVal Mat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal GpuMatVector patternImages, @ByVal Mat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") Mat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat fundamental);
    public native void computePhaseMap( @ByVal GpuMatVector patternImages, @ByVal Mat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal MatVector patternImages, @ByVal UMat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") UMat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat fundamental);
    public native void computePhaseMap( @ByVal MatVector patternImages, @ByVal UMat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal UMatVector patternImages, @ByVal UMat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") UMat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat fundamental);
    public native void computePhaseMap( @ByVal UMatVector patternImages, @ByVal UMat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal GpuMatVector patternImages, @ByVal UMat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") UMat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat fundamental);
    public native void computePhaseMap( @ByVal GpuMatVector patternImages, @ByVal UMat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal MatVector patternImages, @ByVal GpuMat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") GpuMat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat fundamental);
    public native void computePhaseMap( @ByVal MatVector patternImages, @ByVal GpuMat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal UMatVector patternImages, @ByVal GpuMat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") GpuMat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat fundamental);
    public native void computePhaseMap( @ByVal UMatVector patternImages, @ByVal GpuMat wrappedPhaseMap);
    public native void computePhaseMap( @ByVal GpuMatVector patternImages, @ByVal GpuMat wrappedPhaseMap,
            @ByVal(nullValue = "cv::OutputArray(cv::noArray())") GpuMat shadowMask,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat fundamental);
    public native void computePhaseMap( @ByVal GpuMatVector patternImages, @ByVal GpuMat wrappedPhaseMap);

    /**
     * \brief Unwrap the wrapped phase map to remove phase ambiguities.
     * @param wrappedPhaseMap The wrapped phase map computed from the pattern.
     * @param unwrappedPhaseMap The unwrapped phase map used to find correspondences between the two devices.
     * @param camSize Resolution of the camera.
     * @param shadowMask Mask used to discard shadow regions.
     */
    public native void unwrapPhaseMap( @ByVal MatVector wrappedPhaseMap, @ByVal Mat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat shadowMask );
    public native void unwrapPhaseMap( @ByVal MatVector wrappedPhaseMap, @ByVal Mat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal UMatVector wrappedPhaseMap, @ByVal Mat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat shadowMask );
    public native void unwrapPhaseMap( @ByVal UMatVector wrappedPhaseMap, @ByVal Mat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal GpuMatVector wrappedPhaseMap, @ByVal Mat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat shadowMask );
    public native void unwrapPhaseMap( @ByVal GpuMatVector wrappedPhaseMap, @ByVal Mat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal MatVector wrappedPhaseMap, @ByVal UMat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat shadowMask );
    public native void unwrapPhaseMap( @ByVal MatVector wrappedPhaseMap, @ByVal UMat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal UMatVector wrappedPhaseMap, @ByVal UMat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat shadowMask );
    public native void unwrapPhaseMap( @ByVal UMatVector wrappedPhaseMap, @ByVal UMat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal GpuMatVector wrappedPhaseMap, @ByVal UMat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat shadowMask );
    public native void unwrapPhaseMap( @ByVal GpuMatVector wrappedPhaseMap, @ByVal UMat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal MatVector wrappedPhaseMap, @ByVal GpuMat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat shadowMask );
    public native void unwrapPhaseMap( @ByVal MatVector wrappedPhaseMap, @ByVal GpuMat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal UMatVector wrappedPhaseMap, @ByVal GpuMat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat shadowMask );
    public native void unwrapPhaseMap( @ByVal UMatVector wrappedPhaseMap, @ByVal GpuMat unwrappedPhaseMap, @ByVal Size camSize );
    public native void unwrapPhaseMap( @ByVal GpuMatVector wrappedPhaseMap, @ByVal GpuMat unwrappedPhaseMap, @ByVal Size camSize,
            @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat shadowMask );
    public native void unwrapPhaseMap( @ByVal GpuMatVector wrappedPhaseMap, @ByVal GpuMat unwrappedPhaseMap, @ByVal Size camSize );
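
    // An end-to-end sketch of the profilometry helpers above (illustrative only, not part of the generated
    // preset). Projector resolution, period count and phase-shift values are placeholder assumptions, and
    // only the simple overloads without shadow mask or fundamental matrix are used. The single-element
    // MatVector constructor is assumed from JavaCPP's generated vector adapters.
    private static Mat sinusoidalSketch(MatVector capturedPatternImages, Size camSize) {
        SinusoidalPattern.Params params = new SinusoidalPattern.Params();
        params.width(1024).height(768);                         // projector resolution (placeholder)
        params.nbrOfPeriods(20).shiftValue(2.0f * (float) Math.PI / 3.0f);
        params.methodId(PSP);                                   // FTP, PSP or FAPS
        SinusoidalPattern profilometry = SinusoidalPattern.create(params);

        // Sinusoidal patterns to project.
        MatVector patterns = new MatVector();
        profilometry.generate(patterns);

        // Wrapped phase map from the captured images, then unwrapped to remove the phase ambiguities.
        Mat wrappedPhaseMap = new Mat();
        profilometry.computePhaseMap(capturedPatternImages, wrappedPhaseMap);
        Mat unwrappedPhaseMap = new Mat();
        profilometry.unwrapPhaseMap(new MatVector(wrappedPhaseMap), unwrappedPhaseMap, camSize);
        return unwrappedPhaseMap;
    }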
    /**
     * \brief Find correspondences between the two devices thanks to unwrapped phase maps.
     * @param projUnwrappedPhaseMap Projector's unwrapped phase map.
     * @param camUnwrappedPhaseMap Camera's unwrapped phase map.
     * @param matches Images used to display correspondences map.
     */
    public native void findProCamMatches( @ByVal Mat projUnwrappedPhaseMap, @ByVal Mat camUnwrappedPhaseMap, @ByVal MatVector matches );
    public native void findProCamMatches( @ByVal Mat projUnwrappedPhaseMap, @ByVal Mat camUnwrappedPhaseMap, @ByVal UMatVector matches );
    public native void findProCamMatches( @ByVal Mat projUnwrappedPhaseMap, @ByVal Mat camUnwrappedPhaseMap, @ByVal GpuMatVector matches );
    public native void findProCamMatches( @ByVal UMat projUnwrappedPhaseMap, @ByVal UMat camUnwrappedPhaseMap, @ByVal MatVector matches );
    public native void findProCamMatches( @ByVal UMat projUnwrappedPhaseMap, @ByVal UMat camUnwrappedPhaseMap, @ByVal UMatVector matches );
    public native void findProCamMatches( @ByVal UMat projUnwrappedPhaseMap, @ByVal UMat camUnwrappedPhaseMap, @ByVal GpuMatVector matches );
    public native void findProCamMatches( @ByVal GpuMat projUnwrappedPhaseMap, @ByVal GpuMat camUnwrappedPhaseMap, @ByVal MatVector matches );
    public native void findProCamMatches( @ByVal GpuMat projUnwrappedPhaseMap, @ByVal GpuMat camUnwrappedPhaseMap, @ByVal UMatVector matches );
    public native void findProCamMatches( @ByVal GpuMat projUnwrappedPhaseMap, @ByVal GpuMat camUnwrappedPhaseMap, @ByVal GpuMatVector matches );

    /**
     * \brief compute the data modulation term.
     * @param patternImages captured images with projected patterns.
     * @param dataModulationTerm Mat where the data modulation term is saved.
     * @param shadowMask Mask used to discard shadow regions.
     */
    public native void computeDataModulationTerm( @ByVal MatVector patternImages, @ByVal Mat dataModulationTerm, @ByVal Mat shadowMask );
    public native void computeDataModulationTerm( @ByVal UMatVector patternImages, @ByVal Mat dataModulationTerm, @ByVal Mat shadowMask );
    public native void computeDataModulationTerm( @ByVal GpuMatVector patternImages, @ByVal Mat dataModulationTerm, @ByVal Mat shadowMask );
    public native void computeDataModulationTerm( @ByVal MatVector patternImages, @ByVal UMat dataModulationTerm, @ByVal UMat shadowMask );
    public native void computeDataModulationTerm( @ByVal UMatVector patternImages, @ByVal UMat dataModulationTerm, @ByVal UMat shadowMask );
    public native void computeDataModulationTerm( @ByVal GpuMatVector patternImages, @ByVal UMat dataModulationTerm, @ByVal UMat shadowMask );
    public native void computeDataModulationTerm( @ByVal MatVector patternImages, @ByVal GpuMat dataModulationTerm, @ByVal GpuMat shadowMask );
    public native void computeDataModulationTerm( @ByVal UMatVector patternImages, @ByVal GpuMat dataModulationTerm, @ByVal GpuMat shadowMask );
    public native void computeDataModulationTerm( @ByVal GpuMatVector patternImages, @ByVal GpuMat dataModulationTerm, @ByVal GpuMat shadowMask );
}

/** \} */

// #endif

}




