// Targeted by JavaCPP version 1.4.1: DO NOT EDIT THIS FILE
package org.bytedeco.javacpp;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.javacpp.opencv_core.*;
import static org.bytedeco.javacpp.opencv_imgproc.*;
import static org.bytedeco.javacpp.opencv_plot.*;
import static org.bytedeco.javacpp.opencv_video.*;
import static org.bytedeco.javacpp.opencv_dnn.*;
public class opencv_tracking extends org.bytedeco.javacpp.presets.opencv_tracking {
static { Loader.load(); }
@Name("std::vector<cv::Ptr<cv::Tracker> >") public static class TrackerVector extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerVector(Pointer p) { super(p); }
public TrackerVector(Tracker value) { this(1); put(0, value); }
public TrackerVector(Tracker ... array) { this(array.length); put(array); }
public TrackerVector() { allocate(); }
public TrackerVector(long n) { allocate(n); }
private native void allocate();
private native void allocate(@Cast("size_t") long n);
public native @Name("operator=") @ByRef TrackerVector put(@ByRef TrackerVector x);
public boolean empty() { return size() == 0; }
public native long size();
public void clear() { resize(0); }
public native void resize(@Cast("size_t") long n);
@Index(function = "at") public native @Ptr Tracker get(@Cast("size_t") long i);
public native TrackerVector put(@Cast("size_t") long i, Tracker value);
public native @ByVal Iterator begin();
public native @ByVal Iterator end();
@NoOffset @Name("iterator") public static class Iterator extends Pointer {
public Iterator(Pointer p) { super(p); }
public Iterator() { }
public native @Name("operator++") @ByRef Iterator increment();
public native @Name("operator==") boolean equals(@ByRef Iterator it);
public native @Name("operator*") @Ptr @Const Tracker get();
}
public Tracker[] get() {
Tracker[] array = new Tracker[size() < Integer.MAX_VALUE ? (int)size() : Integer.MAX_VALUE];
for (int i = 0; i < array.length; i++) {
array[i] = get(i);
}
return array;
}
@Override public String toString() {
return java.util.Arrays.toString(get());
}
public Tracker pop_back() {
long size = size();
Tracker value = get(size - 1);
resize(size - 1);
return value;
}
public TrackerVector push_back(Tracker value) {
long size = size();
resize(size + 1);
return put(size, value);
}
public TrackerVector put(Tracker value) {
if (size() != 1) { resize(1); }
return put(0, value);
}
public TrackerVector put(Tracker ... array) {
if (size() != array.length) { resize(array.length); }
for (int i = 0; i < array.length; i++) {
put(i, array[i]);
}
return this;
}
}
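/* Usage sketch for the std::vector adapter above (not part of the generated API); it illustrates the
put/get/push_back pattern shared by all vector wrappers in this class. The TrackerKCF.create() and
TrackerMIL.create() factories are assumptions mirroring the OpenCV C++ factory methods declared
elsewhere in these bindings.
{@code
TrackerVector trackers = new TrackerVector();
trackers.push_back(TrackerKCF.create());   // assumed factory
trackers.push_back(TrackerMIL.create());   // assumed factory
for (long i = 0; i < trackers.size(); i++) {
    Tracker t = trackers.get(i);           // element access goes through std::vector::at
}
trackers.pop_back();                       // shrinks the native vector by one element
}
*/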
@Name("std::vector<cv::ConfidenceMap>") public static class ConfidenceMapVector extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public ConfidenceMapVector(Pointer p) { super(p); }
public ConfidenceMapVector(ConfidenceMap value) { this(1); put(0, value); }
public ConfidenceMapVector(ConfidenceMap ... array) { this(array.length); put(array); }
public ConfidenceMapVector() { allocate(); }
public ConfidenceMapVector(long n) { allocate(n); }
private native void allocate();
private native void allocate(@Cast("size_t") long n);
public native @Name("operator=") @ByRef ConfidenceMapVector put(@ByRef ConfidenceMapVector x);
public boolean empty() { return size() == 0; }
public native long size();
public void clear() { resize(0); }
public native void resize(@Cast("size_t") long n);
@Index(function = "at") public native @ByRef ConfidenceMap get(@Cast("size_t") long i);
public native ConfidenceMapVector put(@Cast("size_t") long i, ConfidenceMap value);
public native @ByVal Iterator begin();
public native @ByVal Iterator end();
@NoOffset @Name("iterator") public static class Iterator extends Pointer {
public Iterator(Pointer p) { super(p); }
public Iterator() { }
public native @Name("operator++") @ByRef Iterator increment();
public native @Name("operator==") boolean equals(@ByRef Iterator it);
public native @Name("operator*") @ByRef @Const ConfidenceMap get();
}
public ConfidenceMap[] get() {
ConfidenceMap[] array = new ConfidenceMap[size() < Integer.MAX_VALUE ? (int)size() : Integer.MAX_VALUE];
for (int i = 0; i < array.length; i++) {
array[i] = get(i);
}
return array;
}
@Override public String toString() {
return java.util.Arrays.toString(get());
}
public ConfidenceMap pop_back() {
long size = size();
ConfidenceMap value = get(size - 1);
resize(size - 1);
return value;
}
public ConfidenceMapVector push_back(ConfidenceMap value) {
long size = size();
resize(size + 1);
return put(size, value);
}
public ConfidenceMapVector put(ConfidenceMap value) {
if (size() != 1) { resize(1); }
return put(0, value);
}
public ConfidenceMapVector put(ConfidenceMap ... array) {
if (size() != array.length) { resize(array.length); }
for (int i = 0; i < array.length; i++) {
put(i, array[i]);
}
return this;
}
}
@Name("std::vector<std::pair<cv::Ptr<cv::TrackerTargetState>,float> >") public static class ConfidenceMap extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public ConfidenceMap(Pointer p) { super(p); }
public ConfidenceMap(TrackerTargetState[] firstValue, float[] secondValue) { this(Math.min(firstValue.length, secondValue.length)); put(firstValue, secondValue); }
public ConfidenceMap() { allocate(); }
public ConfidenceMap(long n) { allocate(n); }
private native void allocate();
private native void allocate(@Cast("size_t") long n);
public native @Name("operator=") @ByRef ConfidenceMap put(@ByRef ConfidenceMap x);
public boolean empty() { return size() == 0; }
public native long size();
public void clear() { resize(0); }
public native void resize(@Cast("size_t") long n);
@Index(function = "at") public native @Ptr TrackerTargetState first(@Cast("size_t") long i); public native ConfidenceMap first(@Cast("size_t") long i, TrackerTargetState first);
@Index(function = "at") public native float second(@Cast("size_t") long i); public native ConfidenceMap second(@Cast("size_t") long i, float second);
public ConfidenceMap put(TrackerTargetState[] firstValue, float[] secondValue) {
for (int i = 0; i < firstValue.length && i < secondValue.length; i++) {
first(i, firstValue[i]);
second(i, secondValue[i]);
}
return this;
}
}
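/* Usage sketch (not generated code): ConfidenceMap maps std::pair<Ptr<TrackerTargetState>,float>, so
each element is reached through the paired first()/second() getters and setters above. Constructing a
TrackerTargetState directly is only illustrative and assumes its no-arg constructor is generated; in
practice the states usually come from a TrackerStateEstimator.
{@code
ConfidenceMap map = new ConfidenceMap(2);             // native vector holding two (state, confidence) pairs
TrackerTargetState state = new TrackerTargetState();  // assumed no-arg constructor
map.first(0, state);                                  // pair 0: candidate state
map.second(0, 0.87f);                                 // pair 0: its confidence
float confidence = map.second(0);                     // read it back
}
*/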
@Name("std::vector<std::pair<cv::String,cv::Ptr<cv::TrackerFeature> > >") public static class StringTrackerFeaturePairVector extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public StringTrackerFeaturePairVector(Pointer p) { super(p); }
public StringTrackerFeaturePairVector(BytePointer[] firstValue, TrackerFeature[] secondValue) { this(Math.min(firstValue.length, secondValue.length)); put(firstValue, secondValue); }
public StringTrackerFeaturePairVector(String[] firstValue, TrackerFeature[] secondValue) { this(Math.min(firstValue.length, secondValue.length)); put(firstValue, secondValue); }
public StringTrackerFeaturePairVector() { allocate(); }
public StringTrackerFeaturePairVector(long n) { allocate(n); }
private native void allocate();
private native void allocate(@Cast("size_t") long n);
public native @Name("operator=") @ByRef StringTrackerFeaturePairVector put(@ByRef StringTrackerFeaturePairVector x);
public boolean empty() { return size() == 0; }
public native long size();
public void clear() { resize(0); }
public native void resize(@Cast("size_t") long n);
@Index(function = "at") public native @Str BytePointer first(@Cast("size_t") long i); public native StringTrackerFeaturePairVector first(@Cast("size_t") long i, BytePointer first);
@Index(function = "at") public native @Ptr TrackerFeature second(@Cast("size_t") long i); public native StringTrackerFeaturePairVector second(@Cast("size_t") long i, TrackerFeature second);
@MemberSetter @Index(function = "at") public native StringTrackerFeaturePairVector first(@Cast("size_t") long i, @Str String first);
public StringTrackerFeaturePairVector put(BytePointer[] firstValue, TrackerFeature[] secondValue) {
for (int i = 0; i < firstValue.length && i < secondValue.length; i++) {
first(i, firstValue[i]);
second(i, secondValue[i]);
}
return this;
}
public StringTrackerFeaturePairVector put(String[] firstValue, TrackerFeature[] secondValue) {
for (int i = 0; i < firstValue.length && i < secondValue.length; i++) {
first(i, firstValue[i]);
second(i, secondValue[i]);
}
return this;
}
}
@Name("std::vector<std::pair<cv::String,cv::Ptr<cv::TrackerSamplerAlgorithm> > >") public static class StringTrackerSamplerAlgorithmPairVector extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public StringTrackerSamplerAlgorithmPairVector(Pointer p) { super(p); }
public StringTrackerSamplerAlgorithmPairVector(BytePointer[] firstValue, TrackerSamplerAlgorithm[] secondValue) { this(Math.min(firstValue.length, secondValue.length)); put(firstValue, secondValue); }
public StringTrackerSamplerAlgorithmPairVector(String[] firstValue, TrackerSamplerAlgorithm[] secondValue) { this(Math.min(firstValue.length, secondValue.length)); put(firstValue, secondValue); }
public StringTrackerSamplerAlgorithmPairVector() { allocate(); }
public StringTrackerSamplerAlgorithmPairVector(long n) { allocate(n); }
private native void allocate();
private native void allocate(@Cast("size_t") long n);
public native @Name("operator=") @ByRef StringTrackerSamplerAlgorithmPairVector put(@ByRef StringTrackerSamplerAlgorithmPairVector x);
public boolean empty() { return size() == 0; }
public native long size();
public void clear() { resize(0); }
public native void resize(@Cast("size_t") long n);
@Index(function = "at") public native @Str BytePointer first(@Cast("size_t") long i); public native StringTrackerSamplerAlgorithmPairVector first(@Cast("size_t") long i, BytePointer first);
@Index(function = "at") public native @Ptr TrackerSamplerAlgorithm second(@Cast("size_t") long i); public native StringTrackerSamplerAlgorithmPairVector second(@Cast("size_t") long i, TrackerSamplerAlgorithm second);
@MemberSetter @Index(function = "at") public native StringTrackerSamplerAlgorithmPairVector first(@Cast("size_t") long i, @Str String first);
public StringTrackerSamplerAlgorithmPairVector put(BytePointer[] firstValue, TrackerSamplerAlgorithm[] secondValue) {
for (int i = 0; i < firstValue.length && i < secondValue.length; i++) {
first(i, firstValue[i]);
second(i, secondValue[i]);
}
return this;
}
public StringTrackerSamplerAlgorithmPairVector put(String[] firstValue, TrackerSamplerAlgorithm[] secondValue) {
for (int i = 0; i < firstValue.length && i < secondValue.length; i++) {
first(i, firstValue[i]);
second(i, secondValue[i]);
}
return this;
}
}
@Name("std::vector<cv::Ptr<cv::TrackerTargetState> >") public static class Trajectory extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Trajectory(Pointer p) { super(p); }
public Trajectory(TrackerTargetState value) { this(1); put(0, value); }
public Trajectory(TrackerTargetState ... array) { this(array.length); put(array); }
public Trajectory() { allocate(); }
public Trajectory(long n) { allocate(n); }
private native void allocate();
private native void allocate(@Cast("size_t") long n);
public native @Name("operator=") @ByRef Trajectory put(@ByRef Trajectory x);
public boolean empty() { return size() == 0; }
public native long size();
public void clear() { resize(0); }
public native void resize(@Cast("size_t") long n);
@Index(function = "at") public native @Ptr TrackerTargetState get(@Cast("size_t") long i);
public native Trajectory put(@Cast("size_t") long i, TrackerTargetState value);
public native @ByVal Iterator begin();
public native @ByVal Iterator end();
@NoOffset @Name("iterator") public static class Iterator extends Pointer {
public Iterator(Pointer p) { super(p); }
public Iterator() { }
public native @Name("operator++") @ByRef Iterator increment();
public native @Name("operator==") boolean equals(@ByRef Iterator it);
public native @Name("operator*") @Ptr @Const TrackerTargetState get();
}
public TrackerTargetState[] get() {
TrackerTargetState[] array = new TrackerTargetState[size() < Integer.MAX_VALUE ? (int)size() : Integer.MAX_VALUE];
for (int i = 0; i < array.length; i++) {
array[i] = get(i);
}
return array;
}
@Override public String toString() {
return java.util.Arrays.toString(get());
}
public TrackerTargetState pop_back() {
long size = size();
TrackerTargetState value = get(size - 1);
resize(size - 1);
return value;
}
public Trajectory push_back(TrackerTargetState value) {
long size = size();
resize(size + 1);
return put(size, value);
}
public Trajectory put(TrackerTargetState value) {
if (size() != 1) { resize(1); }
return put(0, value);
}
public Trajectory put(TrackerTargetState ... array) {
if (size() != array.length) { resize(array.length); }
for (int i = 0; i < array.length; i++) {
put(i, array[i]);
}
return this;
}
}
// Parsed from
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
// #ifndef __OPENCV_TRACKING_LENLEN_HPP__
// #define __OPENCV_TRACKING_LENLEN_HPP__
// #include "opencv2/core/cvdef.h"
/** \defgroup tracking Tracking API
Long-term optical tracking API
------------------------------
Long-term optical tracking is one of the most important problems for many real-world computer vision
applications. Development in this area is very fragmented, and this API provides a single
interface for plugging in several algorithms and comparing them. This work is partially based on
\cite AAM and \cite AMVOT .
These algorithms start from a bounding box of the target and, through their internal representation,
avoid drift during tracking. These long-term trackers are able to evaluate online the
quality of the target's location in the new frame, without ground truth.
There are three main components: the TrackerSampler, the TrackerFeatureSet and the TrackerModel. The
first component is the object that computes the patches over the frame based on the last target
location. The TrackerFeatureSet is the class that manages the features; it is possible to plug in many
kinds of them (HAAR, HOG, LBP, Feature2D, etc.). The last component is the internal representation of the
target: the appearance model. It stores all state candidates and computes the trajectory (the
most likely target states). The class TrackerTargetState represents a possible state of the target.
The TrackerSampler and the TrackerFeatureSet form the visual representation of the target, while
the TrackerModel is the statistical model.
A recent benchmark of these algorithms can be found in \cite OOT .
To see how the API works, try the tracker demo:
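In these Java bindings the same API looks roughly as sketched below. This is a minimal sketch, not
part of the generated code: the TrackerKCF.create() factory, the VideoCapture class from the
opencv_videoio module, and the exact init/update signatures are assumptions based on the underlying
OpenCV C++ API.
{@code
Tracker tracker = TrackerKCF.create();         // assumed factory method
VideoCapture capture = new VideoCapture(0);    // assumed, from the opencv_videoio bindings
Mat frame = new Mat();
capture.read(frame);
Rect2d box = new Rect2d(10, 10, 100, 100);     // initial bounding box of the target
tracker.init(frame, box);
while (capture.read(frame)) {
    if (tracker.update(frame, box)) {
        // box now holds the estimated target location in this frame
    }
}
}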
Creating Own Tracker
--------------------
If you want to create a new tracker, here's what you have to do. First, decide on the name of the class
for the tracker (to match the existing style, we suggest something with the prefix "tracker", e.g.
trackerMIL, trackerBoosting) -- we shall refer to this choice as "classname" in what follows. Also,
you should decide upon the name under which the tracker will be known to the user (the current style
suggests using all capitals, say MIL or BOOSTING) -- we'll call it the "name".
- Declare your tracker in include/opencv2/tracking/tracker.hpp. Your tracker should inherit from
Tracker (please see the example below). You should declare the specialized Params structure,
where you will probably want to put the data needed to initialize your tracker. Also don't
forget to put the BOILERPLATE_CODE(name,classname) macro inside the class declaration. That
macro will generate the static createTracker() function, which we'll talk about later. You should
get something similar to:
{@code
class CV_EXPORTS_W TrackerMIL : public Tracker
{
public:
struct CV_EXPORTS Params
{
Params();
//parameters for sampler
float samplerInitInRadius; // radius for gathering positive instances during init
int samplerInitMaxNegNum; // # negative samples to use during init
float samplerSearchWinSize; // size of search window
float samplerTrackInRadius; // radius for gathering positive instances during tracking
int samplerTrackMaxPosNum; // # positive samples to use during tracking
int samplerTrackMaxNegNum; // # negative samples to use during tracking
int featureSetNumFeatures; // #features
void read( const FileNode& fn );
void write( FileStorage& fs ) const;
};
}
Of course, you can also add any additional methods of your choice. It should be pointed out,
however, that a constructor is not expected to be declared, as creation should be done via
the corresponding createTracker() method.
- In the src/tracker.cpp file, add a BOILERPLATE_CODE(name,classname) line to the body of the
Tracker::create() method you will find there, like:
{@code
Ptr<Tracker> Tracker::create( const String& trackerType )
{
BOILERPLATE_CODE("BOOSTING",TrackerBoosting);
BOILERPLATE_CODE("MIL",TrackerMIL);
return Ptr<Tracker>();
}
}
- Finally, you should implement the function with signature:
{@code
Ptr<classname> classname::createTracker(const classname::Params &parameters){
...
}
}
That function can (and probably will) return a pointer to some derived class of "classname",
which will probably have a real constructor.
Every tracker has three components: TrackerSampler, TrackerFeatureSet and TrackerModel. The first two
are instantiated by the Tracker base class, while the last component is abstract, so you must
implement your own TrackerModel.
### TrackerSampler
TrackerSampler is already instantiated, but you should define the sampling algorithm and add the
classes (or a single class) to TrackerSampler. You can choose one of the ready-made implementations,
such as TrackerSamplerCSC, or you can implement your own sampling method, in which case the class must
inherit TrackerSamplerAlgorithm. Fill in the samplingImpl method, which writes the result into the
"sample" output argument.
Example of creating the specialized TrackerSamplerAlgorithm TrackerSamplerCSC:
{@code
class CV_EXPORTS_W TrackerSamplerCSC : public TrackerSamplerAlgorithm
{
public:
TrackerSamplerCSC( const TrackerSamplerCSC::Params &parameters = TrackerSamplerCSC::Params() );
~TrackerSamplerCSC();
...
protected:
bool samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample );
...
};
}
Example of adding a TrackerSamplerAlgorithm to the TrackerSampler:
{@code
//sampler is the TrackerSampler
Ptr<TrackerSamplerAlgorithm> CSCSampler = new TrackerSamplerCSC( CSCparameters );
if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) )
return false;
//or add CSC sampler with default parameters
//sampler->addTrackerSamplerAlgorithm( "CSC" );
}
\sa
TrackerSamplerCSC, TrackerSamplerAlgorithm
### TrackerFeatureSet
TrackerFeatureSet is already instantiated (as the first component), but you should define what kinds of
features you'll use in your tracker. You can use multiple feature types, so you can add a ready-made
implementation such as TrackerFeatureHAAR to your TrackerFeatureSet, or develop your own implementation.
In the latter case, put the code that extracts the features in the computeImpl method, and optionally
put the code for the refinement and selection of the features in the selection method.
Example of creating the specialized TrackerFeature TrackerFeatureHAAR:
{@code
class CV_EXPORTS_W TrackerFeatureHAAR : public TrackerFeature
{
public:
TrackerFeatureHAAR( const TrackerFeatureHAAR::Params &parameters = TrackerFeatureHAAR::Params() );
~TrackerFeatureHAAR();
void selection( Mat& response, int npoints );
...
protected:
bool computeImpl( const std::vector<Mat>& images, Mat& response );
...
};
}
Example of adding a TrackerFeature to the TrackerFeatureSet:
{@code
//featureSet is the TrackerFeatureSet
Ptr<TrackerFeature> trackerFeature = new TrackerFeatureHAAR( HAARparameters );
featureSet->addTrackerFeature( trackerFeature );
}
\sa
TrackerFeatureHAAR, TrackerFeatureSet
### TrackerModel
TrackerModel is abstract, so in your implementation you must develop your own class that inherits
from TrackerModel. Fill in the method for the estimation of the state, "modelEstimationImpl", which
estimates the most likely target location; see \cite AAM table I (ME) for further information. Fill in
"modelUpdateImpl" in order to update the model; see \cite AAM table I (MU). In this class you can use
ConfidenceMap and Trajectory to store the model. The first represents the model over all
possible candidate states and the second represents the list of all estimated states.
Example of creating the specialized TrackerModel TrackerMILModel:
{@code
class TrackerMILModel : public TrackerModel
{
public:
TrackerMILModel( const Rect& boundingBox );
~TrackerMILModel();
...
protected:
void modelEstimationImpl( const std::vector<Mat>& responses );
void modelUpdateImpl();
...
};
}
And add it to your Tracker:
{@code
bool TrackerMIL::initImpl( const Mat& image, const Rect2d& boundingBox )
{
...
//model is the general TrackerModel field of the general Tracker
model = new TrackerMILModel( boundingBox );
...
}
}
In the last step you should define the TrackerStateEstimator based on your implementation, or you can
use one of the ready-made classes such as TrackerStateEstimatorMILBoosting. It represents the statistical
part of the model that estimates the most likely target state.
Example of creating the specialized TrackerStateEstimator TrackerStateEstimatorMILBoosting:
{@code
class CV_EXPORTS_W TrackerStateEstimatorMILBoosting : public TrackerStateEstimator
{
class TrackerMILTargetState : public TrackerTargetState
{
...
};
public:
TrackerStateEstimatorMILBoosting( int nFeatures = 250 );
~TrackerStateEstimatorMILBoosting();
...
protected:
Ptr<TrackerTargetState> estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps );
void updateImpl( std::vector<ConfidenceMap>& confidenceMaps );
...
};
}
And add it to your TrackerModel:
{@code
//model is the TrackerModel of your Tracker
Ptr<TrackerStateEstimatorMILBoosting> stateEstimator = new TrackerStateEstimatorMILBoosting( params.featureSetNumFeatures );
model->setTrackerStateEstimator( stateEstimator );
}
\sa
TrackerModel, TrackerStateEstimatorMILBoosting, TrackerTargetState
During this step, you should define your TrackerTargetState based on your implementation. The
TrackerTargetState base class has only the bounding box (upper-left position, width and height); you
can enrich it by adding a scale factor, target rotation, etc.
Example of creating the specialized TrackerTargetState TrackerMILTargetState:
{@code
class TrackerMILTargetState : public TrackerTargetState
{
public:
TrackerMILTargetState( const Point2f& position, int targetWidth, int targetHeight, bool foreground, const Mat& features );
~TrackerMILTargetState();
...
private:
bool isTarget;
Mat targetFeatures;
...
};
}
### Try it
To try your tracker you can use the demo at
.
The first argument is the name of the tracker and the second is a video source.
*/
// #include
// #include
// #endif //__OPENCV_TRACKING_LENLEN
// Parsed from
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
// #ifndef __OPENCV_FEATURE_HPP__
// #define __OPENCV_FEATURE_HPP__
// #include "opencv2/core.hpp"
// #include "opencv2/imgproc.hpp"
// #include
// #include
// #include
/*
* TODO This implementation is based on apps/traincascade/
* TODO Changed CvHaarEvaluator based on ADABOOSTING implementation (Grabner et al.)
*/
/** \addtogroup tracking
* \{ */
public static final String FEATURES = "features";
public static final String CC_FEATURES = FEATURES;
public static final String CC_FEATURE_PARAMS = "featureParams";
public static final String CC_MAX_CAT_COUNT = "maxCatCount";
public static final String CC_FEATURE_SIZE = "featSize";
public static final String CC_NUM_FEATURES = "numFeat";
public static final String CC_ISINTEGRAL = "isIntegral";
public static final String CC_RECTS = "rects";
public static final String CC_TILTED = "tilted";
public static final String CC_RECT = "rect";
public static final String LBPF_NAME = "lbpFeatureParams";
public static final String HOGF_NAME = "HOGFeatureParams";
public static final String HFP_NAME = "haarFeatureParams";
public static final int CV_HAAR_FEATURE_MAX = 3;
public static final int N_BINS = 9;
public static final int N_CELLS = 4;
// #define CV_SUM_OFFSETS( p0, p1, p2, p3, rect, step )
// /* (x, y) */
// (p0) = (rect).x + (step) * (rect).y;
// /* (x + w, y) */
// (p1) = (rect).x + (rect).width + (step) * (rect).y;
// /* (x, y + h) */
// (p2) = (rect).x + (step) * ((rect).y + (rect).height);
// /* (x + w, y + h) */
// (p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height);
// #define CV_TILTED_OFFSETS( p0, p1, p2, p3, rect, step )
// /* (x, y) */
// (p0) = (rect).x + (step) * (rect).y;
// /* (x - h, y + h) */
// (p1) = (rect).x - (rect).height + (step) * ((rect).y + (rect).height);
// /* (x + w, y + w) */
// (p2) = (rect).x + (rect).width + (step) * ((rect).y + (rect).width);
// /* (x + w - h, y + w + h) */
// (p3) = (rect).x + (rect).width - (rect).height
// + (step) * ((rect).y + (rect).width + (rect).height);
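/* The two macro transcriptions above compute the four corner offsets of an upright or tilted
rectangle inside a row-major integral image with row stride "step". A plain-Java illustration of how
the upright offsets are typically combined into a rectangle sum (illustrative only, not part of the
bindings):
{@code
static int rectSum(int[] integral, int step, int x, int y, int w, int h) {
    int p0 = x + step * y;                // (x, y)
    int p1 = x + w + step * y;            // (x + w, y)
    int p2 = x + step * (y + h);          // (x, y + h)
    int p3 = x + w + step * (y + h);      // (x + w, y + h)
    return integral[p0] - integral[p1] - integral[p2] + integral[p3];
}
}
*/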
@Namespace("cv") @NoOffset public static class CvParams extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvParams(Pointer p) { super(p); }
// from|to file
public native void write( @ByRef FileStorage fs );
public native @Cast("bool") boolean read( @Const @ByRef FileNode node );
// from|to screen
public native void printDefaults();
public native void printAttrs();
public native @Cast("bool") boolean scanAttr( @StdString BytePointer prmName, @StdString BytePointer val );
public native @Cast("bool") boolean scanAttr( @StdString String prmName, @StdString String val );
public native @StdString BytePointer name(); public native CvParams name(BytePointer name);
}
@Namespace("cv") @NoOffset public static class CvFeatureParams extends CvParams {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvFeatureParams(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvFeatureParams(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public CvFeatureParams position(long position) {
return (CvFeatureParams)super.position(position);
}
/** enum cv::CvFeatureParams:: */
public static final int
HAAR = 0,
LBP = 1,
HOG = 2;
public CvFeatureParams() { super((Pointer)null); allocate(); }
private native void allocate();
public native void init( @Const @ByRef CvFeatureParams fp );
public native void write( @ByRef FileStorage fs );
public native @Cast("bool") boolean read( @Const @ByRef FileNode node );
public static native @Ptr CvFeatureParams create( int featureType );
public native int maxCatCount(); public native CvFeatureParams maxCatCount(int maxCatCount); // 0 in case of numerical features
public native int featSize(); public native CvFeatureParams featSize(int featSize); // 1 in case of simple features (HAAR, LBP) and N_BINS(9)*N_CELLS(4) in case of Dalal's HOG features
public native int numFeatures(); public native CvFeatureParams numFeatures(int numFeatures);
}
@Namespace("cv") public static class CvFeatureEvaluator extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvFeatureEvaluator(Pointer p) { super(p); }
public native void init( @Const CvFeatureParams _featureParams, int _maxSampleCount, @ByVal Size _winSize );
public native void setImage( @Const @ByRef Mat img, @Cast("uchar") byte clsLabel, int idx );
public native void writeFeatures( @ByRef FileStorage fs, @Const @ByRef Mat featureMap );
public native @Name("operator ()") float apply( int featureIdx, int sampleIdx );
public static native @Ptr CvFeatureEvaluator create( int type );
public native int getNumFeatures();
public native int getMaxCatCount();
public native int getFeatureSize();
public native @Const @ByRef Mat getCls();
public native float getCls( int si );
}
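/* Usage sketch for the evaluator factory above (illustrative only; sampleImage is a placeholder Mat
whose size matches the declared window, and the HAAR constant comes from CvFeatureParams):
{@code
CvFeatureParams params = CvFeatureParams.create(CvFeatureParams.HAAR);
CvFeatureEvaluator evaluator = CvFeatureEvaluator.create(CvFeatureParams.HAAR);
evaluator.init(params, 100, new Size(24, 24));  // room for 100 samples of 24x24 windows
evaluator.setImage(sampleImage, (byte)1, 0);    // store sampleImage as sample 0 with class label 1
float response = evaluator.apply(0, 0);         // value of feature 0 on sample 0
}
*/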
@Namespace("cv") @NoOffset public static class CvHaarFeatureParams extends CvFeatureParams {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvHaarFeatureParams(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvHaarFeatureParams(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public CvHaarFeatureParams position(long position) {
return (CvHaarFeatureParams)super.position(position);
}
public CvHaarFeatureParams() { super((Pointer)null); allocate(); }
private native void allocate();
public native void init( @Const @ByRef CvFeatureParams fp );
public native void write( @ByRef FileStorage fs );
public native @Cast("bool") boolean read( @Const @ByRef FileNode node );
public native void printDefaults();
public native void printAttrs();
public native @Cast("bool") boolean scanAttr( @StdString BytePointer prm, @StdString BytePointer val );
public native @Cast("bool") boolean scanAttr( @StdString String prm, @StdString String val );
public native @Cast("bool") boolean isIntegral(); public native CvHaarFeatureParams isIntegral(boolean isIntegral);
}
@Namespace("cv") @NoOffset public static class CvHaarEvaluator extends CvFeatureEvaluator {
static { Loader.load(); }
/** Default native constructor. */
public CvHaarEvaluator() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvHaarEvaluator(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvHaarEvaluator(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public CvHaarEvaluator position(long position) {
return (CvHaarEvaluator)super.position(position);
}
@NoOffset public static class FeatureHaar extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public FeatureHaar(Pointer p) { super(p); }
public FeatureHaar( @ByVal Size patchSize ) { super((Pointer)null); allocate(patchSize); }
private native void allocate( @ByVal Size patchSize );
public native @Cast("bool") boolean eval( @Const @ByRef Mat image, @ByVal Rect ROI, FloatPointer result );
public native @Cast("bool") boolean eval( @Const @ByRef Mat image, @ByVal Rect ROI, FloatBuffer result );
public native @Cast("bool") boolean eval( @Const @ByRef Mat image, @ByVal Rect ROI, float[] result );
public native int getNumAreas();
public native @StdVector FloatPointer getWeights();
public native @Const @ByRef RectVector getAreas();
public native void write( @ByVal FileStorage arg0 );
public native float getInitMean();
public native float getInitSigma();
}
public native void init( @Const CvFeatureParams _featureParams, int _maxSampleCount, @ByVal Size _winSize );
public native void setImage( @Const @ByRef Mat img, @Cast("uchar") byte clsLabel/*=0*/, int idx/*=1*/ );
public native void setImage( @Const @ByRef Mat img );
public native @Name("operator ()") float apply( int featureIdx, int sampleIdx );
public native void writeFeatures( @ByRef FileStorage fs, @Const @ByRef Mat featureMap );
public native void writeFeature( @ByRef FileStorage fs ); // for old file format
public native @StdVector FeatureHaar getFeatures();
public native @ByRef FeatureHaar getFeatures( int idx );
public native @Function void setWinSize( @ByVal Size patchSize );
public native @ByVal @Function Size setWinSize();
public native void generateFeatures();
/**
* TODO new method
* \brief Overload the original generateFeatures in order to limit the number of the features
* @param numFeatures Number of the features
*/
public native void generateFeatures( int numFeatures );
}
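/* Usage sketch (illustrative): CvHaarEvaluator can also be used on its own to generate a pool of Haar
features for a given window size and evaluate one of them on a region of interest. Whether "frame"
must already be an integral image depends on CvHaarFeatureParams::isIntegral, so treat the eval()
call below as a sketch rather than a guaranteed contract; frame is a placeholder Mat.
{@code
CvHaarEvaluator haar = new CvHaarEvaluator();
haar.setWinSize(new Size(24, 24));
haar.generateFeatures(250);                        // limit the pool to 250 features
float[] response = new float[1];
CvHaarEvaluator.FeatureHaar feature = haar.getFeatures(0);
feature.eval(frame, new Rect(0, 0, 24, 24), response);
}
*/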
@Namespace("cv") public static class CvHOGFeatureParams extends CvFeatureParams {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvHOGFeatureParams(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvHOGFeatureParams(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public CvHOGFeatureParams position(long position) {
return (CvHOGFeatureParams)super.position(position);
}
public CvHOGFeatureParams() { super((Pointer)null); allocate(); }
private native void allocate();
}
@Namespace("cv") @NoOffset public static class CvHOGEvaluator extends CvFeatureEvaluator {
static { Loader.load(); }
/** Default native constructor. */
public CvHOGEvaluator() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvHOGEvaluator(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvHOGEvaluator(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public CvHOGEvaluator position(long position) {
return (CvHOGEvaluator)super.position(position);
}
public native void init( @Const CvFeatureParams _featureParams, int _maxSampleCount, @ByVal Size _winSize );
public native void setImage( @Const @ByRef Mat img, @Cast("uchar") byte clsLabel, int idx );
public native @Name("operator ()") float apply( int varIdx, int sampleIdx );
public native void writeFeatures( @ByRef FileStorage fs, @Const @ByRef Mat featureMap );
}
@Namespace("cv") public static class CvLBPFeatureParams extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvLBPFeatureParams(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvLBPFeatureParams(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public CvLBPFeatureParams position(long position) {
return (CvLBPFeatureParams)super.position(position);
}
public CvLBPFeatureParams() { super((Pointer)null); allocate(); }
private native void allocate();
}
@Namespace("cv") @NoOffset public static class CvLBPEvaluator extends CvFeatureEvaluator {
static { Loader.load(); }
/** Default native constructor. */
public CvLBPEvaluator() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public CvLBPEvaluator(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public CvLBPEvaluator(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public CvLBPEvaluator position(long position) {
return (CvLBPEvaluator)super.position(position);
}
public native void init( @Const CvFeatureParams _featureParams, int _maxSampleCount, @ByVal Size _winSize );
public native void setImage( @Const @ByRef Mat img, @Cast("uchar") byte clsLabel, int idx );
public native @Name("operator ()") float apply( int featureIdx, int sampleIdx );
public native void writeFeatures( @ByRef FileStorage fs, @Const @ByRef Mat featureMap );
}
/** \} */
/* namespace cv */
// #endif
// Parsed from
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2015, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
// #ifndef __OPENCV_TRACKING_KALMAN_HPP_
// #define __OPENCV_TRACKING_KALMAN_HPP_
// #include "opencv2/core.hpp"
// #include
/** \brief The interface for Unscented Kalman filter and Augmented Unscented Kalman filter.
*/
@Namespace("cv::tracking") public static class UnscentedKalmanFilter extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public UnscentedKalmanFilter(Pointer p) { super(p); }
/** The function performs prediction step of the algorithm
* @param control - the current control vector,
* @return the predicted estimate of the state.
*/
public native @ByVal Mat predict( @ByVal(nullValue = "cv::InputArray(cv::noArray())") Mat control );
public native @ByVal Mat predict( );
public native @ByVal Mat predict( @ByVal(nullValue = "cv::InputArray(cv::noArray())") UMat control );
public native @ByVal Mat predict( @ByVal(nullValue = "cv::InputArray(cv::noArray())") GpuMat control );
/** The function performs correction step of the algorithm
* @param measurement - the current measurement vector,
* @return the corrected estimate of the state.
*/
public native @ByVal Mat correct( @ByVal Mat measurement );
public native @ByVal Mat correct( @ByVal UMat measurement );
public native @ByVal Mat correct( @ByVal GpuMat measurement );
/**
* @return the process noise cross-covariance matrix.
*/
public native @ByVal Mat getProcessNoiseCov();
/**
* @return the measurement noise cross-covariance matrix.
*/
public native @ByVal Mat getMeasurementNoiseCov();
/**
* @return the error cross-covariance matrix.
*/
public native @ByVal Mat getErrorCov();
/**
* @return the current estimate of the state.
*/
public native @ByVal Mat getState();
}
/** \brief Model of dynamical system for Unscented Kalman filter.
* The interface for dynamical system model. It contains functions for computing the next state and the measurement.
* It must be inherited for using UKF.
*/
@Namespace("cv::tracking") public static class UkfSystemModel extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public UkfSystemModel(Pointer p) { super(p); }
/** The function for computing the next state from the previous state
* @param x_k - previous state vector,
* @param u_k - control vector,
* @param v_k - noise vector,
* @param x_kplus1 - next state vector.
*/
public native void stateConversionFunction( @Const @ByRef Mat x_k, @Const @ByRef Mat u_k, @Const @ByRef Mat v_k, @ByRef Mat x_kplus1 );
/** The function for computing the measurement from the state
* @param x_k - state vector,
* @param n_k - noise vector,
* @param z_k - measurement vector.
*/
public native void measurementFunction( @Const @ByRef Mat x_k, @Const @ByRef Mat n_k, @ByRef Mat z_k );
}
/** \brief Unscented Kalman filter parameters.
* The class for initialization parameters of Unscented Kalman filter
*/
@Namespace("cv::tracking") @NoOffset public static class UnscentedKalmanFilterParams extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public UnscentedKalmanFilterParams(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public UnscentedKalmanFilterParams(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public UnscentedKalmanFilterParams position(long position) {
return (UnscentedKalmanFilterParams)super.position(position);
}
/** Dimensionality of the state vector. */
public native int DP(); public native UnscentedKalmanFilterParams DP(int DP);
/** Dimensionality of the measurement vector. */
public native int MP(); public native UnscentedKalmanFilterParams MP(int MP);
/** Dimensionality of the control vector. */
public native int CP(); public native UnscentedKalmanFilterParams CP(int CP);
/** Type of elements of vectors and matrices, default is CV_64F. */
public native int dataType(); public native UnscentedKalmanFilterParams dataType(int dataType);
/** Initial state, DP x 1, default is zero. */
public native @ByRef Mat stateInit(); public native UnscentedKalmanFilterParams stateInit(Mat stateInit);
/** State estimate cross-covariance matrix, DP x DP, default is identity. */
public native @ByRef Mat errorCovInit(); public native UnscentedKalmanFilterParams errorCovInit(Mat errorCovInit);
/** Process noise cross-covariance matrix, DP x DP. */
public native @ByRef Mat processNoiseCov(); public native UnscentedKalmanFilterParams processNoiseCov(Mat processNoiseCov);
/** Measurement noise cross-covariance matrix, MP x MP. */
public native @ByRef Mat measurementNoiseCov(); public native UnscentedKalmanFilterParams measurementNoiseCov(Mat measurementNoiseCov);
// Parameters of algorithm
/** Default is 1e-3. */
public native double alpha(); public native UnscentedKalmanFilterParams alpha(double alpha);
/** Default is 0. */
public native double k(); public native UnscentedKalmanFilterParams k(double k);
/** Default is 2.0. */
public native double beta(); public native UnscentedKalmanFilterParams beta(double beta);
//Dynamical system model
/** Object of the class containing functions for computing the next state and the measurement. */
public native @Ptr UkfSystemModel model(); public native UnscentedKalmanFilterParams model(UkfSystemModel model);
/** The constructors.
*/
public UnscentedKalmanFilterParams() { super((Pointer)null); allocate(); }
private native void allocate();
/**
* @param dp - dimensionality of the state vector,
* @param mp - dimensionality of the measurement vector,
* @param cp - dimensionality of the control vector,
* @param processNoiseCovDiag - value of elements on main diagonal process noise cross-covariance matrix,
* @param measurementNoiseCovDiag - value of elements on main diagonal measurement noise cross-covariance matrix,
* @param dynamicalSystem - ptr to object of the class containing functions for computing the next state and the measurement,
* @param type - type of the created matrices that should be CV_32F or CV_64F.
*/
public UnscentedKalmanFilterParams( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem, int type/*=CV_64F*/ ) { super((Pointer)null); allocate(dp, mp, cp, processNoiseCovDiag, measurementNoiseCovDiag, dynamicalSystem, type); }
private native void allocate( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem, int type/*=CV_64F*/ );
public UnscentedKalmanFilterParams( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem ) { super((Pointer)null); allocate(dp, mp, cp, processNoiseCovDiag, measurementNoiseCovDiag, dynamicalSystem); }
private native void allocate( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem );
/** The function for initialization of Unscented Kalman filter
* @param dp - dimensionality of the state vector,
* @param mp - dimensionality of the measurement vector,
* @param cp - dimensionality of the control vector,
* @param processNoiseCovDiag - value of elements on main diagonal process noise cross-covariance matrix,
* @param measurementNoiseCovDiag - value of elements on main diagonal measurement noise cross-covariance matrix,
* @param dynamicalSystem - ptr to object of the class containing functions for computing the next state and the measurement,
* @param type - type of the created matrices that should be CV_32F or CV_64F.
*/
public native void init( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem, int type/*=CV_64F*/ );
public native void init( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem );
}
/** \brief Augmented Unscented Kalman filter parameters.
* The class for initialization parameters of Augmented Unscented Kalman filter
*/
@Namespace("cv::tracking") public static class AugmentedUnscentedKalmanFilterParams extends UnscentedKalmanFilterParams {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public AugmentedUnscentedKalmanFilterParams(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public AugmentedUnscentedKalmanFilterParams(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public AugmentedUnscentedKalmanFilterParams position(long position) {
return (AugmentedUnscentedKalmanFilterParams)super.position(position);
}
public AugmentedUnscentedKalmanFilterParams() { super((Pointer)null); allocate(); }
private native void allocate();
/**
* @param dp - dimensionality of the state vector,
* @param mp - dimensionality of the measurement vector,
* @param cp - dimensionality of the control vector,
* @param processNoiseCovDiag - value of elements on main diagonal process noise cross-covariance matrix,
* @param measurementNoiseCovDiag - value of elements on main diagonal measurement noise cross-covariance matrix,
* @param dynamicalSystem - ptr to object of the class containing functions for computing the next state and the measurement,
* @param type - type of the created matrices that should be CV_32F or CV_64F.
*/
public AugmentedUnscentedKalmanFilterParams( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem, int type/*=CV_64F*/ ) { super((Pointer)null); allocate(dp, mp, cp, processNoiseCovDiag, measurementNoiseCovDiag, dynamicalSystem, type); }
private native void allocate( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem, int type/*=CV_64F*/ );
public AugmentedUnscentedKalmanFilterParams( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem ) { super((Pointer)null); allocate(dp, mp, cp, processNoiseCovDiag, measurementNoiseCovDiag, dynamicalSystem); }
private native void allocate( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem );
/** The function for initialization of Augmented Unscented Kalman filter
* @param dp - dimensionality of the state vector,
* @param mp - dimensionality of the measurement vector,
* @param cp - dimensionality of the control vector,
* @param processNoiseCovDiag - value of elements on main diagonal process noise cross-covariance matrix,
* @param measurementNoiseCovDiag - value of elements on main diagonal measurement noise cross-covariance matrix,
* @param dynamicalSystem - object of the class containing functions for computing the next state and the measurement,
* @param type - type of the created matrices that should be CV_32F or CV_64F.
*/
public native void init( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem, int type/*=CV_64F*/ );
public native void init( int dp, int mp, int cp, double processNoiseCovDiag, double measurementNoiseCovDiag,
@Ptr UkfSystemModel dynamicalSystem );
}
/** \brief Unscented Kalman Filter factory method
* The class implements an Unscented Kalman filter .
* @param params - an object of the UnscentedKalmanFilterParams class containing UKF parameters.
* @return pointer to the object of the UnscentedKalmanFilterImpl class implementing UnscentedKalmanFilter.
*/
@Namespace("cv::tracking") public static native @Ptr UnscentedKalmanFilter createUnscentedKalmanFilter( @Const @ByRef UnscentedKalmanFilterParams params );
/** \brief Augmented Unscented Kalman Filter factory method
* The class implements an Augmented Unscented Kalman filter http://becs.aalto.fi/en/research/bayes/ekfukf/documentation.pdf, page 31-33.
* AUKF is more accurate than UKF but its computational complexity is larger.
* @param params - an object of the AugmentedUnscentedKalmanFilterParams class containing AUKF parameters.
* @return pointer to the object of the AugmentedUnscentedKalmanFilterImpl class implementing UnscentedKalmanFilter.
*/
@Namespace("cv::tracking") public static native @Ptr UnscentedKalmanFilter createAugmentedUnscentedKalmanFilter( @Const @ByRef AugmentedUnscentedKalmanFilterParams params );
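/* A minimal sketch of the intended UKF flow, pieced together from the declarations above. It assumes
the presets virtualize UkfSystemModel so that its callbacks can be overridden from Java; the
declarations above carry no @Virtual annotation, so if they are not virtualized the system model has
to be implemented natively instead. Dimensions, noise magnitudes and the measurement Mat are
placeholders.
{@code
class ConstantVelocityModel extends UkfSystemModel {
    @Override public void stateConversionFunction(Mat x_k, Mat u_k, Mat v_k, Mat x_kplus1) {
        // write the propagated state into x_kplus1
    }
    @Override public void measurementFunction(Mat x_k, Mat n_k, Mat z_k) {
        // write the predicted measurement into z_k
    }
}
UnscentedKalmanFilterParams params =
        new UnscentedKalmanFilterParams(4, 2, 0, 1e-3, 1e-2, new ConstantVelocityModel());
UnscentedKalmanFilter ukf = createUnscentedKalmanFilter(params);
Mat predicted = ukf.predict();
Mat corrected = ukf.correct(measurement);   // measurement: an MP x 1 Mat
}
*/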
// tracking
// cv
// #endif
// Parsed from
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
// #ifndef __OPENCV_ONLINEMIL_HPP__
// #define __OPENCV_ONLINEMIL_HPP__
// #include "opencv2/core.hpp"
// #include
/** \addtogroup tracking
* \{ */
//TODO based on the original implementation
//http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml
@Namespace("cv") @NoOffset public static class ClfMilBoost extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public ClfMilBoost(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public ClfMilBoost(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public ClfMilBoost position(long position) {
return (ClfMilBoost)super.position(position);
}
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
public native int _numSel(); public native Params _numSel(int _numSel);
public native int _numFeat(); public native Params _numFeat(int _numFeat);
public native float _lRate(); public native Params _lRate(float _lRate);
}
public ClfMilBoost() { super((Pointer)null); allocate(); }
private native void allocate();
public native void init( @Const @ByRef(nullValue = "cv::ClfMilBoost::Params()") Params parameters );
public native void init( );
public native void update( @Const @ByRef Mat posx, @Const @ByRef Mat negx );
public native @StdVector FloatPointer classify( @Const @ByRef Mat x, @Cast("bool") boolean logR/*=true*/ );
public native @StdVector FloatPointer classify( @Const @ByRef Mat x );
public native float sigmoid( float x );
}
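/** Usage sketch (illustrative only, not part of the generated bindings): driving ClfMilBoost from Java.
The parameter values are arbitrary, and posx, negx and x stand for caller-supplied feature matrices
(assumed to hold one sample per row).
{@code
ClfMilBoost.Params params = new ClfMilBoost.Params();
params._numSel(50)._numFeat(250)._lRate(0.85f);      // illustrative values
ClfMilBoost boost = new ClfMilBoost();
boost.init(params);
boost.update(posx, negx);                            // posx/negx: positive/negative sample features (assumed inputs)
FloatPointer probs = boost.classify(x, true);        // x: samples to score (assumed input); true = log-ratio scores
float first = probs.get(0);
}
*/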
@Namespace("cv") @NoOffset public static class ClfOnlineStump extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public ClfOnlineStump(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public ClfOnlineStump(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public ClfOnlineStump position(long position) {
return (ClfOnlineStump)super.position(position);
}
public native float _mu0(); public native ClfOnlineStump _mu0(float _mu0);
public native float _mu1(); public native ClfOnlineStump _mu1(float _mu1);
public native float _sig0(); public native ClfOnlineStump _sig0(float _sig0);
public native float _sig1(); public native ClfOnlineStump _sig1(float _sig1);
public native float _q(); public native ClfOnlineStump _q(float _q);
public native int _s(); public native ClfOnlineStump _s(int _s);
public native float _log_n1(); public native ClfOnlineStump _log_n1(float _log_n1);
public native float _log_n0(); public native ClfOnlineStump _log_n0(float _log_n0);
public native float _e1(); public native ClfOnlineStump _e1(float _e1);
public native float _e0(); public native ClfOnlineStump _e0(float _e0);
public native float _lRate(); public native ClfOnlineStump _lRate(float _lRate);
public ClfOnlineStump() { super((Pointer)null); allocate(); }
private native void allocate();
public ClfOnlineStump( int ind ) { super((Pointer)null); allocate(ind); }
private native void allocate( int ind );
public native void init();
public native void update( @Const @ByRef Mat posx, @Const @ByRef Mat negx );
public native @Cast("bool") boolean classify( @Const @ByRef Mat x, int i );
public native float classifyF( @Const @ByRef Mat x, int i );
public native @StdVector FloatPointer classifySetF( @Const @ByRef Mat x );
}
/** \} */
/* namespace cv */
// #endif
// Parsed from <opencv2/tracking/onlineBoosting.hpp>
// #ifndef __OPENCV_ONLINEBOOSTING_HPP__
// #define __OPENCV_ONLINEBOOSTING_HPP__
// #include "opencv2/core.hpp"
/** \addtogroup tracking
* \{ */
//TODO based on the original implementation
//http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml
@Namespace("cv") @NoOffset public static class StrongClassifierDirectSelection extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public StrongClassifierDirectSelection(Pointer p) { super(p); }
public StrongClassifierDirectSelection( int numBaseClf, int numWeakClf, @ByVal Size patchSz, @Const @ByRef Rect sampleROI, @Cast("bool") boolean useFeatureEx/*=false*/, int iterationInit/*=0*/ ) { super((Pointer)null); allocate(numBaseClf, numWeakClf, patchSz, sampleROI, useFeatureEx, iterationInit); }
private native void allocate( int numBaseClf, int numWeakClf, @ByVal Size patchSz, @Const @ByRef Rect sampleROI, @Cast("bool") boolean useFeatureEx/*=false*/, int iterationInit/*=0*/ );
public StrongClassifierDirectSelection( int numBaseClf, int numWeakClf, @ByVal Size patchSz, @Const @ByRef Rect sampleROI ) { super((Pointer)null); allocate(numBaseClf, numWeakClf, patchSz, sampleROI); }
private native void allocate( int numBaseClf, int numWeakClf, @ByVal Size patchSz, @Const @ByRef Rect sampleROI );
public native void initBaseClassifier();
public native @Cast("bool") boolean update( @Const @ByRef Mat image, int target, float importance/*=1.0*/ );
public native @Cast("bool") boolean update( @Const @ByRef Mat image, int target );
public native float eval( @Const @ByRef Mat response );
public native @StdVector IntPointer getSelectedWeakClassifier();
public native float classifySmooth( @Const @ByRef MatVector images, @Const @ByRef Rect sampleROI, @ByRef IntPointer idx );
public native float classifySmooth( @Const @ByRef MatVector images, @Const @ByRef Rect sampleROI, @ByRef IntBuffer idx );
public native float classifySmooth( @Const @ByRef MatVector images, @Const @ByRef Rect sampleROI, @ByRef int[] idx );
public native int getNumBaseClassifier();
public native @ByVal Size getPatchSize();
public native @ByVal Rect getROI();
public native @Cast("bool") boolean getUseFeatureExchange();
public native int getReplacedClassifier();
public native void replaceWeakClassifier( int idx );
public native int getSwappedClassifier();
}
@Namespace("cv") @NoOffset public static class BaseClassifier extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public BaseClassifier(Pointer p) { super(p); }
public BaseClassifier( int numWeakClassifier, int iterationInit ) { super((Pointer)null); allocate(numWeakClassifier, iterationInit); }
private native void allocate( int numWeakClassifier, int iterationInit );
public BaseClassifier( int numWeakClassifier, int iterationInit, @Cast("cv::WeakClassifierHaarFeature**") PointerPointer weakCls ) { super((Pointer)null); allocate(numWeakClassifier, iterationInit, weakCls); }
private native void allocate( int numWeakClassifier, int iterationInit, @Cast("cv::WeakClassifierHaarFeature**") PointerPointer weakCls );
public BaseClassifier( int numWeakClassifier, int iterationInit, @ByPtrPtr WeakClassifierHaarFeature weakCls ) { super((Pointer)null); allocate(numWeakClassifier, iterationInit, weakCls); }
private native void allocate( int numWeakClassifier, int iterationInit, @ByPtrPtr WeakClassifierHaarFeature weakCls );
public native @Cast("cv::WeakClassifierHaarFeature**") PointerPointer getReferenceWeakClassifier();
public native void trainClassifier( @Const @ByRef Mat image, int target, float importance, @Cast("bool*") @StdVector BoolPointer errorMask );
public native void trainClassifier( @Const @ByRef Mat image, int target, float importance, @Cast("bool*") @StdVector boolean[] errorMask );
public native int selectBestClassifier( @Cast("bool*") @StdVector BoolPointer errorMask, float importance, @StdVector FloatPointer errors );
public native int selectBestClassifier( @Cast("bool*") @StdVector boolean[] errorMask, float importance, @StdVector FloatBuffer errors );
public native int selectBestClassifier( @Cast("bool*") @StdVector BoolPointer errorMask, float importance, @StdVector float[] errors );
public native int selectBestClassifier( @Cast("bool*") @StdVector boolean[] errorMask, float importance, @StdVector FloatPointer errors );
public native int selectBestClassifier( @Cast("bool*") @StdVector BoolPointer errorMask, float importance, @StdVector FloatBuffer errors );
public native int selectBestClassifier( @Cast("bool*") @StdVector boolean[] errorMask, float importance, @StdVector float[] errors );
public native int computeReplaceWeakestClassifier( @StdVector FloatPointer errors );
public native int computeReplaceWeakestClassifier( @StdVector FloatBuffer errors );
public native int computeReplaceWeakestClassifier( @StdVector float[] errors );
public native void replaceClassifierStatistic( int sourceIndex, int targetIndex );
public native int getIdxOfNewWeakClassifier();
public native int eval( @Const @ByRef Mat image );
public native float getError( int curWeakClassifier );
public native void getErrors( FloatPointer errors );
public native void getErrors( FloatBuffer errors );
public native void getErrors( float[] errors );
public native int getSelectedClassifier();
public native void replaceWeakClassifier( int index );
}
@Namespace("cv") @NoOffset public static class EstimatedGaussDistribution extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public EstimatedGaussDistribution(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public EstimatedGaussDistribution(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public EstimatedGaussDistribution position(long position) {
return (EstimatedGaussDistribution)super.position(position);
}
public EstimatedGaussDistribution() { super((Pointer)null); allocate(); }
private native void allocate();
public EstimatedGaussDistribution( float P_mean, float R_mean, float P_sigma, float R_sigma ) { super((Pointer)null); allocate(P_mean, R_mean, P_sigma, R_sigma); }
private native void allocate( float P_mean, float R_mean, float P_sigma, float R_sigma );
public native void update( float value ); //, float timeConstant = -1.0);
public native float getMean();
public native float getSigma();
public native void setValues( float mean, float sigma );
}
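/** A small sketch (illustrative only) of feeding the running Gaussian estimate from Java; the response
values passed to update() are placeholders.
{@code
EstimatedGaussDistribution dist = new EstimatedGaussDistribution();
for (float v : new float[] {0.2f, 0.4f, 0.3f}) {     // placeholder classifier responses
    dist.update(v);
}
float mean = dist.getMean();
float sigma = dist.getSigma();
}
*/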
@Namespace("cv") @NoOffset public static class WeakClassifierHaarFeature extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public WeakClassifierHaarFeature(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public WeakClassifierHaarFeature(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public WeakClassifierHaarFeature position(long position) {
return (WeakClassifierHaarFeature)super.position(position);
}
public WeakClassifierHaarFeature() { super((Pointer)null); allocate(); }
private native void allocate();
public native @Cast("bool") boolean update( float value, int target );
public native int eval( float value );
}
@Namespace("cv") @NoOffset public static class Detector extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Detector(Pointer p) { super(p); }
public Detector( StrongClassifierDirectSelection classifier ) { super((Pointer)null); allocate(classifier); }
private native void allocate( StrongClassifierDirectSelection classifier );
public native void classifySmooth( @Const @ByRef MatVector image, float minMargin/*=0*/ );
public native void classifySmooth( @Const @ByRef MatVector image );
public native int getNumDetections();
public native float getConfidence( int patchIdx );
public native float getConfidenceOfDetection( int detectionIdx );
public native float getConfidenceOfBestDetection();
public native int getPatchIdxOfBestDetection();
public native int getPatchIdxOfDetection( int detectionIdx );
public native @StdVector IntPointer getIdxDetections();
public native @StdVector FloatPointer getConfidences();
public native @Const @ByRef Mat getConfImageDisplay();
}
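/** A hedged sketch of wiring a StrongClassifierDirectSelection into a Detector. The patch size, ROI and
the content of the patches vector are assumptions, and in practice the classifier would be trained with
update() on labelled patches before any detection is attempted.
{@code
Size patchSize = new Size(32, 32);                   // assumed patch size
Rect sampleROI = new Rect(0, 0, 320, 240);           // assumed search region
StrongClassifierDirectSelection strong =
        new StrongClassifierDirectSelection(100, 10, patchSize, sampleROI);
strong.initBaseClassifier();
// ... train with strong.update(patch, target) on positive/negative patches first ...
Detector detector = new Detector(strong);
detector.classifySmooth(patches, 0.0f);              // patches: MatVector of candidate patches (assumed input)
if (detector.getNumDetections() > 0) {
    float best = detector.getConfidenceOfBestDetection();
    int patchIdx = detector.getPatchIdxOfBestDetection();
}
}
*/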
@Namespace("cv") @NoOffset public static class ClassifierThreshold extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public ClassifierThreshold(Pointer p) { super(p); }
public ClassifierThreshold( EstimatedGaussDistribution posSamples, EstimatedGaussDistribution negSamples ) { super((Pointer)null); allocate(posSamples, negSamples); }
private native void allocate( EstimatedGaussDistribution posSamples, EstimatedGaussDistribution negSamples );
public native void update( float value, int target );
public native int eval( float value );
public native Pointer getDistribution( int target );
}
/** \} */
/* namespace cv */
// #endif
// Parsed from <opencv2/tracking/tldDataset.hpp>
// #ifndef OPENCV_TLD_DATASET
// #define OPENCV_TLD_DATASET
// #include "opencv2/core.hpp"
@Namespace("cv::tld") public static native @ByVal Rect2d tld_InitDataset(int videoInd, @Cast("const char*") BytePointer rootPath/*="TLD_dataset"*/, int datasetInd/*=0*/);
@Namespace("cv::tld") public static native @ByVal Rect2d tld_InitDataset(int videoInd);
@Namespace("cv::tld") public static native @ByVal Rect2d tld_InitDataset(int videoInd, String rootPath/*="TLD_dataset"*/, int datasetInd/*=0*/);
@Namespace("cv::tld") public static native @Str BytePointer tld_getNextDatasetFrame();
// #endif
// Parsed from <opencv2/tracking/tracker.hpp>
// #ifndef __OPENCV_TRACKER_HPP__
// #define __OPENCV_TRACKER_HPP__
// #include "opencv2/core.hpp"
// #include "opencv2/imgproc/types_c.h"
// #include "feature.hpp"
// #include "onlineMIL.hpp"
// #include "onlineBoosting.hpp"
/*
* Partially based on:
* ====================================================================================================================
* - [AAM] S. Salti, A. Cavallaro, L. Di Stefano, Adaptive Appearance Modeling for Video Tracking: Survey and Evaluation
* - [AMVOT] X. Li, W. Hu, C. Shen, Z. Zhang, A. Dick, A. van den Hengel, A Survey of Appearance Models in Visual Object Tracking
*
* This Tracking API has been designed with PlantUML. If you modify this API please change UML files under modules/tracking/doc/uml
*
*/
/** \addtogroup tracking
* \{ */
/************************************ TrackerFeature Base Classes ************************************/
/** \brief Abstract base class for TrackerFeature that represents the feature.
*/
@Namespace("cv") public static class TrackerFeature extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerFeature(Pointer p) { super(p); }
/** \brief Compute the features in the images collection
@param images The images
@param response The output response
*/
public native void compute( @Const @ByRef MatVector images, @ByRef Mat response );
/** \brief Create TrackerFeature by tracker feature type
@param trackerFeatureType The TrackerFeature name
The modes available now:
- "HAAR" -- Haar Feature-based
The modes that will be available soon:
- "HOG" -- Histogram of Oriented Gradients features
- "LBP" -- Local Binary Pattern features
- "FEATURE2D" -- All types of Feature2D
*/
public static native @Ptr @ByVal TrackerFeature create( @Str BytePointer trackerFeatureType );
public static native @Ptr @ByVal TrackerFeature create( @Str String trackerFeatureType );
/** \brief Identify most effective features
@param response Collection of response for the specific TrackerFeature
@param npoints Max number of features
\note This method modifies the response parameter
*/
public native void selection( @ByRef Mat response, int npoints );
/** \brief Get the name of the specific TrackerFeature
*/
public native @Str BytePointer getClassName();
}
/** \brief Class that manages the extraction and selection of features
\cite AAM Feature Extraction and Feature Set Refinement (Feature Processing and Feature Selection).
See table I and section III C \cite AMVOT Appearance modelling -\> Visual representation (Table II,
section 3.1 - 3.2)
TrackerFeatureSet is an aggregation of TrackerFeature
\sa
TrackerFeature
*/
@Namespace("cv") @NoOffset public static class TrackerFeatureSet extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerFeatureSet(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerFeatureSet(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerFeatureSet position(long position) {
return (TrackerFeatureSet)super.position(position);
}
public TrackerFeatureSet() { super((Pointer)null); allocate(); }
private native void allocate();
/** \brief Extract features from the images collection
@param images The input images
*/
public native void extraction( @Const @ByRef MatVector images );
/** \brief Identify most effective features for all feature types (optional)
*/
public native void selection();
/** \brief Remove outliers for all feature types (optional)
*/
public native void removeOutliers();
/** \brief Add TrackerFeature to the collection. Return true if TrackerFeature is added, false otherwise
@param trackerFeatureType The TrackerFeature name
The modes available now:
- "HAAR" -- Haar Feature-based
The modes that will be available soon:
- "HOG" -- Histogram of Oriented Gradients features
- "LBP" -- Local Binary Pattern features
- "FEATURE2D" -- All types of Feature2D
Example TrackerFeatureSet::addTrackerFeature:
{@code
//sample usage:
Ptr<TrackerFeature> trackerFeature = new TrackerFeatureHAAR( HAARparameters );
featureSet->addTrackerFeature( trackerFeature );
//or add CSC sampler with default parameters
//featureSet->addTrackerFeature( "HAAR" );
}
\note If you use the second method, you must initialize the TrackerFeature
*/
public native @Cast("bool") boolean addTrackerFeature( @Str BytePointer trackerFeatureType );
public native @Cast("bool") boolean addTrackerFeature( @Str String trackerFeatureType );
/** \overload
@param feature The TrackerFeature class
*/
public native @Cast("bool") boolean addTrackerFeature( @Ptr @ByVal TrackerFeature feature );
/** \brief Get the TrackerFeature collection (TrackerFeature name, TrackerFeature pointer)
*/
public native @Const @ByRef StringTrackerFeaturePairVector getTrackerFeature();
/** \brief Get the responses
\note Be sure to call extraction before getResponses.
*/
public native @Const @ByRef MatVector getResponses();
}
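/** A rough Java rendering of the addTrackerFeature snippet above, shown only as a sketch; frame is an
assumed input Mat supplied by the caller, and the HAAR parameters are left at their defaults.
{@code
TrackerFeatureSet featureSet = new TrackerFeatureSet();
TrackerFeature trackerFeature = new TrackerFeatureHAAR(new TrackerFeatureHAAR.Params());
featureSet.addTrackerFeature(trackerFeature);
// or add the HAAR feature by name (it is then created with default parameters):
// featureSet.addTrackerFeature("HAAR");
featureSet.extraction(new MatVector(frame));         // frame: assumed input image
MatVector responses = featureSet.getResponses();
}
*/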
/************************************ TrackerSampler Base Classes ************************************/
/** \brief Abstract base class for TrackerSamplerAlgorithm that represents the algorithm for the specific
sampler.
*/
@Namespace("cv") public static class TrackerSamplerAlgorithm extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerSamplerAlgorithm(Pointer p) { super(p); }
/**
* \brief Destructor
*/
/** \brief Create TrackerSamplerAlgorithm by tracker sampler type.
@param trackerSamplerType The trackerSamplerType name
The modes available now:
- "CSC" -- Current State Center
- "CS" -- Current State
*/
public static native @Ptr @ByVal TrackerSamplerAlgorithm create( @Str BytePointer trackerSamplerType );
public static native @Ptr @ByVal TrackerSamplerAlgorithm create( @Str String trackerSamplerType );
/** \brief Computes the regions starting from a position in an image.
Return true if samples are computed, false otherwise
@param image The current frame
@param boundingBox The bounding box from which regions can be calculated
@param sample The computed samples \cite AAM Fig. 1 variable Sk
*/
public native @Cast("bool") boolean sampling( @Const @ByRef Mat image, @ByVal Rect boundingBox, @ByRef MatVector sample );
/** \brief Get the name of the specific TrackerSamplerAlgorithm
*/
public native @Str BytePointer getClassName();
}
/** \brief Class that manages the sampler in order to select regions for updating the model of the tracker
\cite AAM Sampling and Labeling. See table I and section III B
TrackerSampler is an aggregation of TrackerSamplerAlgorithm
\sa
TrackerSamplerAlgorithm
*/
@Namespace("cv") @NoOffset public static class TrackerSampler extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerSampler(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerSampler(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerSampler position(long position) {
return (TrackerSampler)super.position(position);
}
/**
* \brief Constructor
*/
public TrackerSampler() { super((Pointer)null); allocate(); }
private native void allocate();
/**
* \brief Destructor
*/
/** \brief Computes the regions starting from a position in an image
@param image The current frame
@param boundingBox The bounding box from which regions can be calculated
*/
public native void sampling( @Const @ByRef Mat image, @ByVal Rect boundingBox );
/** \brief Return the collection of the TrackerSamplerAlgorithm
*/
public native @Const @ByRef StringTrackerSamplerAlgorithmPairVector getSamplers();
/** \brief Return the samples from all TrackerSamplerAlgorithm, \cite AAM Fig. 1 variable Sk
*/
public native @Const @ByRef MatVector getSamples();
/** \brief Add TrackerSamplerAlgorithm to the collection. Return true if sampler is added, false otherwise
@param trackerSamplerAlgorithmType The TrackerSamplerAlgorithm name
The modes available now:
- "CSC" -- Current State Center
- "CS" -- Current State
- "PF" -- Particle Filtering
Example TrackerSamplerAlgorithm::addTrackerSamplerAlgorithm:
{@code
TrackerSamplerCSC::Params CSCparameters;
Ptr<TrackerSamplerAlgorithm> CSCSampler = new TrackerSamplerCSC( CSCparameters );
if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) )
return false;
//or add CSC sampler with default parameters
//sampler->addTrackerSamplerAlgorithm( "CSC" );
}
\note If you use the second method, you must initialize the TrackerSamplerAlgorithm
*/
public native @Cast("bool") boolean addTrackerSamplerAlgorithm( @Str BytePointer trackerSamplerAlgorithmType );
public native @Cast("bool") boolean addTrackerSamplerAlgorithm( @Str String trackerSamplerAlgorithmType );
/** \overload
@param sampler The TrackerSamplerAlgorithm
*/
public native @Cast("bool") boolean addTrackerSamplerAlgorithm( @Ptr @ByVal TrackerSamplerAlgorithm sampler );
}
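/** A rough Java rendering of the addTrackerSamplerAlgorithm snippet above, shown only as a sketch;
frame and boundingBox are assumed inputs supplied by the caller.
{@code
TrackerSampler sampler = new TrackerSampler();
TrackerSamplerCSC.Params CSCparameters = new TrackerSamplerCSC.Params();
TrackerSamplerAlgorithm CSCSampler = new TrackerSamplerCSC(CSCparameters);
if (!sampler.addTrackerSamplerAlgorithm(CSCSampler)) {
    // handle the failure
}
// or add the CSC sampler by name, with default parameters:
// sampler.addTrackerSamplerAlgorithm("CSC");
sampler.sampling(frame, boundingBox);                // frame: Mat, boundingBox: Rect (assumed inputs)
MatVector samples = sampler.getSamples();
}
*/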
/************************************ TrackerModel Base Classes ************************************/
/** \brief Abstract base class for TrackerTargetState that represents a possible state of the target.
See \cite AAM \f$\hat{x}^{i}_{k}\f$ for all the state candidates.
Inherit this class with your own target state; in your implementation you can add scale variation,
width, height, orientation, etc.
*/
@Namespace("cv") public static class TrackerTargetState extends Pointer {
static { Loader.load(); }
/** Default native constructor. */
public TrackerTargetState() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerTargetState(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerTargetState(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public TrackerTargetState position(long position) {
return (TrackerTargetState)super.position(position);
}
/**
* \brief Get the position
* @return The position
*/
public native @ByVal Point2f getTargetPosition();
/**
* \brief Set the position
* @param position The position
*/
public native void setTargetPosition( @Const @ByRef Point2f position );
/**
* \brief Get the width of the target
* @return The width of the target
*/
public native int getTargetWidth();
/**
* \brief Set the width of the target
* @param width The width of the target
*/
public native void setTargetWidth( int width );
/**
* \brief Get the height of the target
* @return The height of the target
*/
public native int getTargetHeight();
/**
* \brief Set the height of the target
* @param height The height of the target
*/
public native void setTargetHeight( int height );
}
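/** A tiny sketch (illustrative only) of filling a TrackerTargetState; the coordinates and sizes are
placeholders.
{@code
TrackerTargetState state = new TrackerTargetState();
state.setTargetPosition(new Point2f(10.0f, 20.0f));  // placeholder top-left corner
state.setTargetWidth(64);
state.setTargetHeight(48);
Point2f position = state.getTargetPosition();
}
*/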
/** \brief Represents the model of the target at frame \f$k\f$ (all states and scores)
See \cite AAM The set of the pair \f$\langle \hat{x}^{i}_{k}, C^{i}_{k} \rangle\f$
\sa TrackerTargetState
*/
/** \brief Represents the estimated states for all frames
\cite AAM \f$x_{k}\f$ is the trajectory of the target up to time \f$k\f$
\sa TrackerTargetState
*/
/** \brief Abstract base class for TrackerStateEstimator that estimates the most likely target state.
See \cite AAM State estimator
See \cite AMVOT Statistical modeling (Fig. 3), Table III (generative) - IV (discriminative) - V (hybrid)
*/
@Namespace("cv") public static class TrackerStateEstimator extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerStateEstimator(Pointer p) { super(p); }
/** \brief Estimate the most likely target state, return the estimated state
@param confidenceMaps The overall appearance model as a list of ConfidenceMap
*/
public native @Ptr @ByVal TrackerTargetState estimate( @Const @ByRef ConfidenceMapVector confidenceMaps );
/** \brief Update the ConfidenceMap with the scores
@param confidenceMaps The overall appearance model as a list of ConfidenceMap
*/
public native void update( @ByRef ConfidenceMapVector confidenceMaps );
/** \brief Create TrackerStateEstimator by tracker state estimator type
@param trackeStateEstimatorType The TrackerStateEstimator name
The modes available now:
- "BOOSTING" -- Boosting-based discriminative appearance models. See \cite AMVOT section 4.4
The modes available soon:
- "SVM" -- SVM-based discriminative appearance models. See \cite AMVOT section 4.5
*/
public static native @Ptr TrackerStateEstimator create( @Str BytePointer trackeStateEstimatorType );
public static native @Ptr TrackerStateEstimator create( @Str String trackeStateEstimatorType );
/** \brief Get the name of the specific TrackerStateEstimator
*/
public native @Str BytePointer getClassName();
}
/** \brief Abstract class that represents the model of the target. It must be instantiated by a specialized
tracker.
See \cite AAM Ak
Inherit this with your own TrackerModel
*/
@Namespace("cv") @NoOffset public static class TrackerModel extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerModel(Pointer p) { super(p); }
/**
* \brief Constructor
*/
/**
* \brief Destructor
*/
/** \brief Set the TrackerStateEstimator, return true if the tracker state estimator is added, false otherwise
@param trackerStateEstimator The TrackerStateEstimator
\note You can add only one TrackerStateEstimator
*/
public native @Cast("bool") boolean setTrackerStateEstimator( @Ptr TrackerStateEstimator trackerStateEstimator );
/** \brief Estimate the most likely target location
\cite AAM ME, Model Estimation table I
@param responses Features extracted from TrackerFeatureSet
*/
public native void modelEstimation( @Const @ByRef MatVector responses );
/** \brief Update the model
\cite AAM MU, Model Update table I
*/
public native void modelUpdate();
/** \brief Run the TrackerStateEstimator, return true if it is possible to estimate a new state, false otherwise
*/
public native @Cast("bool") boolean runStateEstimator();
/** \brief Set the current TrackerTargetState in the Trajectory
@param lastTargetState The current TrackerTargetState
*/
public native void setLastTargetState( @Const @Ptr @ByRef TrackerTargetState lastTargetState );
/** \brief Get the last TrackerTargetState from Trajectory
*/
public native @Ptr @ByVal TrackerTargetState getLastTargetState();
/** \brief Get the list of the ConfidenceMap
*/
public native @Const @ByRef ConfidenceMapVector getConfidenceMaps();
/** \brief Get the last ConfidenceMap for the current frame
*/
public native @Const @ByRef ConfidenceMap getLastConfidenceMap();
/** \brief Get the TrackerStateEstimator
*/
public native @Ptr TrackerStateEstimator getTrackerStateEstimator();
}
/************************************ Tracker Base Class ************************************/
/** \brief Base abstract class for the long-term tracker:
*/
@Namespace("cv") @NoOffset public static class Tracker extends Algorithm {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Tracker(Pointer p) { super(p); }
/** \brief Initialize the tracker with a known bounding box that surrounds the target
@param image The initial frame
@param boundingBox The initial bounding box
@return True if initialization went successfully, false otherwise
*/
public native @Cast("bool") boolean init( @ByVal Mat image, @Const @ByRef Rect2d boundingBox );
public native @Cast("bool") boolean init( @ByVal UMat image, @Const @ByRef Rect2d boundingBox );
public native @Cast("bool") boolean init( @ByVal GpuMat image, @Const @ByRef Rect2d boundingBox );
/** \brief Update the tracker, find the new most likely bounding box for the target
@param image The current frame
@param boundingBox The bounding box that represents the new target location if true was returned; not
modified otherwise
@return True means that the target was located and false means that the tracker cannot locate the target in the
current frame. Note that the latter *does not* imply that the tracker has failed; the target may simply be
missing from the frame (say, out of sight)
*/
public native @Cast("bool") boolean update( @ByVal Mat image, @ByRef Rect2d boundingBox );
public native @Cast("bool") boolean update( @ByVal UMat image, @ByRef Rect2d boundingBox );
public native @Cast("bool") boolean update( @ByVal GpuMat image, @ByRef Rect2d boundingBox );
public native void read( @Const @ByRef FileNode fn );
public native void write( @ByRef FileStorage fs );
}
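/** A minimal init/update loop, shown only as a sketch against this base class. It assumes
opencv_imgcodecs (or opencv_videoio) supplies the frames; the file names and the initial box are
placeholders, and TrackerMIL (declared further below) is used only as one concrete Tracker implementation.
{@code
// assumes: import static org.bytedeco.javacpp.opencv_imgcodecs.*;
Tracker tracker = TrackerMIL.create(new TrackerMIL.Params());
Mat first = imread("frame_0000.png");                // placeholder frame source
Rect2d box = new Rect2d(100, 80, 60, 40);            // placeholder initial bounding box
if (tracker.init(first, box)) {
    Mat next = imread("frame_0001.png");
    if (tracker.update(next, box)) {
        // box now holds the new location: box.x(), box.y(), box.width(), box.height()
    }
}
}
*/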
/************************************ Specific TrackerStateEstimator Classes ************************************/
/** \brief TrackerStateEstimator based on Boosting
*/
@Namespace("cv") @NoOffset public static class TrackerStateEstimatorMILBoosting extends TrackerStateEstimator {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerStateEstimatorMILBoosting(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerStateEstimatorMILBoosting(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerStateEstimatorMILBoosting position(long position) {
return (TrackerStateEstimatorMILBoosting)super.position(position);
}
/**
* Implementation of the target state for TrackerStateEstimatorMILBoosting
*/
@NoOffset public static class TrackerMILTargetState extends TrackerTargetState {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerMILTargetState(Pointer p) { super(p); }
/**
* \brief Constructor
* @param position Top left corner of the bounding box
* @param width Width of the bounding box
* @param height Height of the bounding box
* @param foreground label for target or background
* @param features features extracted
*/
public TrackerMILTargetState( @Const @ByRef Point2f position, int width, int height, @Cast("bool") boolean foreground, @Const @ByRef Mat features ) { super((Pointer)null); allocate(position, width, height, foreground, features); }
private native void allocate( @Const @ByRef Point2f position, int width, int height, @Cast("bool") boolean foreground, @Const @ByRef Mat features );
/**
* \brief Destructor
*/
/** \brief Set label: true for target foreground, false for background
@param foreground Label for background/foreground
*/
public native void setTargetFg( @Cast("bool") boolean foreground );
/** \brief Set the features extracted from TrackerFeatureSet
@param features The features extracted
*/
public native void setFeatures( @Const @ByRef Mat features );
/** \brief Get the label. Return true for target foreground, false for background
*/
public native @Cast("bool") boolean isTargetFg();
/** \brief Get the features extracted
*/
public native @ByVal Mat getFeatures();
}
/** \brief Constructor
@param nFeatures Number of features for each sample
*/
public TrackerStateEstimatorMILBoosting( int nFeatures/*=250*/ ) { super((Pointer)null); allocate(nFeatures); }
private native void allocate( int nFeatures/*=250*/ );
public TrackerStateEstimatorMILBoosting( ) { super((Pointer)null); allocate(); }
private native void allocate( );
/** \brief Set the current confidenceMap
@param confidenceMap The current ConfidenceMap
*/
public native void setCurrentConfidenceMap( @ByRef ConfidenceMap confidenceMap );
}
/** \brief TrackerStateEstimatorAdaBoosting based on AdaBoost
*/
@Namespace("cv") @NoOffset public static class TrackerStateEstimatorAdaBoosting extends TrackerStateEstimator {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerStateEstimatorAdaBoosting(Pointer p) { super(p); }
/** \brief Implementation of the target state for TrackerAdaBoostingTargetState
*/
@NoOffset public static class TrackerAdaBoostingTargetState extends TrackerTargetState {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerAdaBoostingTargetState(Pointer p) { super(p); }
/**
* \brief Constructor
* @param position Top left corner of the bounding box
* @param width Width of the bounding box
* @param height Height of the bounding box
* @param foreground label for target or background
* @param responses list of features
*/
public TrackerAdaBoostingTargetState( @Const @ByRef Point2f position, int width, int height, @Cast("bool") boolean foreground, @Const @ByRef Mat responses ) { super((Pointer)null); allocate(position, width, height, foreground, responses); }
private native void allocate( @Const @ByRef Point2f position, int width, int height, @Cast("bool") boolean foreground, @Const @ByRef Mat responses );
/**
* \brief Destructor
*/
/** \brief Set the features extracted from TrackerFeatureSet
@param responses The features extracted
*/
public native void setTargetResponses( @Const @ByRef Mat responses );
/** \brief Set label: true for target foreground, false for background
@param foreground Label for background/foreground
*/
public native void setTargetFg( @Cast("bool") boolean foreground );
/** \brief Get the features extracted
*/
public native @ByVal Mat getTargetResponses();
/** \brief Get the label. Return true for target foreground, false for background
*/
public native @Cast("bool") boolean isTargetFg();
}
/** \brief Constructor
@param numClassifer Number of base classifiers
@param initIterations Number of iterations in the initialization
@param nFeatures Number of features/weak classifiers
@param patchSize tracking rect
@param ROI initial ROI
*/
public TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, @ByVal Size patchSize, @Const @ByRef Rect ROI ) { super((Pointer)null); allocate(numClassifer, initIterations, nFeatures, patchSize, ROI); }
private native void allocate( int numClassifer, int initIterations, int nFeatures, @ByVal Size patchSize, @Const @ByRef Rect ROI );
/**
* \brief Destructor
*/
/** \brief Get the sampling ROI
*/
public native @ByVal Rect getSampleROI();
/** \brief Set the sampling ROI
@param ROI the sampling ROI
*/
public native void setSampleROI( @Const @ByRef Rect ROI );
/** \brief Set the current confidenceMap
@param confidenceMap The current ConfidenceMap
*/
public native void setCurrentConfidenceMap( @ByRef ConfidenceMap confidenceMap );
/** \brief Get the list of the selected weak classifiers for the classification step
*/
public native @StdVector IntPointer computeSelectedWeakClassifier();
/** \brief Get the list of the weak classifiers that should be replaced
*/
public native @StdVector IntPointer computeReplacedClassifier();
/** \brief Get the list of the weak classifiers that replace those to be replaced
*/
public native @StdVector IntPointer computeSwappedClassifier();
}
/**
* \brief TrackerStateEstimator based on SVM
*/
@Namespace("cv") public static class TrackerStateEstimatorSVM extends TrackerStateEstimator {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerStateEstimatorSVM(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerStateEstimatorSVM(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerStateEstimatorSVM position(long position) {
return (TrackerStateEstimatorSVM)super.position(position);
}
public TrackerStateEstimatorSVM() { super((Pointer)null); allocate(); }
private native void allocate();
}
/************************************ Specific TrackerSamplerAlgorithm Classes ************************************/
/** \brief TrackerSampler based on CSC (current state centered), used by MIL algorithm TrackerMIL
*/
@Namespace("cv") @NoOffset public static class TrackerSamplerCSC extends TrackerSamplerAlgorithm {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerSamplerCSC(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerSamplerCSC(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerSamplerCSC position(long position) {
return (TrackerSamplerCSC)super.position(position);
}
/** enum cv::TrackerSamplerCSC:: */
public static final int
/** mode for init positive samples */
MODE_INIT_POS = 1,
/** mode for init negative samples */
MODE_INIT_NEG = 2,
/** mode for update positive samples */
MODE_TRACK_POS = 3,
/** mode for update negative samples */
MODE_TRACK_NEG = 4,
/** mode for detect samples */
MODE_DETECT = 5;
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/** radius for gathering positive instances during init */
public native float initInRad(); public native Params initInRad(float initInRad);
/** radius for gathering positive instances during tracking */
public native float trackInPosRad(); public native Params trackInPosRad(float trackInPosRad);
/** size of search window */
public native float searchWinSize(); public native Params searchWinSize(float searchWinSize);
/** # negative samples to use during init */
public native int initMaxNegNum(); public native Params initMaxNegNum(int initMaxNegNum);
/** # positive samples to use during training */
public native int trackMaxPosNum(); public native Params trackMaxPosNum(int trackMaxPosNum);
/** # negative samples to use during training */
public native int trackMaxNegNum(); public native Params trackMaxNegNum(int trackMaxNegNum);
}
/** \brief Constructor
@param parameters TrackerSamplerCSC parameters TrackerSamplerCSC::Params
*/
public TrackerSamplerCSC( @Const @ByRef(nullValue = "cv::TrackerSamplerCSC::Params()") Params parameters ) { super((Pointer)null); allocate(parameters); }
private native void allocate( @Const @ByRef(nullValue = "cv::TrackerSamplerCSC::Params()") Params parameters );
public TrackerSamplerCSC( ) { super((Pointer)null); allocate(); }
private native void allocate( );
/** \brief Set the sampling mode of TrackerSamplerCSC
@param samplingMode The sampling mode
The modes are:
- "MODE_INIT_POS = 1" -- for the positive sampling in initialization step
- "MODE_INIT_NEG = 2" -- for the negative sampling in initialization step
- "MODE_TRACK_POS = 3" -- for the positive sampling in update step
- "MODE_TRACK_NEG = 4" -- for the negative sampling in update step
- "MODE_DETECT = 5" -- for the sampling in detection step
*/
public native void setMode( int samplingMode );
}
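/** A sketch (illustrative only) of configuring the CSC sampler directly. The parameter values are
arbitrary, and frame and boundingBox are assumed inputs supplied by the caller.
{@code
TrackerSamplerCSC.Params p = new TrackerSamplerCSC.Params();
p.initInRad(3.0f).searchWinSize(25.0f).initMaxNegNum(65);    // illustrative values
TrackerSamplerCSC csc = new TrackerSamplerCSC(p);
csc.setMode(TrackerSamplerCSC.MODE_INIT_POS);
MatVector samples = new MatVector();
csc.sampling(frame, boundingBox, samples);                   // frame: Mat, boundingBox: Rect (assumed inputs)
}
*/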
/** \brief TrackerSampler based on CS (current state), used by algorithm TrackerBoosting
*/
@Namespace("cv") @NoOffset public static class TrackerSamplerCS extends TrackerSamplerAlgorithm {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerSamplerCS(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerSamplerCS(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerSamplerCS position(long position) {
return (TrackerSamplerCS)super.position(position);
}
/** enum cv::TrackerSamplerCS:: */
public static final int
/** mode for positive samples */
MODE_POSITIVE = 1,
/** mode for negative samples */
MODE_NEGATIVE = 2,
/** mode for classify samples */
MODE_CLASSIFY = 3;
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/**overlapping for the search windows */
public native float overlap(); public native Params overlap(float overlap);
/**search region parameter */
public native float searchFactor(); public native Params searchFactor(float searchFactor);
}
/** \brief Constructor
@param parameters TrackerSamplerCS parameters TrackerSamplerCS::Params
*/
public TrackerSamplerCS( @Const @ByRef(nullValue = "cv::TrackerSamplerCS::Params()") Params parameters ) { super((Pointer)null); allocate(parameters); }
private native void allocate( @Const @ByRef(nullValue = "cv::TrackerSamplerCS::Params()") Params parameters );
public TrackerSamplerCS( ) { super((Pointer)null); allocate(); }
private native void allocate( );
/** \brief Set the sampling mode of TrackerSamplerCS
@param samplingMode The sampling mode
The modes are:
- "MODE_POSITIVE = 1" -- for the positive sampling
- "MODE_NEGATIVE = 2" -- for the negative sampling
- "MODE_CLASSIFY = 3" -- for the sampling in classification step
*/
public native void setMode( int samplingMode );
public native @Cast("bool") boolean samplingImpl( @Const @ByRef Mat image, @ByVal Rect boundingBox, @ByRef MatVector sample );
public native @ByVal Rect getROI();
}
/** \brief This sampler is based on particle filtering.
In principle, it can be thought of as performing some sort of optimization (and indeed, this
tracker uses OpenCV's optim module), where the tracker seeks to find the rectangle in the given frame
which is the most *"similar"* to the initial rectangle (the one given through the constructor).
The optimization performed is stochastic and somewhat resembles genetic algorithms: on each new
image received (submitted via TrackerSamplerPF::sampling()) we start with the region bounded by
boundingBox, then generate several "perturbed" boxes and keep the ones most similar to the original.
This selection round is repeated several times. At the end, we hope that only the most promising boxes
remain, and these are combined to produce the subrectangle of the image, which is put as the sole
element of the array sample.
It should be noted that the definition of "similarity" between two rectangles is based on comparing
their histograms. As experiments show, the tracker is *not* very successful if the target is assumed to
change its dimensions strongly.
*/
@Namespace("cv") @NoOffset public static class TrackerSamplerPF extends TrackerSamplerAlgorithm {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerSamplerPF(Pointer p) { super(p); }
/** \brief This structure contains all the parameters that can be varied during the course of the sampling
algorithm. Below is the structure exposed, together with its members briefly explained with
reference to the above discussion on the algorithm's working.
*/
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/** number of selection rounds */
public native int iterationNum(); public native Params iterationNum(int iterationNum);
/** number of "perturbed" boxes on each round */
public native int particlesNum(); public native Params particlesNum(int particlesNum);
/** with each new round we exponentially decrease the amount of "perturbing" we allow (as in simulated annealing),
* and this alpha controls how fast the annealing happens, i.e. how fast the perturbing decreases */
public native double alpha(); public native Params alpha(double alpha);
}
/** \brief Constructor
@param chosenRect Initial rectangle that is supposed to contain the target we'd like to track.
@param parameters
*/
public TrackerSamplerPF(@Const @ByRef Mat chosenRect,@Const @ByRef(nullValue = "cv::TrackerSamplerPF::Params()") Params parameters) { super((Pointer)null); allocate(chosenRect, parameters); }
private native void allocate(@Const @ByRef Mat chosenRect,@Const @ByRef(nullValue = "cv::TrackerSamplerPF::Params()") Params parameters);
public TrackerSamplerPF(@Const @ByRef Mat chosenRect) { super((Pointer)null); allocate(chosenRect); }
private native void allocate(@Const @ByRef Mat chosenRect);
}
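/** A hedged sketch of the PF sampler. Per the constructor documentation above, chosenRect describes the
initial target region; the parameter values here are only illustrative, and frame and boundingBox are
assumed inputs supplied by the caller.
{@code
TrackerSamplerPF.Params p = new TrackerSamplerPF.Params();
p.iterationNum(20).particlesNum(100).alpha(0.9);             // illustrative values
TrackerSamplerPF pf = new TrackerSamplerPF(chosenRect, p);   // chosenRect: Mat with the initial target region (see constructor docs)
MatVector sample = new MatVector();
pf.sampling(frame, boundingBox, sample);                     // frame: Mat, boundingBox: Rect (assumed inputs)
}
*/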
/************************************ Specific TrackerFeature Classes ************************************/
/**
* \brief TrackerFeature based on Feature2D
*/
@Namespace("cv") @NoOffset public static class TrackerFeatureFeature2d extends TrackerFeature {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerFeatureFeature2d(Pointer p) { super(p); }
/**
* \brief Constructor
* @param detectorType string of FeatureDetector
* @param descriptorType string of DescriptorExtractor
*/
public TrackerFeatureFeature2d( @Str BytePointer detectorType, @Str BytePointer descriptorType ) { super((Pointer)null); allocate(detectorType, descriptorType); }
private native void allocate( @Str BytePointer detectorType, @Str BytePointer descriptorType );
public TrackerFeatureFeature2d( @Str String detectorType, @Str String descriptorType ) { super((Pointer)null); allocate(detectorType, descriptorType); }
private native void allocate( @Str String detectorType, @Str String descriptorType );
public native void selection( @ByRef Mat response, int npoints );
}
/**
* \brief TrackerFeature based on HOG
*/
@Namespace("cv") public static class TrackerFeatureHOG extends TrackerFeature {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerFeatureHOG(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerFeatureHOG(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerFeatureHOG position(long position) {
return (TrackerFeatureHOG)super.position(position);
}
public TrackerFeatureHOG() { super((Pointer)null); allocate(); }
private native void allocate();
public native void selection( @ByRef Mat response, int npoints );
}
/** \brief TrackerFeature based on HAAR features, used by TrackerMIL and many other algorithms
\note HAAR features implementation is copied from apps/traincascade and modified according to MIL
*/
@Namespace("cv") @NoOffset public static class TrackerFeatureHAAR extends TrackerFeature {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerFeatureHAAR(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerFeatureHAAR(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerFeatureHAAR position(long position) {
return (TrackerFeatureHAAR)super.position(position);
}
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/** # of rects */
public native int numFeatures(); public native Params numFeatures(int numFeatures);
/** rect size */
public native @ByRef Size rectSize(); public native Params rectSize(Size rectSize);
/** true if input images are integral, false otherwise */
public native @Cast("bool") boolean isIntegral(); public native Params isIntegral(boolean isIntegral);
}
/** \brief Constructor
@param parameters TrackerFeatureHAAR parameters TrackerFeatureHAAR::Params
*/
public TrackerFeatureHAAR( @Const @ByRef(nullValue = "cv::TrackerFeatureHAAR::Params()") Params parameters ) { super((Pointer)null); allocate(parameters); }
private native void allocate( @Const @ByRef(nullValue = "cv::TrackerFeatureHAAR::Params()") Params parameters );
public TrackerFeatureHAAR( ) { super((Pointer)null); allocate(); }
private native void allocate( );
/** \brief Compute the features only for the selected indices in the images collection
@param selFeatures indices of selected features
@param images The images
@param response Collection of response for the specific TrackerFeature
*/
public native @Cast("bool") boolean extractSelected( @StdVector IntPointer selFeatures, @Const @ByRef MatVector images, @ByRef Mat response );
public native @Cast("bool") boolean extractSelected( @StdVector IntBuffer selFeatures, @Const @ByRef MatVector images, @ByRef Mat response );
public native @Cast("bool") boolean extractSelected( @StdVector int[] selFeatures, @Const @ByRef MatVector images, @ByRef Mat response );
/** \brief Identify most effective features
@param response Collection of response for the specific TrackerFeature
@param npoints Max number of features
\note This method modifies the response parameter
*/
public native void selection( @ByRef Mat response, int npoints );
/** \brief Swap the feature in position source with the feature in position target
@param source The source position
@param target The target position
*/
public native @Cast("bool") boolean swapFeature( int source, int target );
/** \brief Swap the feature in position id with the feature input
@param id The position
@param feature The feature
*/
public native @Cast("bool") boolean swapFeature( int id, @ByRef CvHaarEvaluator.FeatureHaar feature );
/** \brief Get the feature in position id
@param id The position
*/
public native @ByRef CvHaarEvaluator.FeatureHaar getFeatureAt( int id );
}
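// A minimal configuration sketch for TrackerFeatureHAAR built from the Params setters above; the
// feature count of 250 and the 30x30 patch size are illustrative placeholders, and Size comes from
// the opencv_core bindings. The chained setters return the Params instance, so they can be written fluently:
//
//   TrackerFeatureHAAR.Params haarParams = new TrackerFeatureHAAR.Params();
//   haarParams.numFeatures(250).rectSize(new Size(30, 30)).isIntegral(false);
//   TrackerFeatureHAAR haarFeature = new TrackerFeatureHAAR(haarParams);
//   CvHaarEvaluator.FeatureHaar first = haarFeature.getFeatureAt(0);   // inspect one generated HAAR feature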
/**
* \brief TrackerFeature based on LBP
*/
@Namespace("cv") public static class TrackerFeatureLBP extends TrackerFeature {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerFeatureLBP(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public TrackerFeatureLBP(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public TrackerFeatureLBP position(long position) {
return (TrackerFeatureLBP)super.position(position);
}
public TrackerFeatureLBP() { super((Pointer)null); allocate(); }
private native void allocate();
public native void selection( @ByRef Mat response, int npoints );
}
/************************************ Specific Tracker Classes ************************************/
/** \brief The MIL algorithm trains a classifier in an online manner to separate the object from the
background.
Multiple Instance Learning avoids the drift problem for robust tracking. The implementation is
based on \cite MIL .
Original code can be found here
*/
@Namespace("cv") public static class TrackerMIL extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerMIL(Pointer p) { super(p); }
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
//parameters for sampler
/** radius for gathering positive instances during init */
public native float samplerInitInRadius(); public native Params samplerInitInRadius(float samplerInitInRadius);
/** # negative samples to use during init */
public native int samplerInitMaxNegNum(); public native Params samplerInitMaxNegNum(int samplerInitMaxNegNum);
/** size of search window */
public native float samplerSearchWinSize(); public native Params samplerSearchWinSize(float samplerSearchWinSize);
/** radius for gathering positive instances during tracking */
public native float samplerTrackInRadius(); public native Params samplerTrackInRadius(float samplerTrackInRadius);
/** # positive samples to use during tracking */
public native int samplerTrackMaxPosNum(); public native Params samplerTrackMaxPosNum(int samplerTrackMaxPosNum);
/** # negative samples to use during tracking */
public native int samplerTrackMaxNegNum(); public native Params samplerTrackMaxNegNum(int samplerTrackMaxNegNum);
/** # features */
public native int featureSetNumFeatures(); public native Params featureSetNumFeatures(int featureSetNumFeatures);
public native void read( @Const @ByRef FileNode fn );
public native void write( @ByRef FileStorage fs );
}
/** \brief Constructor
@param parameters MIL parameters TrackerMIL::Params
*/
public static native @Ptr TrackerMIL create(@Const @ByRef Params parameters);
public static native @Ptr TrackerMIL create();
}
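// A minimal usage sketch, assuming Mat objects named frame/nextFrame holding consecutive video frames,
// a Rect2d named roi marking the object, and the init(Mat, Rect2d)/update(Mat, Rect2d) methods that the
// Tracker base class bindings (generated earlier in this file) expose as in OpenCV 3.4; the parameter
// values below are illustrative, not tuned:
//
//   TrackerMIL.Params milParams = new TrackerMIL.Params();
//   milParams.samplerSearchWinSize(25.0f).featureSetNumFeatures(250);
//   TrackerMIL mil = TrackerMIL.create(milParams);
//   mil.init(frame, roi);                            // learn the initial appearance model
//   Rect2d found = new Rect2d();
//   boolean located = mil.update(nextFrame, found);  // found is filled in when located is true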
/** \brief This is a real-time object tracker based on a novel on-line version of the AdaBoost algorithm.
The classifier uses the surrounding background as negative examples in the update step to avoid the
drifting problem. The implementation is based on \cite OLB .
*/
@Namespace("cv") public static class TrackerBoosting extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerBoosting(Pointer p) { super(p); }
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/**the number of classifiers to use in an OnlineBoosting algorithm */
public native int numClassifiers(); public native Params numClassifiers(int numClassifiers);
/**search region parameters to use in an OnlineBoosting algorithm */
public native float samplerOverlap(); public native Params samplerOverlap(float samplerOverlap);
/** search region parameters to use in an OnlineBoosting algorithm */
public native float samplerSearchFactor(); public native Params samplerSearchFactor(float samplerSearchFactor);
/**the initial iterations */
public native int iterationInit(); public native Params iterationInit(int iterationInit);
/** # features */
public native int featureSetNumFeatures(); public native Params featureSetNumFeatures(int featureSetNumFeatures);
/**
* \brief Read parameters from a file
*/
public native void read( @Const @ByRef FileNode fn );
/**
* \brief Write parameters to a file
*/
public native void write( @ByRef FileStorage fs );
}
/** \brief Constructor
@param parameters BOOSTING parameters TrackerBoosting::Params
*/
public static native @Ptr TrackerBoosting create(@Const @ByRef Params parameters);
public static native @Ptr TrackerBoosting create();
}
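// A configuration sketch for the Boosting tracker using the Params setters above; the values below
// are illustrative placeholders, not tuned settings:
//
//   TrackerBoosting.Params boostParams = new TrackerBoosting.Params();
//   boostParams.numClassifiers(100).iterationInit(50).featureSetNumFeatures(1050);
//   TrackerBoosting boosting = TrackerBoosting.create(boostParams);
//   // initialization and per-frame updates then go through the Tracker base class,
//   // as in the TrackerMIL sketch above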
/** \brief Median Flow tracker implementation.
Implementation of the paper \cite MedianFlow .
The tracker is suitable for very smooth and predictable movements when the object is visible throughout
the whole sequence. It is quite accurate for this type of problem (in particular, it was shown
by the authors to outperform MIL). During the implementation period the code at
, courtesy of the author Arthur Amarra, was used for
reference purposes.
*/
@Namespace("cv") public static class TrackerMedianFlow extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerMedianFlow(Pointer p) { super(p); }
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
/**default constructor
* note that the default parameter values are recommended for most use cases */
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/**square root of the number of keypoints used; increase it to trade
* speed for accuracy */
public native int pointsInGrid(); public native Params pointsInGrid(int pointsInGrid);
/**window size parameter for Lucas-Kanade optical flow */
public native @ByRef Size winSize(); public native Params winSize(Size winSize);
/**maximal pyramid level number for Lucas-Kanade optical flow */
public native int maxLevel(); public native Params maxLevel(int maxLevel);
/**termination criteria for Lucas-Kanade optical flow */
public native @ByRef TermCriteria termCriteria(); public native Params termCriteria(TermCriteria termCriteria);
/**window size around a point for normalized cross-correlation check */
public native @ByRef Size winSizeNCC(); public native Params winSizeNCC(Size winSizeNCC);
/**criterion for losing the tracked object */
public native double maxMedianLengthOfDisplacementDifference(); public native Params maxMedianLengthOfDisplacementDifference(double maxMedianLengthOfDisplacementDifference);
public native void read( @Const @ByRef FileNode arg0 );
public native void write( @ByRef FileStorage arg0 );
}
/** \brief Constructor
@param parameters Median Flow parameters TrackerMedianFlow::Params
*/
public static native @Ptr TrackerMedianFlow create(@Const @ByRef Params parameters);
public static native @Ptr TrackerMedianFlow create();
}
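// A configuration sketch for Median Flow using the Params setters above; the grid size, optical-flow
// window size and pyramid depth below are illustrative values, and Size comes from the opencv_core bindings:
//
//   TrackerMedianFlow.Params mfParams = new TrackerMedianFlow.Params();
//   mfParams.pointsInGrid(10)
//           .winSize(new Size(3, 3))
//           .maxLevel(5);
//   TrackerMedianFlow medianFlow = TrackerMedianFlow.create(mfParams);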
/** \brief TLD is a novel tracking framework that explicitly decomposes the long-term tracking task into
tracking, learning and detection.
The tracker follows the object from frame to frame. The detector localizes all appearances that
have been observed so far and corrects the tracker if necessary. The learning component estimates the detector's
errors and updates it to avoid these errors in the future. The implementation is based on \cite TLD .
The Median Flow algorithm (see cv::TrackerMedianFlow) was chosen as the tracking component in this
implementation, following the authors. The tracker is supposed to be able to handle rapid motions, partial
occlusions, object absence, etc.
*/
@Namespace("cv") public static class TrackerTLD extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerTLD(Pointer p) { super(p); }
public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
public native void read( @Const @ByRef FileNode arg0 );
public native void write( @ByRef FileStorage arg0 );
}
/** \brief Constructor
@param parameters TLD parameters TrackerTLD::Params
*/
public static native @Ptr TrackerTLD create(@Const @ByRef Params parameters);
public static native @Ptr TrackerTLD create();
}
/** \brief KCF is a novel tracking framework that utilizes properties of circulant matrices to enhance the processing speed.
* This tracking method is an implementation of \cite KCF_ECCV which is extended to KCF with color-names features (\cite KCF_CN).
* The original paper of KCF is available at
* as well as the matlab implementation. For more information about KCF with color-names features, please refer to
* .
*/
@Namespace("cv") public static class TrackerKCF extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerKCF(Pointer p) { super(p); }
/**
* \brief Feature type to be used in the tracking: grayscale, colornames, compressed color-names.
* The modes available now:
- "GRAY" -- Use grayscale values as the feature
- "CN" -- Color-names feature
*/
/** enum cv::TrackerKCF::MODE */
public static final int
GRAY = (1 << 0),
CN = (1 << 1),
CUSTOM = (1 << 2);
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
/**
* \brief Constructor
*/
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/**
* \brief Read parameters from a file
*/
public native void read(@Const @ByRef FileNode arg0);
/**
* \brief Write parameters to a file
*/
public native void write(@ByRef FileStorage arg0);
/** detection confidence threshold */
public native float detect_thresh(); public native Params detect_thresh(float detect_thresh);
/** gaussian kernel bandwidth */
public native float sigma(); public native Params sigma(float sigma);
/** regularization */
public native float lambda(); public native Params lambda(float lambda);
/** linear interpolation factor for adaptation */
public native float interp_factor(); public native Params interp_factor(float interp_factor);
/** spatial bandwidth (proportional to target) */
public native float output_sigma_factor(); public native Params output_sigma_factor(float output_sigma_factor);
/** compression learning rate */
public native float pca_learning_rate(); public native Params pca_learning_rate(float pca_learning_rate);
/** activate the resize feature to improve the processing speed */
public native @Cast("bool") boolean resize(); public native Params resize(boolean resize);
/** split the training coefficients into two matrices */
public native @Cast("bool") boolean split_coeff(); public native Params split_coeff(boolean split_coeff);
/** wrap around the kernel values */
public native @Cast("bool") boolean wrap_kernel(); public native Params wrap_kernel(boolean wrap_kernel);
/** activate the pca method to compress the features */
public native @Cast("bool") boolean compress_feature(); public native Params compress_feature(boolean compress_feature);
/** threshold for the ROI size */
public native int max_patch_size(); public native Params max_patch_size(int max_patch_size);
/** feature size after compression */
public native int compressed_size(); public native Params compressed_size(int compressed_size);
/** compressed descriptors of TrackerKCF::MODE */
public native int desc_pca(); public native Params desc_pca(int desc_pca);
/** non-compressed descriptors of TrackerKCF::MODE */
public native int desc_npca(); public native Params desc_npca(int desc_npca);
}
public static class Arg0_Mat_Rect_Mat extends FunctionPointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Arg0_Mat_Rect_Mat(Pointer p) { super(p); }
protected Arg0_Mat_Rect_Mat() { allocate(); }
private native void allocate();
public native void call(@Const @ByVal Mat arg0, @Const @ByVal Rect arg1, @ByRef Mat arg2);
}
public native void setFeatureExtractor(Arg0_Mat_Rect_Mat arg0, @Cast("bool") boolean pca_func/*=false*/);
public native void setFeatureExtractor(Arg0_Mat_Rect_Mat arg0);
/** \brief Constructor
@param parameters KCF parameters TrackerKCF::Params
*/
public static native @Ptr TrackerKCF create(@Const @ByRef Params parameters);
public static native @Ptr TrackerKCF create();
}
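// A configuration sketch for KCF using the MODE constants above: desc_pca selects the descriptors that
// go through PCA compression and desc_npca the uncompressed ones. Combining GRAY and CN with a bitwise OR
// and compressing down to 2 channels are illustrative choices, not recommended defaults:
//
//   TrackerKCF.Params kcfParams = new TrackerKCF.Params();
//   kcfParams.desc_pca(TrackerKCF.GRAY | TrackerKCF.CN)   // compress grayscale + color-names features
//            .desc_npca(0)                                // no uncompressed descriptors
//            .compress_feature(true)
//            .compressed_size(2);
//   TrackerKCF kcf = TrackerKCF.create(kcfParams);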
/** \brief GOTURN (\cite GOTURN) is a kind of tracker based on Convolutional Neural Networks (CNN). While retaining the advantages of CNN trackers,
 * GOTURN is much faster because it is trained offline and requires no online fine-tuning.
 * The GOTURN tracker addresses the problem of single-target tracking: given a bounding box label of an object in the first frame of the video,
 * we track that object through the rest of the video. NOTE: The current GOTURN method does not handle occlusions; however, it is fairly
 * robust to viewpoint changes, lighting changes, and deformations.
 * The inputs of GOTURN are two RGB patches representing the Target and Search patches, resized to 227x227.
 * The outputs of GOTURN are the predicted bounding box coordinates, relative to the Search patch coordinate system, in the format X1,Y1,X2,Y2.
 * The original paper is here:
 * as well as the original authors' implementation:
 * The implementation of the training algorithm is placed separately due to third-party dependencies:
 *
 * The GOTURN architecture goturn.prototxt and the trained model goturn.caffemodel are accessible in the opencv_extra GitHub repository.
*/
@Namespace("cv") public static class TrackerGOTURN extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerGOTURN(Pointer p) { super(p); }
public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
public native void read(@Const @ByRef FileNode arg0);
public native void write(@ByRef FileStorage arg0);
}
/** \brief Constructor
@param parameters GOTURN parameters TrackerGOTURN::Params
*/
public static native @Ptr TrackerGOTURN create(@Const @ByRef Params parameters);
public static native @Ptr TrackerGOTURN create();
}
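// A minimal usage sketch; as noted above, the goturn.prototxt architecture file and the goturn.caffemodel
// weights from opencv_extra must be available to the tracker at run time (typically in the working
// directory), otherwise loading the network fails. frame/roi/box are assumed as in the TrackerMIL sketch:
//
//   TrackerGOTURN goturn = TrackerGOTURN.create();
//   goturn.init(frame, roi);
//   boolean located = goturn.update(nextFrame, box);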
/** \brief the MOSSE tracker
Note that this tracker works with grayscale images; if BGR images are passed, they will be converted internally.
\cite MOSSE Visual Object Tracking using Adaptive Correlation Filters
*/
@Namespace("cv") public static class TrackerMOSSE extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerMOSSE(Pointer p) { super(p); }
/** \brief Constructor
*/
public static native @Ptr TrackerMOSSE create();
}
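// A minimal usage sketch; color frames can be passed directly since, as noted above, BGR input is
// converted to grayscale internally. bgrFrame/nextBgrFrame, roi and box are caller-provided, as in the
// TrackerMIL sketch:
//
//   TrackerMOSSE mosse = TrackerMOSSE.create();
//   mosse.init(bgrFrame, roi);
//   boolean ok = mosse.update(nextBgrFrame, box);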
/************************************ MultiTracker Class ---By Laksono Kurnianggoro--- ************************************/
/** \brief This class is used to track multiple objects using the specified tracker algorithm.
* The MultiTracker is a naive implementation of multiple object tracking.
* It processes the tracked objects independently without any optimization across the tracked objects.
*/
@Namespace("cv") @NoOffset public static class MultiTracker extends Algorithm {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public MultiTracker(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public MultiTracker(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public MultiTracker position(long position) {
return (MultiTracker)super.position(position);
}
/**
* \brief Constructor.
*/
public MultiTracker() { super((Pointer)null); allocate(); }
private native void allocate();
/**
* \brief Destructor
*/
/**
* \brief Add a new object to be tracked.
*
* @param newTracker tracking algorithm to be used
* @param image input image
* @param boundingBox a rectangle represents ROI of the tracked object
*/
public native @Cast("bool") boolean add(@Ptr @ByVal Tracker newTracker, @ByVal Mat image, @Const @ByRef Rect2d boundingBox);
public native @Cast("bool") boolean add(@Ptr @ByVal Tracker newTracker, @ByVal UMat image, @Const @ByRef Rect2d boundingBox);
public native @Cast("bool") boolean add(@Ptr @ByVal Tracker newTracker, @ByVal GpuMat image, @Const @ByRef Rect2d boundingBox);
/**
* \brief Add a set of objects to be tracked.
* @param newTrackers list of tracking algorithms to be used
* @param image input image
* @param boundingBox list of the tracked objects
*/
public native @Cast("bool") boolean add(@ByVal TrackerVector newTrackers, @ByVal Mat image, @ByVal Rect2dVector boundingBox);
public native @Cast("bool") boolean add(@ByVal TrackerVector newTrackers, @ByVal UMat image, @ByVal Rect2dVector boundingBox);
public native @Cast("bool") boolean add(@ByVal TrackerVector newTrackers, @ByVal GpuMat image, @ByVal Rect2dVector boundingBox);
/**
* \brief Update the current tracking status.
* The result will be saved in the internal storage.
* @param image input image
*/
public native @Cast("bool") boolean update(@ByVal Mat image);
public native @Cast("bool") boolean update(@ByVal UMat image);
public native @Cast("bool") boolean update(@ByVal GpuMat image);
/**
* \brief Update the current tracking status.
* @param image input image
* @param boundingBox the tracking result, representing a list of ROIs of the tracked objects.
*/
public native @Cast("bool") boolean update(@ByVal Mat image, @ByRef Rect2dVector boundingBox);
public native @Cast("bool") boolean update(@ByVal UMat image, @ByRef Rect2dVector boundingBox);
public native @Cast("bool") boolean update(@ByVal GpuMat image, @ByRef Rect2dVector boundingBox);
/**
* \brief Returns a reference to the storage of tracked objects; each object corresponds to one tracker algorithm
*/
public native @Const @ByRef Rect2dVector getObjects();
/**
* \brief Returns a pointer to a new instance of MultiTracker
*/
public static native @Ptr MultiTracker create();
}
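// A usage sketch for tracking several objects at once; frame/nextFrame, roi1 and roi2 are assumed to be
// caller-provided Mat frames and Rect2d regions, and Rect2dVector is assumed to follow the same
// std::vector wrapper pattern as the other vector adapters generated in this file:
//
//   MultiTracker multi = MultiTracker.create();
//   multi.add(TrackerKCF.create(), frame, roi1);
//   multi.add(TrackerCSRT.create(), frame, roi2);
//   Rect2dVector boxes = new Rect2dVector();
//   if (multi.update(nextFrame, boxes)) {
//       for (long i = 0; i < boxes.size(); i++) {
//           Rect2d box = boxes.get(i);   // one result per added tracker, in insertion order
//       }
//   }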
/************************************ Multi-Tracker Classes ---By Tyan Vladimir---************************************/
/** \brief Base abstract class for the long-term Multi Object Trackers:
\sa Tracker, MultiTrackerTLD
*/
@Namespace("cv") @NoOffset public static class MultiTracker_Alt extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public MultiTracker_Alt(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public MultiTracker_Alt(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public MultiTracker_Alt position(long position) {
return (MultiTracker_Alt)super.position(position);
}
/** \brief Constructor for Multitracker
*/
public MultiTracker_Alt() { super((Pointer)null); allocate(); }
private native void allocate();
/** \brief Add a new target to the tracking-list and initialize the tracker with a known bounding box that surrounds the target
@param image The initial frame
@param boundingBox The initial bounding box of the target
@param tracker_algorithm Multi-tracker algorithm
@return True if the new target was initialized successfully, false otherwise
*/
public native @Cast("bool") boolean addTarget(@ByVal Mat image, @Const @ByRef Rect2d boundingBox, @Ptr @ByVal Tracker tracker_algorithm);
public native @Cast("bool") boolean addTarget(@ByVal UMat image, @Const @ByRef Rect2d boundingBox, @Ptr @ByVal Tracker tracker_algorithm);
public native @Cast("bool") boolean addTarget(@ByVal GpuMat image, @Const @ByRef Rect2d boundingBox, @Ptr @ByVal Tracker tracker_algorithm);
/** \brief Update all trackers from the tracking-list and find the new most likely bounding boxes for the targets
@param image The current frame
@return True means that all targets were located; false means that the tracker could not locate one of the targets in the
current frame. Note that the latter *does not* imply that the tracker has failed; the target may simply be
missing from the frame (say, out of sight)
*/
public native @Cast("bool") boolean update(@ByVal Mat image);
public native @Cast("bool") boolean update(@ByVal UMat image);
public native @Cast("bool") boolean update(@ByVal GpuMat image);
/** \brief Current number of targets in tracking-list
*/
public native int targetNum(); public native MultiTracker_Alt targetNum(int targetNum);
/** \brief Trackers list for Multi-Object-Tracker
*/
public native @ByRef TrackerVector trackers(); public native MultiTracker_Alt trackers(TrackerVector trackers);
/** \brief Bounding Boxes list for Multi-Object-Tracker
*/
public native @ByRef Rect2dVector boundingBoxes(); public native MultiTracker_Alt boundingBoxes(Rect2dVector boundingBoxes);
/** \brief List of randomly generated colors for bounding boxes display
*/
public native @ByRef ScalarVector colors(); public native MultiTracker_Alt colors(ScalarVector colors);
}
/** \brief Multi Object Tracker for TLD. TLD is a novel tracking framework that explicitly decomposes
the long-term tracking task into tracking, learning and detection.
The tracker follows the object from frame to frame. The detector localizes all appearances that
have been observed so far and corrects the tracker if necessary. The learning component estimates the detector's
errors and updates it to avoid these errors in the future. The implementation is based on \cite TLD .
The Median Flow algorithm (see cv::TrackerMedianFlow) was chosen as the tracking component in this
implementation, following the authors. The tracker is supposed to be able to handle rapid motions, partial
occlusions, object absence, etc.
\sa Tracker, MultiTracker, TrackerTLD
*/
@Namespace("cv") public static class MultiTrackerTLD extends MultiTracker_Alt {
static { Loader.load(); }
/** Default native constructor. */
public MultiTrackerTLD() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public MultiTrackerTLD(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public MultiTrackerTLD(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public MultiTrackerTLD position(long position) {
return (MultiTrackerTLD)super.position(position);
}
/** \brief Update all trackers from the tracking-list and find the new most likely bounding boxes for the targets using an
optimized update method with techniques that speed up calculations specifically for multi-object TLD. The only limitation
is that all target bounding boxes should have approximately the same aspect ratio. The speed boost is around 20%.
@param image The current frame.
@return True means that all targets were located; false means that the tracker could not locate one of the targets in the
current frame. Note that the latter *does not* imply that the tracker has failed; the target may simply be
missing from the frame (say, out of sight)
*/
public native @Cast("bool") boolean update_opt(@ByVal Mat image);
public native @Cast("bool") boolean update_opt(@ByVal UMat image);
public native @Cast("bool") boolean update_opt(@ByVal GpuMat image);
}
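// A usage sketch for the optimized multi-object TLD update; each target is registered with its own
// TrackerTLD instance, and the per-target results are read back from the inherited boundingBoxes() list.
// frame/nextFrame, roi1 and roi2 are assumed to be caller-provided:
//
//   MultiTrackerTLD multiTld = new MultiTrackerTLD();
//   multiTld.addTarget(frame, roi1, TrackerTLD.create());
//   multiTld.addTarget(frame, roi2, TrackerTLD.create());
//   if (multiTld.update_opt(nextFrame)) {
//       Rect2dVector results = multiTld.boundingBoxes();
//   }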
/** \} */
/*********************************** CSRT ************************************/
/** \brief Discriminative Correlation Filter Tracker with Channel and Spatial Reliability
*/
@Namespace("cv") public static class TrackerCSRT extends Tracker {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public TrackerCSRT(Pointer p) { super(p); }
@NoOffset public static class Params extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Params(Pointer p) { super(p); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Params(long size) { super((Pointer)null); allocateArray(size); }
private native void allocateArray(long size);
@Override public Params position(long position) {
return (Params)super.position(position);
}
/**
* \brief Constructor
*/
public Params() { super((Pointer)null); allocate(); }
private native void allocate();
/**
* \brief Read parameters from a file
*/
public native void read(@Const @ByRef FileNode arg0);
/**
* \brief Write parameters to a file
*/
public native void write(@ByRef FileStorage fs);
public native @Cast("bool") boolean use_hog(); public native Params use_hog(boolean use_hog);
public native @Cast("bool") boolean use_color_names(); public native Params use_color_names(boolean use_color_names);
public native @Cast("bool") boolean use_gray(); public native Params use_gray(boolean use_gray);
public native @Cast("bool") boolean use_rgb(); public native Params use_rgb(boolean use_rgb);
public native @Cast("bool") boolean use_channel_weights(); public native Params use_channel_weights(boolean use_channel_weights);
public native @Cast("bool") boolean use_segmentation(); public native Params use_segmentation(boolean use_segmentation);
/** Window function: "hann", "cheb", "kaiser" */
public native @StdString BytePointer window_function(); public native Params window_function(BytePointer window_function);
public native float kaiser_alpha(); public native Params kaiser_alpha(float kaiser_alpha);
public native float cheb_attenuation(); public native Params cheb_attenuation(float cheb_attenuation);
public native float template_size(); public native Params template_size(float template_size);
public native float gsl_sigma(); public native Params gsl_sigma(float gsl_sigma);
public native float hog_orientations(); public native Params hog_orientations(float hog_orientations);
public native float hog_clip(); public native Params hog_clip(float hog_clip);
public native float padding(); public native Params padding(float padding);
public native float filter_lr(); public native Params filter_lr(float filter_lr);
public native float weights_lr(); public native Params weights_lr(float weights_lr);
public native int num_hog_channels_used(); public native Params num_hog_channels_used(int num_hog_channels_used);
public native int admm_iterations(); public native Params admm_iterations(int admm_iterations);
public native int histogram_bins(); public native Params histogram_bins(int histogram_bins);
public native float histogram_lr(); public native Params histogram_lr(float histogram_lr);
public native int background_ratio(); public native Params background_ratio(int background_ratio);
public native int number_of_scales(); public native Params number_of_scales(int number_of_scales);
public native float scale_sigma_factor(); public native Params scale_sigma_factor(float scale_sigma_factor);
public native float scale_model_max_area(); public native Params scale_model_max_area(float scale_model_max_area);
public native float scale_lr(); public native Params scale_lr(float scale_lr);
public native float scale_step(); public native Params scale_step(float scale_step);
}
/** \brief Constructor
@param parameters CSRT parameters TrackerCSRT::Params
*/
public static native @Ptr TrackerCSRT create(@Const @ByRef Params parameters);
public static native @Ptr TrackerCSRT create();
public native void setInitialMask(@Const @ByVal Mat mask);
}
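// A configuration sketch for CSRT using the Params setters above; the channel choices and the "hann"
// window function are illustrative, initMask is assumed to be a caller-provided single-channel Mat
// marking the target pixels, and BytePointer comes from the JavaCPP core classes:
//
//   TrackerCSRT.Params csrtParams = new TrackerCSRT.Params();
//   csrtParams.use_hog(true)
//             .use_color_names(true)
//             .window_function(new BytePointer("hann"));
//   TrackerCSRT csrt = TrackerCSRT.create(csrtParams);
//   csrt.setInitialMask(initMask);   // optional spatial reliability mask for the target region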
/* namespace cv */
// #endif
}