/*
 * Copyright (c) 2011-2016, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.factory.feature.tracker;

import boofcv.abst.feature.associate.*;
import boofcv.abst.feature.describe.ConfigSurfDescribe;
import boofcv.abst.feature.describe.DescribeRegionPoint;
import boofcv.abst.feature.describe.WrapDescribeBrief;
import boofcv.abst.feature.describe.WrapDescribePixelRegionNCC;
import boofcv.abst.feature.detdesc.DetectDescribeFusion;
import boofcv.abst.feature.detdesc.DetectDescribePoint;
import boofcv.abst.feature.detect.interest.ConfigFast;
import boofcv.abst.feature.detect.interest.ConfigFastHessian;
import boofcv.abst.feature.detect.interest.ConfigGeneralDetector;
import boofcv.abst.feature.detect.interest.InterestPointDetector;
import boofcv.abst.feature.orientation.ConfigAverageIntegral;
import boofcv.abst.feature.orientation.ConfigSlidingIntegral;
import boofcv.abst.feature.orientation.OrientationImage;
import boofcv.abst.feature.orientation.OrientationIntegral;
import boofcv.abst.feature.tracker.*;
import boofcv.abst.filter.derivative.ImageGradient;
import boofcv.alg.feature.associate.AssociateSurfBasic;
import boofcv.alg.feature.describe.DescribePointBrief;
import boofcv.alg.feature.describe.DescribePointPixelRegionNCC;
import boofcv.alg.feature.describe.DescribePointSurf;
import boofcv.alg.feature.describe.brief.FactoryBriefDefinition;
import boofcv.alg.feature.detect.intensity.FastCornerIntensity;
import boofcv.alg.feature.detect.intensity.GradientCornerIntensity;
import boofcv.alg.feature.detect.intensity.ShiTomasiCornerIntensity;
import boofcv.alg.feature.detect.interest.EasyGeneralFeatureDetector;
import boofcv.alg.feature.detect.interest.GeneralFeatureDetector;
import boofcv.alg.filter.derivative.GImageDerivativeOps;
import boofcv.alg.interpolate.InterpolateRectangle;
import boofcv.alg.tracker.combined.CombinedTrackerScalePoint;
import boofcv.alg.tracker.klt.PkltConfig;
import boofcv.alg.transform.ii.GIntegralImageOps;
import boofcv.factory.feature.associate.FactoryAssociation;
import boofcv.factory.feature.describe.FactoryDescribePointAlgs;
import boofcv.factory.feature.describe.FactoryDescribeRegionPoint;
import boofcv.factory.feature.detdesc.FactoryDetectDescribe;
import boofcv.factory.feature.detect.intensity.FactoryIntensityPointAlg;
import boofcv.factory.feature.detect.interest.FactoryDetectPoint;
import boofcv.factory.feature.detect.interest.FactoryInterestPoint;
import boofcv.factory.feature.orientation.FactoryOrientation;
import boofcv.factory.feature.orientation.FactoryOrientationAlgs;
import boofcv.factory.filter.blur.FactoryBlurFilter;
import boofcv.factory.filter.derivative.FactoryDerivative;
import boofcv.factory.interpolate.FactoryInterpolation;
import boofcv.factory.tracker.FactoryTrackerAlg;
import boofcv.factory.transform.pyramid.FactoryPyramid;
import boofcv.struct.feature.*;
import boofcv.struct.image.ImageGray;
import boofcv.struct.pyramid.PyramidDiscrete;

import java.util.Random;


/**
 * Factory for creating trackers which implement {@link boofcv.abst.feature.tracker.PointTracker}.  These trackers
 * are intended for use in SFM applications.  Some features which individual trackers can provide are lost when
 * using the high-level interface {@link PointTracker}.  To create low-level tracking algorithms see
 * {@link FactoryTrackerAlg}.
 *
 * @see FactoryTrackerAlg
 *
 * @author Peter Abeles
 */
public class FactoryPointTracker {

	/**
	 * Pyramid KLT feature tracker.
	 *
	 * @see boofcv.alg.tracker.klt.PyramidKltTracker
	 *
	 * @param scaling       Scales in the image pyramid. Recommend [1,2,4] or [2,4]
	 * @param configExtract Configuration for extracting features
	 * @param featureRadius Radius of the tracked feature's template.  Try 3 or 5
	 * @param imageType     Input image type.
	 * @param derivType     Image derivative type.
	 * @return KLT based tracker.
	 */
	public static <I extends ImageGray, D extends ImageGray>
	PointTracker<I> klt(int[] scaling, ConfigGeneralDetector configExtract, int featureRadius,
							 Class<I> imageType, Class<D> derivType) {
		PkltConfig config = new PkltConfig();
		config.pyramidScaling = scaling;
		config.templateRadius = featureRadius;

		return klt(config, configExtract, imageType, derivType );
	}
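	/*
	 * Usage sketch (not part of the original source): the array based variant with the values
	 * suggested above, tracking in a GrayF32 image.  The detector settings are illustrative
	 * and derivType is left null so the default derivative type is selected.
	 *
	 *   ConfigGeneralDetector configDetector = new ConfigGeneralDetector();
	 *   configDetector.maxFeatures = 600;
	 *   configDetector.radius = 3;
	 *   PointTracker<GrayF32> tracker = FactoryPointTracker.klt(
	 *       new int[]{1, 2, 4}, configDetector, 3, GrayF32.class, null);
	 */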

	/**
	 * Pyramid KLT feature tracker.
	 *
	 * @see boofcv.alg.tracker.klt.PyramidKltTracker
	 *
	 * @param config Config for the tracker. If null the default configuration is used.
	 * @param configExtract Configuration for extracting features
	 * @return KLT based tracker.
	 */
	public static <I extends ImageGray, D extends ImageGray>
	PointTracker<I> klt(PkltConfig config, ConfigGeneralDetector configExtract,
						Class<I> imageType, Class<D> derivType ) {

		if( derivType == null )
			derivType = GImageDerivativeOps.getDerivativeType(imageType);

		if( config == null ) {
			config = new PkltConfig();
		}

		if( configExtract == null ) {
			configExtract = new ConfigGeneralDetector();
		}

		GeneralFeatureDetector detector = createShiTomasi(configExtract, derivType);

		InterpolateRectangle interpInput = FactoryInterpolation.bilinearRectangle(imageType);
		InterpolateRectangle interpDeriv = FactoryInterpolation.bilinearRectangle(derivType);

		ImageGradient gradient = FactoryDerivative.sobel(imageType, derivType);

		PyramidDiscrete pyramid = FactoryPyramid.discreteGaussian(config.pyramidScaling,-1,2,true,imageType);

		return new PointTrackerKltPyramid<>(config.config, config.templateRadius, pyramid, detector,
				gradient, interpInput, interpDeriv, derivType);
	}
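	/*
	 * Usage sketch (not part of the original source): the PkltConfig based variant for GrayF32
	 * images.  Passing null for the detector configuration and derivative type falls back to
	 * the defaults created above.
	 *
	 *   PkltConfig config = new PkltConfig();
	 *   config.pyramidScaling = new int[]{1, 2, 4};
	 *   config.templateRadius = 3;
	 *   PointTracker<GrayF32> tracker =
	 *       FactoryPointTracker.klt(config, null, GrayF32.class, null);
	 */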

	/**
	 * Creates a tracker which detects Fast-Hessian features and describes them with SURF using the faster variant
	 * of SURF.
	 *
	 * @see DescribePointSurf
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param configDetector Configuration for SURF detector
	 * @param configDescribe Configuration for SURF descriptor
	 * @param configOrientation Configuration for orientation
	 * @param imageType      Type of image the input is.
	 * @return SURF based tracker.
	 */
	// TODO remove maxTracks?  Use number of detected instead
	public static <I extends ImageGray>
	PointTracker<I> dda_FH_SURF_Fast(
										  ConfigFastHessian configDetector ,
										  ConfigSurfDescribe.Speed configDescribe ,
										  ConfigAverageIntegral configOrientation ,
										  Class<I> imageType)
	{
		ScoreAssociation score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class, true);
		AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 5, true));

		AssociateDescription2D generalAssoc =
				new AssociateDescTo2D<>(new WrapAssociateSurfBasic(assoc));

		DetectDescribePoint fused =
				FactoryDetectDescribe.surfFast(configDetector, configDescribe, configOrientation,imageType);

		DdaManagerDetectDescribePoint manager = new DdaManagerDetectDescribePoint<>(fused);

		return new DetectDescribeAssociate<>(manager, generalAssoc, false);
	}
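	/*
	 * Usage sketch (assumption, not part of the original source): SURF-Fast based DDA tracking
	 * on GrayF32 images with default detector/descriptor/orientation settings (null configs).
	 *
	 *   PointTracker<GrayF32> tracker =
	 *       FactoryPointTracker.dda_FH_SURF_Fast(null, null, null, GrayF32.class);
	 */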

	/**
	 * Creates a tracker which detects Fast-Hessian features and describes them with SURF using the faster variant
	 * of SURF.
	 *
	 * @see DescribePointSurf
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param configDetector Configuration for SURF detector
	 * @param configDescribe Configuration for SURF descriptor
	 * @param configOrientation Configuration for orientation
	 * @param imageType      Type of image the input is.
	 * @return SURF based tracker.
	 */
	// TODO remove maxTracks?  Use number of detected instead
	public static <I extends ImageGray>
	PointTracker<I> dda_FH_SURF_Stable(
											ConfigFastHessian configDetector ,
											ConfigSurfDescribe.Stability configDescribe ,
											ConfigSlidingIntegral configOrientation ,
											Class<I> imageType)
	{
		ScoreAssociation score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class, true);
		AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 5, true));

		AssociateDescription2D generalAssoc =
				new AssociateDescTo2D<>(new WrapAssociateSurfBasic(assoc));

		DetectDescribePoint fused =
				FactoryDetectDescribe.surfStable(configDetector,configDescribe,configOrientation,imageType);

		DdaManagerDetectDescribePoint manager = new DdaManagerDetectDescribePoint<>(fused);

		return new DetectDescribeAssociate<>(manager, generalAssoc, false);
	}
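	/*
	 * Usage sketch (assumption): identical pattern to the fast variant above, but with the
	 * stability oriented SURF configuration classes.
	 *
	 *   PointTracker<GrayF32> tracker =
	 *       FactoryPointTracker.dda_FH_SURF_Stable(null, null, null, GrayF32.class);
	 */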

	/**
	 * Creates a tracker which detects Shi-Tomasi corner features and describes them with BRIEF.
	 *
	 * @see ShiTomasiCornerIntensity
	 * @see DescribePointBrief
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param maxAssociationError Maximum allowed association error.  Try 200.
	 * @param configExtract Configuration for extracting features
	 * @param imageType           Type of image being processed.
	 * @param derivType Type of image used to store the image derivative. null == use default
	 */
	public static <I extends ImageGray, D extends ImageGray>
	PointTracker<I> dda_ST_BRIEF(int maxAssociationError,
									  ConfigGeneralDetector configExtract,
									  Class<I> imageType, Class<D> derivType)
	{
		if( derivType == null )
			derivType = GImageDerivativeOps.getDerivativeType(imageType);

		DescribePointBrief brief = FactoryDescribePointAlgs.brief(FactoryBriefDefinition.gaussian2(new Random(123), 16, 512),
				FactoryBlurFilter.gaussian(imageType, 0, 4));

		GeneralFeatureDetector detectPoint = createShiTomasi(configExtract, derivType);
		EasyGeneralFeatureDetector easy = new EasyGeneralFeatureDetector<>(detectPoint, imageType, derivType);

		ScoreAssociateHamming_B score = new ScoreAssociateHamming_B();

		AssociateDescription2D association =
				new AssociateDescTo2D<>(FactoryAssociation.greedy(score, maxAssociationError, true));

		DdaManagerGeneralPoint manager =
				new DdaManagerGeneralPoint<>(easy, new WrapDescribeBrief<>(brief, imageType), 1.0);

		return new DetectDescribeAssociate<>(manager, association, false);
	}
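	/*
	 * Usage sketch (assumption): Shi-Tomasi corners described with BRIEF on a GrayU8 image,
	 * using the suggested association error of 200 and default detector settings.
	 *
	 *   PointTracker<GrayU8> tracker = FactoryPointTracker.dda_ST_BRIEF(
	 *       200, new ConfigGeneralDetector(), GrayU8.class, null);
	 */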

	/**
	 * Creates a tracker which detects FAST corner features and describes them with BRIEF.
	 *
	 * @see FastCornerIntensity
	 * @see DescribePointBrief
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param configFast Configuration for FAST detector
	 * @param configExtract Configuration for extracting features
	 * @param maxAssociationError Maximum allowed association error.  Try 200.
	 * @param imageType           Type of image being processed.
	 */
	public static <I extends ImageGray>
	PointTracker<I> dda_FAST_BRIEF(ConfigFast configFast,
								   ConfigGeneralDetector configExtract,
								   int maxAssociationError,
								   Class<I> imageType )
	{
		DescribePointBrief brief = FactoryDescribePointAlgs.brief(FactoryBriefDefinition.gaussian2(new Random(123), 16, 512),
				FactoryBlurFilter.gaussian(imageType, 0, 4));

		GeneralFeatureDetector corner = FactoryDetectPoint.createFast(configFast, configExtract, imageType);
		EasyGeneralFeatureDetector easy = new EasyGeneralFeatureDetector<>(corner, imageType, null);

		ScoreAssociateHamming_B score = new ScoreAssociateHamming_B();

		AssociateDescription2D association =
				new AssociateDescTo2D<>(
						FactoryAssociation.greedy(score, maxAssociationError, true));

		DdaManagerGeneralPoint manager =
				new DdaManagerGeneralPoint<>(easy, new WrapDescribeBrief<>(brief, imageType), 1.0);

		return new DetectDescribeAssociate<>(manager, association, false);
	}
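	/*
	 * Usage sketch (assumption): FAST corners described with BRIEF on a GrayU8 image.  A null
	 * ConfigFast is assumed here to select the default FAST settings.
	 *
	 *   PointTracker<GrayU8> tracker = FactoryPointTracker.dda_FAST_BRIEF(
	 *       null, new ConfigGeneralDetector(), 200, GrayU8.class);
	 */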

	/**
	 * Creates a tracker which detects Shi-Tomasi corner features and describes them with NCC.
	 *
	 * @see ShiTomasiCornerIntensity
	 * @see DescribePointPixelRegionNCC
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param configExtract Configuration for extracting features
	 * @param describeRadius Radius of the region being described.  Try 2.
	 * @param imageType      Type of image being processed.
	 * @param derivType      Type of image used to store the image derivative. null == use default
	 */
	public static <I extends ImageGray, D extends ImageGray>
	PointTracker<I> dda_ST_NCC(ConfigGeneralDetector configExtract, int describeRadius,
									Class<I> imageType, Class<D> derivType) {

		if( derivType == null )
			derivType = GImageDerivativeOps.getDerivativeType(imageType);

		int w = 2*describeRadius+1;

		DescribePointPixelRegionNCC alg = FactoryDescribePointAlgs.pixelRegionNCC(w, w, imageType);

		GeneralFeatureDetector corner = createShiTomasi(configExtract, derivType);
		EasyGeneralFeatureDetector easy = new EasyGeneralFeatureDetector<>(corner, imageType, derivType);

		ScoreAssociateNccFeature score = new ScoreAssociateNccFeature();

		AssociateDescription2D association =
				new AssociateDescTo2D<>(
						FactoryAssociation.greedy(score, Double.MAX_VALUE, true));

		DdaManagerGeneralPoint manager =
				new DdaManagerGeneralPoint<>(easy, new WrapDescribePixelRegionNCC<>(alg, imageType), 1.0);

		return new DetectDescribeAssociate<>(manager, association, false);
	}
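	/*
	 * Usage sketch (assumption): Shi-Tomasi corners described by NCC templates with the radius
	 * of 2 suggested above, on GrayF32 images.
	 *
	 *   PointTracker<GrayF32> tracker = FactoryPointTracker.dda_ST_NCC(
	 *       new ConfigGeneralDetector(), 2, GrayF32.class, null);
	 */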

	/**
	 * Creates a tracker which uses the detect, describe, associate architecture.
	 *
	 * @param detector Interest point detector.
	 * @param orientation Optional orientation estimation algorithm. Can be null.
	 * @param describe Region description.
	 * @param associate Description association.
	 * @param updateDescription After a track has been associated should the description be changed?  Try false.
	 * @param <I> Type of input image.
	 * @param <Desc> Type of region description
	 * @return tracker
	 */
	public static <I extends ImageGray, Desc extends TupleDesc>
	DetectDescribeAssociate<I,Desc> dda(InterestPointDetector<I> detector,
										OrientationImage<I> orientation ,
										DescribeRegionPoint<I, Desc> describe,
										AssociateDescription2D<Desc> associate ,
										boolean updateDescription ) {

		DetectDescribeFusion fused =
				new DetectDescribeFusion<>(detector, orientation, describe);

		DdaManagerDetectDescribePoint manager =
				new DdaManagerDetectDescribePoint<>(fused);

		DetectDescribeAssociate dat =
				new DetectDescribeAssociate<>(manager, associate, updateDescription);

		return dat;
	}
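	/*
	 * Usage sketch (assumption): assembling a custom DDA tracker from separately created
	 * components.  The SURF detector/descriptor and greedy association below are only one
	 * possible combination; any compatible detector, descriptor, and association can be used.
	 *
	 *   InterestPointDetector<GrayF32> detector =
	 *       FactoryInterestPoint.fastHessian(new ConfigFastHessian());
	 *   DescribeRegionPoint<GrayF32,BrightFeature> describe =
	 *       FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);
	 *   ScoreAssociation<BrightFeature> score =
	 *       FactoryAssociation.defaultScore(describe.getDescriptionType());
	 *   AssociateDescription2D<BrightFeature> associate =
	 *       new AssociateDescTo2D<>(FactoryAssociation.greedy(score, Double.MAX_VALUE, true));
	 *   DetectDescribeAssociate<GrayF32,BrightFeature> tracker =
	 *       FactoryPointTracker.dda(detector, null, describe, associate, false);
	 */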

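	/**
	 * Creates a tracker which uses the detect, describe, associate architecture given a combined
	 * detector/descriptor.
	 *
	 * @param detDesc Joint interest point detector and region descriptor.
	 * @param associate Description association.
	 * @param updateDescription After a track has been associated should the description be changed?  Try false.
	 * @return tracker
	 */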
	public static <I extends ImageGray, Desc extends TupleDesc>
	DetectDescribeAssociate<I,Desc> dda( DetectDescribePoint<I, Desc> detDesc,
										AssociateDescription2D<Desc> associate ,
										boolean updateDescription ) {

		DdaManagerDetectDescribePoint manager =
				new DdaManagerDetectDescribePoint<>(detDesc);

		DetectDescribeAssociate dat =
				new DetectDescribeAssociate<>(manager, associate, updateDescription);

		return dat;
	}

	/**
	 * Creates a tracker which detects Fast-Hessian features, describes them with SURF, and nominally tracks them using KLT.
	 *
	 * @see DescribePointSurf
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param kltConfig Configuration for KLT tracker
	 * @param reactivateThreshold Tracks are reactivated after this many have been dropped.  Try 10% of maxMatches
	 * @param configDetector Configuration for SURF detector
	 * @param configDescribe Configuration for SURF descriptor
	 * @param configOrientation Configuration for region orientation
	 * @param imageType      Type of image the input is.
	 * @param <I>            Input image type.
	 * @return SURF based tracker.
	 */
	public static <I extends ImageGray>
	PointTracker<I> combined_FH_SURF_KLT( PkltConfig kltConfig ,
										  int reactivateThreshold ,
										  ConfigFastHessian configDetector ,
										  ConfigSurfDescribe.Stability configDescribe ,
										  ConfigSlidingIntegral configOrientation ,
										  Class<I> imageType) {

		ScoreAssociation score = FactoryAssociation.defaultScore(TupleDesc_F64.class);
		AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 100000, true));

		AssociateDescription generalAssoc = new WrapAssociateSurfBasic(assoc);

		DetectDescribePoint fused =
				FactoryDetectDescribe.surfStable(configDetector, configDescribe, configOrientation,imageType);

		return combined(fused,generalAssoc, kltConfig,reactivateThreshold, imageType);
	}
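	/*
	 * Usage sketch (assumption): a combined SURF + KLT tracker on GrayF32 images with default
	 * SURF settings (null configs).  The pyramid scales and reactivation threshold of 60 are
	 * illustrative values only.
	 *
	 *   PkltConfig kltConfig = new PkltConfig();
	 *   kltConfig.pyramidScaling = new int[]{1, 2, 4, 8};
	 *   PointTracker<GrayF32> tracker = FactoryPointTracker.combined_FH_SURF_KLT(
	 *       kltConfig, 60, null, null, null, GrayF32.class);
	 */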

	/**
	 * Creates a tracker which detects Shi-Tomasi corner features, describes them with SURF, and
	 * nominally tracks them using KLT.
	 *
	 * @see ShiTomasiCornerIntensity
	 * @see DescribePointSurf
	 * @see boofcv.abst.feature.tracker.DdaManagerDetectDescribePoint
	 *
	 * @param configExtract Configuration for extracting features
	 * @param kltConfig Configuration for KLT
	 * @param reactivateThreshold Tracks are reactivated after this many have been dropped.  Try 10% of maxMatches
	 * @param configDescribe Configuration for SURF descriptor
	 * @param configOrientation Configuration for region orientation.  If null then orientation isn't estimated
	 * @param imageType      Type of image the input is.
	 * @param derivType      Image derivative type.
	 * @return SURF based tracker.
	 */
	public static <I extends ImageGray, D extends ImageGray>
	PointTracker<I> combined_ST_SURF_KLT(ConfigGeneralDetector configExtract,
										 PkltConfig kltConfig,
										 int reactivateThreshold,
										 ConfigSurfDescribe.Stability configDescribe,
										 ConfigSlidingIntegral configOrientation,
										 Class<I> imageType,
										 Class<D> derivType) {

		if( derivType == null )
			derivType = GImageDerivativeOps.getDerivativeType(imageType);

		GeneralFeatureDetector corner = createShiTomasi(configExtract, derivType);
		InterestPointDetector detector = FactoryInterestPoint.wrapPoint(corner, 1, imageType, derivType);

		DescribeRegionPoint regionDesc
				= FactoryDescribeRegionPoint.surfStable(configDescribe, imageType);

		ScoreAssociation score = FactoryAssociation.scoreEuclidean(TupleDesc_F64.class, true);
		AssociateSurfBasic assoc = new AssociateSurfBasic(FactoryAssociation.greedy(score, 100000, true));

		AssociateDescription generalAssoc = new WrapAssociateSurfBasic(assoc);

		OrientationImage orientation = null;

		if( configOrientation != null ) {
			Class integralType = GIntegralImageOps.getIntegralType(imageType);
			OrientationIntegral orientationII = FactoryOrientationAlgs.sliding_ii(configOrientation, integralType);
			orientation = FactoryOrientation.convertImage(orientationII,imageType);
		}

		return combined(detector,orientation,regionDesc,generalAssoc, kltConfig,reactivateThreshold,
				imageType);
	}
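	/*
	 * Usage sketch (assumption): Shi-Tomasi corners tracked with KLT and re-associated using
	 * SURF descriptors after being dropped.  Passing null for the orientation configuration
	 * skips orientation estimation, as noted above; the other values are illustrative.
	 *
	 *   PointTracker<GrayF32> tracker = FactoryPointTracker.combined_ST_SURF_KLT(
	 *       new ConfigGeneralDetector(), new PkltConfig(), 60, null, null,
	 *       GrayF32.class, null);
	 */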

	/**
	 * Creates a tracker that is a hybrid between KLT and Detect-Describe-Associate (DDA) trackers.
	 *
	 * @see CombinedTrackerScalePoint
	 *
	 * @param detector Feature detector.
	 * @param orientation Optional feature orientation.  Can be null.
	 * @param describe Feature description
	 * @param associate Association algorithm.
	 * @param kltConfig Configuration for KLT tracker
	 * @param reactivateThreshold Tracks are reactivated after this many have been dropped.  Try 10% of maxMatches
	 * @param imageType Input image type.
	 * @return Feature tracker
	 */
	public static <I extends ImageGray, Desc extends TupleDesc>
	PointTracker<I> combined(InterestPointDetector<I> detector,
							 OrientationImage<I> orientation,
							 DescribeRegionPoint<I, Desc> describe,
							 AssociateDescription<Desc> associate,
							 PkltConfig kltConfig ,
							 int reactivateThreshold,
							 Class<I> imageType)
	{
		DetectDescribeFusion fused = new DetectDescribeFusion<>(detector, orientation, describe);

		return combined(fused,associate, kltConfig, reactivateThreshold,imageType);
	}

	/**
	 * Creates a tracker that is a hybrid between KLT and Detect-Describe-Associate (DDA) trackers.
	 *
	 * @see CombinedTrackerScalePoint
	 *
	 * @param detector Feature detector and describer.
	 * @param associate Association algorithm.
	 * @param kltConfig Configuration for KLT tracker
	 * @param reactivateThreshold Tracks are reactivated after this many have been dropped.  Try 10% of maxMatches
	 * @param imageType Input image type.
	 * @return Feature tracker
	 */
	public static <I extends ImageGray, Desc extends TupleDesc>
	PointTracker<I> combined(DetectDescribePoint<I, Desc> detector,
							 AssociateDescription<Desc> associate,
							 PkltConfig kltConfig ,
							 int reactivateThreshold, Class<I> imageType )
	{
		Class derivType = GImageDerivativeOps.getDerivativeType(imageType);

		if( kltConfig == null ) {
			kltConfig = new PkltConfig();
		}

		CombinedTrackerScalePoint tracker =
				FactoryTrackerAlg.combined(detector,associate, kltConfig,imageType,derivType);

		return new PointTrackerCombined<>(tracker, reactivateThreshold, imageType, derivType);
	}
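	/*
	 * Usage sketch (assumption): wiring the combined tracker directly from a DetectDescribePoint,
	 * mirroring what combined_FH_SURF_KLT() does above.  The descriptor type BrightFeature is
	 * assumed for SURF in this version.
	 *
	 *   DetectDescribePoint<GrayF32,BrightFeature> detDesc =
	 *       FactoryDetectDescribe.surfStable(null, null, null, GrayF32.class);
	 *   ScoreAssociation<BrightFeature> score = FactoryAssociation.defaultScore(BrightFeature.class);
	 *   AssociateDescription<BrightFeature> associate =
	 *       FactoryAssociation.greedy(score, Double.MAX_VALUE, true);
	 *   PointTracker<GrayF32> tracker =
	 *       FactoryPointTracker.combined(detDesc, associate, new PkltConfig(), 60, GrayF32.class);
	 */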


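	/**
	 * Creates a tracker which uses the detect, describe, associate architecture around a
	 * {@link GeneralFeatureDetector} point detector and a separate region descriptor.
	 *
	 * @param detector Point feature detector.
	 * @param describe Region description.
	 * @param associate Description association.
	 * @param scale Scale at which detected features are described.
	 * @param imageType Type of image being processed.
	 * @return tracker
	 */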
	public static <I extends ImageGray, D extends ImageGray, Desc extends TupleDesc>
	PointTracker<I> dda(GeneralFeatureDetector<I, D> detector,
						DescribeRegionPoint<I, Desc> describe,
						AssociateDescription2D<Desc> associate,
						double scale,
						Class<I> imageType) {

		EasyGeneralFeatureDetector easy = new EasyGeneralFeatureDetector<>(detector, imageType, null);

		DdaManagerGeneralPoint manager =
				new DdaManagerGeneralPoint<>(easy, describe, scale);

		return new DetectDescribeAssociate<>(manager, associate, false);
	}

	/**
	 * Creates a Shi-Tomasi corner detector specifically designed for SFM.  Smaller feature radii work better.
	 * Vary the detection radius to control the number of features.  When larger features are used, weighting
	 * should be set to true, but because the radius used here is so small it is set to false.
	 */
	public static <I extends ImageGray, D extends ImageGray>
	GeneralFeatureDetector<I, D> createShiTomasi(ConfigGeneralDetector config ,
												 Class<D> derivType)
	{
		GradientCornerIntensity cornerIntensity = FactoryIntensityPointAlg.shiTomasi(1, false, derivType);

		return FactoryDetectPoint.createGeneral(cornerIntensity, config );
	}
}