// All Downloads are FREE. Search and download functionalities are using the official Maven repository.
//
// org.opt4j.optimizers.ea.Hypervolume Maven / Gradle / Ivy
//
// The newest version!
/*******************************************************************************
 * Copyright (c) 2014 Opt4J
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *******************************************************************************/
 

package org.opt4j.optimizers.ea;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.opt4j.core.Individual;
import org.opt4j.core.common.archive.FrontDensityIndicator;
import org.opt4j.core.start.Constant;

import com.google.inject.Inject;

/**
 * The {@link Hypervolume}, see "Zitzler, E., and Thiele, L. (1998):
 * Multiobjective Optimization Using Evolutionary Algorithms - A Comparative
 * Case Study. Parallel Problem Solving from Nature (PPSN-V), 292-301." is a
 * {@link FrontDensityIndicator} based on determination of the hypervolume
 * contribution. The calculation is based on a normalization between 0 and 1 in
 * each dimension and a transformation to a maximization problem. Additionally
 * an offset value (default 1) is added to each dimension.
 * 
 * 
 * @see SMSModule
 * @author Ramin Etemaadi
 * @author Johannes Kruisselbrink
 * @author Rui Li
 * @author lukasiewycz
 * 
 */
public class Hypervolume implements FrontDensityIndicator {

	/** Offset added to each (normalized, inverted) dimension; see constructor. */
	protected final double offset;

	/**
	 * Constructs a {@link Hypervolume}.
	 * 
	 * @param offset
	 *            the offset that is added to each dimension before the
	 *            hypervolume is calculated
	 */
	@Inject
	public Hypervolume(@Constant(value = "offset", namespace = Hypervolume.class) double offset) {
		this.offset = offset;
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see
	 * org.opt4j.optimizer.ea.FrontDensityIndicator#getDensityValues(java.util
	 * .Collection)
	 */
	@Override
	public Map<Individual, Double> getDensityValues(Collection<Individual> individuals) {
		return getDensityValues(individuals, this.offset);
	}

	/**
	 * Calculates the density values for a front of non-dominated individuals
	 * based on the contribution of the {@link Hypervolume}.
	 * 
	 * A special approach for two dimensions exists as well as a general
	 * approach for n dimensions.
	 * 
	 * @param individuals
	 *            the individuals
	 * @param offset
	 *            the offset
	 * @return the map of density values
	 * @throws IllegalArgumentException
	 *             if {@code individuals} is empty
	 */
	protected Map<Individual, Double> getDensityValues(Collection<Individual> individuals, double offset) {
		if (individuals.isEmpty()) {
			throw new IllegalArgumentException("Individuals is empty.");
		}

		List<Individual> orderIndividuals = new ArrayList<Individual>(individuals);
		int m = individuals.iterator().next().getObjectives().size();

		if (m < 2) {
			// With fewer than two objectives there is no meaningful hypervolume
			// contribution; assign 0.0 to every individual.
			Map<Individual, Double> result = new HashMap<Individual, Double>();
			for (Individual individual : individuals) {
				result.put(individual, 0.0);
			}
			return result;
		} else if (m == 2) {
			return calculateHypervolumeContribution2D(orderIndividuals, offset);
		} else {
			return calculateHypervolumeContributionN(orderIndividuals, offset);
		}
	}

	/**
	 * Calculates the {@link Hypervolume} contribution for n dimensions. The
	 * contribution of an individual is the total hypervolume minus the
	 * hypervolume of the front without that individual.
	 * 
	 * @param individuals
	 *            the individuals
	 * @param offset
	 *            the offset
	 * @return the map of density values
	 */
	protected Map<Individual, Double> calculateHypervolumeContributionN(List<Individual> individuals, double offset) {
		Map<Individual, Double> result = new HashMap<Individual, Double>();
		List<double[]> front = invert(normalize(getMinValues(individuals)), offset);

		int m = front.get(0).length;

		double hvAll = calculateHypervolume(front, m);

		for (int i = 0; i < front.size(); i++) {
			// Hypervolume of the front with the i-th point removed.
			List<double[]> iFront = new ArrayList<double[]>(front);
			iFront.remove(i);
			double iHv = calculateHypervolume(iFront, m);
			result.put(individuals.get(i), hvAll - iHv);
		}

		return result;
	}

	/**
	 * Calculates the {@link Hypervolume} contribution for two dimensions. After
	 * sorting by the first objective, each point's exclusive contribution is
	 * the rectangle spanned by its distance to the neighboring points.
	 * 
	 * @param individuals
	 *            the individuals
	 * @param offset
	 *            the offset
	 * @return the map of density values
	 */
	protected Map<Individual, Double> calculateHypervolumeContribution2D(List<Individual> individuals, double offset) {
		Map<Individual, Double> result = new HashMap<Individual, Double>();
		List<double[]> front = invert(normalize(getMinValues(individuals)), offset);
		List<double[]> sorted = new ArrayList<double[]>(front);

		Collections.sort(sorted, new Comparator<double[]>() {
			@Override
			public int compare(double[] o1, double[] o2) {
				return Double.compare(o1[0], o2[0]);
			}
		});

		final int size = sorted.size();

		for (int i = 0; i < size; i++) {
			// Width to the previous point (or to 0 for the first point) and
			// height to the next point (or to 0 for the last point).
			double diffX = sorted.get(i)[0] - (i > 0 ? sorted.get(i - 1)[0] : 0);
			double diffY = sorted.get(i)[1] - (i < size - 1 ? sorted.get(i + 1)[1] : 0);
			double contribution = diffX * diffY;

			// indexOf uses reference identity of the double[] element, which
			// maps the sorted point back to its original individual.
			result.put(individuals.get(front.indexOf(sorted.get(i))), contribution);
		}

		return result;
	}

	/**
	 * Transforms the non-dominated {@link Individual}s to a front where each
	 * objective is to be minimized.
	 * 
	 * @param individuals
	 *            the individuals
	 * @return the front of vectors that is minimized
	 */
	protected List<double[]> getMinValues(List<Individual> individuals) {
		List<double[]> minValues = new ArrayList<double[]>();
		for (Individual individual : individuals) {
			minValues.add(individual.getObjectives().array());
		}
		return minValues;
	}

	/**
	 * Normalizes a front of non-dominated solutions to values between 0 and 1
	 * per dimension.
	 * 
	 * @param front
	 *            the front of non-dominated solutions
	 * @return the normalized front
	 */
	protected List<double[]> normalize(List<double[]> front) {
		int m = front.get(0).length;

		double[] min = new double[m];
		double[] max = new double[m];

		Arrays.fill(min, +Double.MAX_VALUE);
		Arrays.fill(max, -Double.MAX_VALUE);

		for (double[] p : front) {
			for (int i = 0; i < m; i++) {
				min[i] = Math.min(min[i], p[i]);
				max[i] = Math.max(max[i], p[i]);
			}
		}

		for (int i = 0; i < m; i++) {
			// Avoid division by zero for degenerate dimensions where all
			// points share the same value.
			if (min[i] == max[i]) {
				max[i]++;
			}
		}

		List<double[]> normalized = new ArrayList<double[]>();
		for (double[] p : front) {
			double[] pn = new double[m];
			for (int i = 0; i < m; i++) {
				pn[i] = (p[i] - min[i]) / (max[i] - min[i]);
			}
			normalized.add(pn);
		}
		return normalized;
	}

	/**
	 * Inverts (from a minimization to a maximization problem) a front of
	 * solutions and adds an offset value to each dimension.
	 * 
	 * @param front
	 *            the front of non-dominated solutions
	 * @param offset
	 *            the offset
	 * @return the inverted front
	 */
	protected List<double[]> invert(List<double[]> front, double offset) {
		int m = front.get(0).length;

		// Reference (nadir) point: 1.0 + offset in every dimension, since the
		// front is normalized to [0, 1].
		double[] nadir = new double[m];
		Arrays.fill(nadir, 1.0 + offset);

		List<double[]> inverted = new ArrayList<double[]>();
		for (double[] element : front) {
			double[] in = new double[element.length];
			for (int i = 0; i < element.length; i++) {
				in[i] = nadir[i] - element[i];
			}
			inverted.add(in);
		}

		return inverted;
	}

	/**
	 * Implements the {@link Hypervolume} calculations as proposed by Zitzler,
	 * E., and Thiele, L. (1998). All points have positive values in all
	 * dimensions and the hypervolume is calculated from 0.
	 * 
	 * @param front
	 *            the front of non-dominated solutions
	 * @param nObjectives
	 *            the number of objectives
	 * @return the hypervolume
	 */
	protected double calculateHypervolume(List<double[]> front, int nObjectives) {
		double volume = 0.0;
		double distance = 0.0;

		// Sweep along the last objective, accumulating slice volumes of
		// dimension (nObjectives - 1).
		while (!front.isEmpty()) {
			List<double[]> nondominatedPoints = filterNondominatedSet(front, nObjectives - 1);
			double tempVolume;
			if (nObjectives < 3) {
				assert nondominatedPoints.size() > 0;
				tempVolume = nondominatedPoints.get(0)[0];
			} else {
				// Recurse on the lower-dimensional slice.
				tempVolume = calculateHypervolume(nondominatedPoints, nObjectives - 1);
			}
			double tempDistance = surfaceUnchangedTo(front, nObjectives - 1);
			volume += tempVolume * (tempDistance - distance);
			distance = tempDistance;
			front = reduceNondominatedSet(front, nObjectives - 1, distance);
		}

		return volume;
	}

	/**
	 * Returns the subset of {@code front} that is non-dominated with respect to
	 * the first {@code nObjectives} dimensions (maximization).
	 * 
	 * @param front
	 *            the front of solutions
	 * @param nObjectives
	 *            the number of objectives to consider
	 * @return the non-dominated subset
	 */
	protected List<double[]> filterNondominatedSet(List<double[]> front, int nObjectives) {
		List<double[]> nondominated = new ArrayList<double[]>();

		for (double[] p1 : front) {
			boolean dominated = false;
			for (double[] p2 : nondominated) {
				if (dominates(p2, p1, nObjectives)) {
					dominated = true;
					break;
				}
			}

			if (!dominated) {
				// Remove points that the newcomer dominates before adding it.
				for (Iterator<double[]> it = nondominated.iterator(); it.hasNext();) {
					double[] p2 = it.next();
					if (dominates(p1, p2, nObjectives)) {
						it.remove();
					}
				}
				nondominated.add(p1);
			}
		}

		return nondominated;
	}

	/**
	 * Tests whether {@code p1} dominates {@code p2} in the first
	 * {@code nObjectives} dimensions (maximization): {@code p1} is at least as
	 * good everywhere and strictly better in at least one dimension.
	 * 
	 * @param p1
	 *            the candidate dominator
	 * @param p2
	 *            the candidate dominated point
	 * @param nObjectives
	 *            the number of objectives to consider
	 * @return {@code true} if {@code p1} dominates {@code p2}
	 */
	protected boolean dominates(double[] p1, double[] p2, int nObjectives) {
		boolean strong = false;
		for (int i = 0; i < nObjectives; i++) {
			if (p1[i] > p2[i]) {
				strong = true;
			} else if (p1[i] < p2[i]) {
				return false;
			}
		}
		return strong;
	}

	/**
	 * Returns the minimum value of the given objective over the front, i.e. the
	 * sweep distance up to which the (nObjectives-1)-dimensional surface stays
	 * unchanged.
	 * 
	 * @param front
	 *            the front of solutions (must not be empty)
	 * @param objective
	 *            the objective index
	 * @return the minimum value of the objective
	 */
	protected double surfaceUnchangedTo(List<double[]> front, int objective) {
		assert front.size() > 0;
		double value = Double.MAX_VALUE;
		for (double[] p : front) {
			value = Math.min(value, p[objective]);
		}
		return value;
	}

	/**
	 * Returns the subset of {@code front} whose value in the given objective is
	 * strictly greater than {@code threshold}.
	 * 
	 * @param front
	 *            the front of solutions
	 * @param objective
	 *            the objective index
	 * @param threshold
	 *            the threshold value
	 * @return the reduced front
	 */
	protected List<double[]> reduceNondominatedSet(List<double[]> front, int objective, double threshold) {
		List<double[]> result = new ArrayList<double[]>();

		for (double[] p : front) {
			if (p[objective] > threshold) {
				result.add(p);
			}
		}
		return result;
	}

}




// © 2015 - 2024 Weber Informatics LLC | Privacy Policy