org.metacsp.meta.symbolsAndTime.Schedulable Maven / Gradle / Ivy
A Java API for Meta-CSP based reasoning
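Before the source listing, the following standalone sketch illustrates the "preserved commitment" measure pc that Schedulable.getOrderedMCSs() computes for each ordered pair of activities in a conflict (see the Laborie and Ghallab reference cited in the code). The class name PcDemo and all time bounds are invented for illustration; only the pc formula itself is taken from the source below.
// Hypothetical, self-contained illustration (not part of the library) of the pc
// ("preserved commitment") measure used by Schedulable.getOrderedMCSs().
public class PcDemo {
	// pc for imposing "A ends before (or meets) B starts", given A's earliest/latest
	// end times (eetA, letA) and B's earliest/latest start times (estB, lstB).
	static float pc(long eetA, long letA, long estB, long lstB) {
		long dmin = estB - letA; // smallest possible distance end(A) -> start(B)
		long dmax = lstB - eetA; // largest possible distance end(A) -> start(B)
		if (dmin == dmax) return 1.0f; // degenerate range, skipped in the real code
		// fraction of [dmin, dmax] lying below zero, i.e. the share of the distance
		// range that posting the precedence A-before-B would rule out
		return ((float) (Math.min(dmax, 0) - Math.min(dmin, 0))) / ((float) (dmax - dmin));
	}
	public static void main(String[] args) {
		// invented bounds: A may end anywhere in [10, 30], B may start anywhere in [5, 40]
		System.out.println("pc(A before B) = " + pc(10, 30, 5, 40)); // ~0.45
		// invented bounds for the inverse ordering: B may end in [20, 50], A may start in [0, 25]
		System.out.println("pc(B before A) = " + pc(20, 50, 0, 25)); // ~0.91
	}
}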
/*******************************************************************************
* Copyright (c) 2010-2013 Federico Pecora
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
******************************************************************************/
package org.metacsp.meta.symbolsAndTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.util.Vector;
import java.util.logging.Logger;
import org.metacsp.framework.ConstraintNetwork;
import org.metacsp.framework.ValueOrderingH;
import org.metacsp.framework.Variable;
import org.metacsp.framework.VariableOrderingH;
import org.metacsp.framework.meta.MetaConstraint;
import org.metacsp.framework.meta.MetaVariable;
import org.metacsp.multi.activity.Activity;
import org.metacsp.multi.activity.ActivityComparator;
import org.metacsp.multi.allenInterval.AllenIntervalConstraint;
import org.metacsp.time.APSPSolver;
import org.metacsp.time.Bounds;
import org.metacsp.utility.PowerSet;
import org.metacsp.utility.logging.MetaCSPLogging;
public abstract class Schedulable extends MetaConstraint {
/**
*
*/
private static final long serialVersionUID = 5719994497319584156L;
public long getBeforeParameter() {
return beforeParameter;
}
public void setBeforeParameter(long beforeParameter) {
this.beforeParameter = beforeParameter;
}
protected long beforeParameter = 0;
public PEAKCOLLECTION getPeakCollectionStrategy() {
return peakCollectionStrategy;
}
public void setPeakCollectionStrategy(PEAKCOLLECTION peakCollectionStrategy) {
this.peakCollectionStrategy = peakCollectionStrategy;
}
protected Vector<Activity> activities;
public static enum PEAKCOLLECTION {SAMPLING, COMPLETE, BINARY};
protected PEAKCOLLECTION peakCollectionStrategy = PEAKCOLLECTION.SAMPLING;
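// Peak collection strategies (see the corresponding methods below):
// SAMPLING - sweep the activities in order and grow sets of overlapping activities until they conflict
// COMPLETE - split the timeline at every earliest start/end time and test all subsets of activities overlapping each segment
// BINARY   - report single over-consuming activities, otherwise conflicting pairs of overlapping activities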
public Schedulable(VariableOrderingH varOH, ValueOrderingH valOH) {
super(varOH, valOH);
}
// Finds sets of overlapping activities and assesses whether they are conflicting (e.g., over-consuming a resource)
protected ConstraintNetwork[] samplingPeakCollection() {
if (activities != null && !activities.isEmpty()) {
Activity[] groundVars = activities.toArray(new Activity[activities.size()]);
Arrays.sort(groundVars,new ActivityComparator(true));
Vector<ConstraintNetwork> ret = new Vector<ConstraintNetwork>();
HashMap<Activity,ConstraintNetwork> usages = new HashMap<Activity,ConstraintNetwork>();
Vector<Vector<Activity>> overlappingAll = new Vector<Vector<Activity>>();
// this first block checks whether a single activity is overconsuming
// the resource
for (Activity act : activities) {
if (isConflicting(new Activity[] {act})) {
ConstraintNetwork temp = new ConstraintNetwork(null);
temp.addVariable(act.getVariable());
ret.add(temp);
}
}
// groundVars are ordered activities
for (int i = 0; i < groundVars.length; i++) {
Vector<Activity> overlapping = new Vector<Activity>();
overlapping.add(groundVars[i]);
long start = (groundVars[i]).getTemporalVariable().getEST();
long end = (groundVars[i]).getTemporalVariable().getEET();
Bounds intersection = new Bounds(start, end);
// starting from act[i], all the following activities are evaluated to see whether they temporally
// overlap with act[i]
for (int j = 0; j < groundVars.length; j++) {
if (i != j) {
start = (groundVars[j]).getTemporalVariable().getEST();
end = (groundVars[j]).getTemporalVariable().getEET();
Bounds nextInterval = new Bounds(start, end);
Bounds intersectionNew = intersection.intersectStrict(nextInterval);
// if act[j] overlaps, it is added to the temporary (w.r.t. i) set of activities
if (intersectionNew != null) {
overlapping.add(groundVars[j]);
// the current set of overlapping activities is evaluated to see if
// the resource capacity is exceeded
if (isConflicting(overlapping.toArray(new Activity[overlapping.size()]))) {
// if it is exceeded, the Vector of activities gathered in this iteration is added
// to the Vector<Vector<Activity>> of all overlapping sets
overlappingAll.add(overlapping);
break;
}
// if the capacity is not exceeded, the narrower intersection becomes the current one...
else intersection = intersectionNew;
}
}
}
}
for (Vector<Activity> overlapping : overlappingAll) {
if (overlapping.size() > 1) {
Activity first = overlapping.get(0);
ConstraintNetwork temp = new ConstraintNetwork(null);
for (Activity act : overlapping) temp.addVariable(act.getVariable());
usages.put(first, temp);
}
}
for (Activity key : usages.keySet()) {
if (usages.get(key).getVariables().length > 1) ret.add(usages.get(key));
}
return ret.toArray(new ConstraintNetwork[ret.size()]);
}
return (new ConstraintNetwork[0]);
}
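// Exhaustive peak collection: the earliest start/end times of all activities partition the timeline
// into segments; for each segment, every non-empty subset of the activities overlapping it is
// tested with isConflicting() (exponential in the size of each overlap set).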
protected ConstraintNetwork[] completePeakCollection() {
if (activities != null && !activities.isEmpty()) {
logger.finest("Doing complete peak collection with " + activities.size() + " activities...");
Activity[] groundVars = activities.toArray(new Activity[activities.size()]);
Vector<Long> discontinuities = new Vector<Long>();
for (Activity a : groundVars) {
long start = a.getTemporalVariable().getEST();
long end = a.getTemporalVariable().getEET();
if (!discontinuities.contains(start)) discontinuities.add(start);
if (!discontinuities.contains(end)) discontinuities.add(end);
}
Long[] discontinuitiesArray = discontinuities.toArray(new Long[discontinuities.size()]);
Arrays.sort(discontinuitiesArray);
HashSet<HashSet<Activity>> superPeaks = new HashSet<HashSet<Activity>>();
for (int i = 0; i < discontinuitiesArray.length-1; i++) {
HashSet<Activity> onePeak = new HashSet<Activity>();
superPeaks.add(onePeak);
Bounds interval = new Bounds(discontinuitiesArray[i], discontinuitiesArray[i+1]);
for (Activity a : groundVars) {
Bounds interval1 = new Bounds(a.getTemporalVariable().getEST(), a.getTemporalVariable().getEET());
Bounds intersection = interval.intersectStrict(interval1);
if (intersection != null && !intersection.isSingleton()) {
onePeak.add(a);
}
}
}
Vector<ConstraintNetwork> ret = new Vector<ConstraintNetwork>();
for (HashSet<Activity> superSet : superPeaks) {
for (Set<Activity> s : PowerSet.powerSet(superSet)) {
if (!s.isEmpty()) {
ConstraintNetwork cn = new ConstraintNetwork(null);
for (Activity a : s) cn.addVariable(a.getVariable());
if (!ret.contains(cn) && isConflicting(s.toArray(new Activity[s.size()]))) ret.add(cn);
}
}
}
logger.finest("Done peak sampling");
return ret.toArray(new ConstraintNetwork[ret.size()]);
}
return (new ConstraintNetwork[0]);
}
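// Pairwise peak collection: first report single activities that are conflicting on their own;
// if there are none, report every pair of temporally overlapping activities that conflicts.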
protected ConstraintNetwork[] binaryPeakCollection() {
if (activities != null && !activities.isEmpty()) {
Vector<ConstraintNetwork> ret = new Vector<ConstraintNetwork>();
logger.finest("Doing binary peak collection with " + activities.size() + " activities...");
Activity[] groundVars = activities.toArray(new Activity[activities.size()]);
for (Activity a : groundVars) {
if (isConflicting(new Activity[] {a})) {
ConstraintNetwork cn = new ConstraintNetwork(null);
cn.addVariable(a.getVariable());
ret.add(cn);
}
}
if (!ret.isEmpty()) {
return ret.toArray(new ConstraintNetwork[ret.size()]);
}
for (int i = 0; i < groundVars.length-1; i++) {
for (int j = i+1; j < groundVars.length; j++) {
Bounds bi = new Bounds(groundVars[i].getTemporalVariable().getEST(), groundVars[i].getTemporalVariable().getEET());
Bounds bj = new Bounds(groundVars[j].getTemporalVariable().getEST(), groundVars[j].getTemporalVariable().getEET());
if (bi.intersectStrict(bj) != null && isConflicting(new Activity[] {groundVars[i], groundVars[j]})) {
ConstraintNetwork cn = new ConstraintNetwork(null);
cn.addVariable(groundVars[i].getVariable());
cn.addVariable(groundVars[j].getVariable());
ret.add(cn);
}
}
}
if (!ret.isEmpty()) {
return ret.toArray(new ConstraintNetwork[ret.size()]);
}
}
return (new ConstraintNetwork[0]);
}
// private ConstraintNetwork[] binaryPeakCollection() {
// ConstraintNetwork[] nonMinimalPeaks = this.completePeakCollection();
// Vector<ConstraintNetwork> ret = null;
// for (ConstraintNetwork cn : nonMinimalPeaks) {
// if (cn.getVariables().length == 2) {
// if (ret == null) ret = new Vector<ConstraintNetwork>();
//// Variable[] vaux= cn.getVariables();
//// if(!vaux[0].equals(vaux[1]))
// ret.add(cn);
// }
// }
// if (ret != null) return ret.toArray(new ConstraintNetwork[ret.size()]);
// return (new ConstraintNetwork[0]);
// }
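// The meta-variables of this meta-constraint are the conflicting peaks found by the
// currently selected peak collection strategy.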
@Override
public ConstraintNetwork[] getMetaVariables() {
if (peakCollectionStrategy.equals(PEAKCOLLECTION.SAMPLING))
return samplingPeakCollection();
else if (peakCollectionStrategy.equals(PEAKCOLLECTION.BINARY))
return binaryPeakCollection();
return completePeakCollection();
}
// @Override
// public ConstraintNetwork[] getMetaValues(MetaVariable metaVariable, int initialTime) {
// return getMetaValues(metaVariable);
// }
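// The meta-values (resolvers) of a conflicting peak are precedence constraints: one
// BeforeOrMeets constraint (with lower bound beforeParameter) per MCS returned by
// getOrderedMCSs(), posted from the MCS's "from" activity to its "to" activity.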
@Override
public ConstraintNetwork[] getMetaValues(MetaVariable metaVariable) {
ConstraintNetwork conflict = metaVariable.getConstraintNetwork();
MCSData[] mcsinfo = getOrderedMCSs(conflict);
Vector<ConstraintNetwork> ret = new Vector<ConstraintNetwork>();
if(mcsinfo == null) //unresolvable MCS: no solution can be found
{
//System.out.println("ESTA Fails: unresolvable MCS.");
return null;
}
for (MCSData mcs : mcsinfo) {
AllenIntervalConstraint before = new AllenIntervalConstraint(AllenIntervalConstraint.Type.BeforeOrMeets, new Bounds(this.beforeParameter, APSPSolver.INF));
before.setFrom(mcs.mcsActFrom.getVariable());
before.setTo(mcs.mcsActTo.getVariable());
ConstraintNetwork resolver = new ConstraintNetwork(mcs.mcsActFrom.getVariable().getConstraintSolver());
resolver.addVariable(mcs.mcsActFrom.getVariable());
resolver.addVariable(mcs.mcsActTo.getVariable());
resolver.addConstraint(before);
ret.add(resolver);
}
return ret.toArray(new ConstraintNetwork[ret.size()]);
}
/**
* Get a list of {@link MCSData} objects, ordered according to decreasing k, where k is a heuristic estimator
* of the amount of flexibility which is maintained when imposing a temporal constraint that resolves an MCS -
* see [P. Laborie, M. Ghallab, "Planning with Sharable Resource Constraints", IJCAI 1995].
* @param peak The peak (set of conflicting activities) from which to sample MCSs and compute the k-based ordering.
* @return An ordered array of {@link MCSData} objects.
*/
public MCSData[] getOrderedMCSs(ConstraintNetwork peak)
{
//System.out.println("PEAK SIZE: " + peak.getVariables().length);
Vector<Variable[]> mcslist = new Vector<Variable[]>();
Variable[] vars = peak.getVariables();
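// every pair of activities in the peak is treated as a candidate (binary) minimal critical set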
for (int i = 0; i < vars.length; i++) {
for (int j = i+1; j < vars.length; j++) {
Variable[] oneMcs = new Variable[2];
oneMcs[0] = vars[i];
oneMcs[1] = vars[j];
mcslist.add(oneMcs);
}
}
//MCSData[] mcsinfo = new MCSData[mcslist.size()];
Vector<MCSData> mcsinfo = new Vector<MCSData>();
int index = 0;
boolean unresMCSFound = false;
//System.out.println("MCSINFO size: " + mcsinfo.length);
while((!unresMCSFound) && (index < mcslist.size())) //For each MCS
{
float pcmin = 1.0f; //current minimum pc value
float pcminBad = 1.0f; //value of pcminBad
float kReciprocal = 0.0f; //reciprocal of K (invert before returning)
Variable actFrom = null;
Variable actTo = null;
int unresMCS = 0;
Variable[] currentMcs = mcslist.elementAt(index);
//Vector that will contain the commitment (pc) values
Vector<Float> pcVector = new Vector<Float>();
int mcsSize = currentMcs.length;
//For each pair of activities {Ag, Ah} in the MCS
for (int g = 0; g < mcsSize; g++)
{
long est1 = ((Activity)currentMcs[g]).getTemporalVariable().getEST();
long eet1 = ((Activity)currentMcs[g]).getTemporalVariable().getEET();
long lst1 = ((Activity)currentMcs[g]).getTemporalVariable().getLST();
long let1 = ((Activity)currentMcs[g]).getTemporalVariable().getLET();
for (int h = g+1; h < mcsSize; h++)
{
long est2 = ((Activity)currentMcs[h]).getTemporalVariable().getEST();
long eet2 = ((Activity)currentMcs[h]).getTemporalVariable().getEET();
long lst2 = ((Activity)currentMcs[h]).getTemporalVariable().getLST();
long let2 = ((Activity)currentMcs[h]).getTemporalVariable().getLET();
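// dmin and dmax bound the temporal distance from the end of one activity to the start of the
// other; pc is the fraction of [dmin, dmax] lying below zero, i.e. (roughly) the share of the
// distance range that posting the corresponding precedence would rule out - smaller is better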
//Analysis of the direct pair (Ag before Ah)
long dmin = est2 - let1;
long dmax = lst2 - eet1;
if(dmin > dmax)
{
logger.severe("Direct pair and dmin > dmax: IMPOSSIBLE");
System.exit(0);
}
float pc = 0.0f; //current pc
if(dmin != dmax)
{
pc = ((float)(Math.min(dmax, 0) - Math.min(dmin, 0)))/((float)(dmax - dmin));
pcVector.add(pc);
if(pc < pcmin)
{
//System.out.println("ADDED DIRECT: " + pc);
pcmin = pc;
pcminBad = pcmin;
actFrom = currentMcs[g];
actTo = currentMcs[h];
}
else
{
unresMCS++;
}
}
else //If dmin == dmax, we can skip the analysis of the pair {Ag, Ah}
{
unresMCS++;
}
//Analysis of the inverse pair (Ah before Ag)
dmin = est1 - let2;
dmax = lst1 - eet2;
if(dmin > dmax)
{
logger.severe("Inverse pair and dmin > dmax: IMPOSSIBLE");
System.exit(0);
}
if(dmin != dmax) //if dmin == dmax, we can skip the analysis of the pair {Ag, Ah}
{
pc = ((float)(Math.min(dmax, 0) - Math.min(dmin, 0)))/((float)(dmax - dmin));
pcVector.add(pc);
if(pc < pcmin)
{
//System.out.println("ADDED INVERSE: " + pc);
pcmin = pc;
actFrom = currentMcs[h];
actTo = currentMcs[g];
}
else
{
unresMCS++;
}
}
else
{
unresMCS++;
}
}
}
//IF WE ENCOUNTER AN UNRESOLVABLE MCS WE CAN STOP THE LOOP: NO SOLUTION IS POSSIBLE
if(unresMCS < (mcsSize*(mcsSize-1)))
{
//Computation of K(MCS): K = 1 / (sum over the collected pc values of 1/(1 + pc - pcmin))
for(int g = 0; g < pcVector.size(); g++)
kReciprocal += 1.0f/(1.0f + pcVector.elementAt(g) - pcminBad);
//NOTE: the MCSData constructor argument order (pcmin, k, from-activity, to-activity) is assumed
//from the fields read in getMetaValues() above
mcsinfo.add(new MCSData(pcmin, 1.0f/kReciprocal, (Activity)actFrom, (Activity)actTo));
}
else unresMCSFound = true;
index++;
}
//an unresolvable MCS was found: the peak cannot be resolved by precedence posting
if (unresMCSFound) return null;
//order the MCSs according to decreasing k (MCSData is assumed to implement the corresponding Comparable)
Collections.sort(mcsinfo);
return mcsinfo.toArray(new MCSData[mcsinfo.size()]);
}
public void setUsage(Activity... acts) {
if (activities == null) activities = new Vector<Activity>();
for (Activity act : acts)
if (!activities.contains(act))
activities.add(act);
//System.out.println("-->" + activities.size());
}
public void removeUsage(Activity... acts) {
if (activities != null) {
for (Activity act : acts) activities.removeElement(act);
}
//System.out.println("-->" + activities.size());
}
public Vector<Activity> getActivityOnUse(){
return activities;
}
// subclasses define what it means for a set of overlapping activities to be conflicting
// (e.g., to jointly over-consume a resource); signature inferred from its uses above
public abstract boolean isConflicting(Activity[] peak);
}