package org.nuiton.topia.replication.model;

/*
 * #%L
 * ToPIA :: Service Replication
 * $Id$
 * $HeadURL$
 * %%
 * Copyright (C) 2004 - 2014 CodeLutin
 * %%
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as 
 * published by the Free Software Foundation, either version 3 of the 
 * License, or (at your option) any later version.
 * 
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Lesser Public License for more details.
 * 
 * You should have received a copy of the GNU General Lesser Public 
 * License along with this program.  If not, see
 * <http://www.gnu.org/licenses/lgpl-3.0.html>.
 * #L%
 */

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.nuiton.topia.persistence.TopiaException;
import org.nuiton.topia.persistence.TopiaEntity;
import org.nuiton.topia.persistence.TopiaEntityEnum;
import org.nuiton.topia.persistence.util.EntityOperator;
import org.nuiton.topia.persistence.util.TopiaEntityHelper;
import org.nuiton.topia.persistence.util.TopiaEntityIdsMap;
import org.nuiton.topia.replication.TopiaReplicationOperation;
import org.nuiton.topia.replication.operation.AttachLink;
import org.nuiton.topia.replication.operation.DettachAssociation;
import org.nuiton.topia.replication.operation.Duplicate;
import org.nuiton.topia.replication.operation.LoadLink;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Model of replication.
 *
 * The object contains the required {@link #nodes} to replicate:
 * <ul>
 * <li>all entities for the given {@code nodes} if flag {@link #replicateAll} is
 * set to {@code true}</li>
 * <li>the entities given by the field {@link #topiaIds}</li>
 * </ul>
 *
 * @author Tony Chemit - [email protected]
 * @since 2.2.0
 */
public class ReplicationModel {

    /** Logger */
    private static final Log log = LogFactory.getLog(ReplicationModel.class);

    /** the set of entity contracts to replicate */
    protected final TopiaEntityEnum[] contracts;

    /** the ids of the entities to replicate (not used in replicateAll mode) */
    protected final String[] topiaIds;

    /** the dictionary of the nodes to replicate, keyed by their type */
    protected final Map<TopiaEntityEnum, ReplicationNode> nodes;

    /** the list of the nodes to replicate (in replication order) */
    protected final List<ReplicationNode> order;

    /**
     * a flag to know whether all the data of the contracts is replicated.
     */
    protected final boolean replicateAll;

    public ReplicationModel(TopiaEntityEnum[] contracts,
                            Set<Class<? extends TopiaEntity>> types,
                            String... topiaIds) {
        this.contracts = contracts.clone();
        this.topiaIds = topiaIds;
        replicateAll = false;
        order = new ArrayList<ReplicationNode>();
        Map<TopiaEntityEnum, ReplicationNode> tmpNodes =
                new HashMap<TopiaEntityEnum, ReplicationNode>();
        for (Class<? extends TopiaEntity> k : types) {
            TopiaEntityEnum e = getContract(k);
            ReplicationNode replicationNode = new ReplicationNode(e);
            tmpNodes.put(e, replicationNode);
        }
        nodes = Collections.unmodifiableMap(tmpNodes);
    }

    public ReplicationModel(TopiaEntityEnum[] contracts,
                            boolean replicateAll,
                            String... topiaIds) {
        this.contracts = contracts.clone();
        this.topiaIds = topiaIds;
        this.replicateAll = replicateAll;
        order = new ArrayList<ReplicationNode>();
        Map<TopiaEntityEnum, ReplicationNode> tmpNodes =
                new HashMap<TopiaEntityEnum, ReplicationNode>();
        for (TopiaEntityEnum e : contracts) {
            ReplicationNode replicationNode = new ReplicationNode(e);
            tmpNodes.put(e, replicationNode);
            if (!replicateAll) {
                // restricted mode: the order is induced by the order of the
                // given contracts
                order.add(replicationNode);
            }
        }
        nodes = Collections.unmodifiableMap(tmpNodes);
    }

    @SuppressWarnings({"unchecked"})
    public TopiaEntityEnum getContract(Class<?> type) {
        TopiaEntityEnum e = null;
        if (TopiaEntity.class.isAssignableFrom(type)) {
            e = TopiaEntityHelper.getEntityEnum(
                    (Class<? extends TopiaEntity>) type, contracts);
        }
        return e;
    }

    public Collection<ReplicationNode> getNodes() {
        return nodes.values();
    }

    public Set<Class<? extends TopiaEntity>> getTypes() {
        Set<Class<? extends TopiaEntity>> result =
                new HashSet<Class<? extends TopiaEntity>>();
        for (TopiaEntityEnum e : nodes.keySet()) {
            result.add(e.getContract());
        }
        return result;
    }

    public ReplicationNode getNode(TopiaEntityEnum contract) {
        return nodes.get(contract);
    }

    public void addDependency(List<ReplicationNode> nodes) {
        if (log.isDebugEnabled()) {
            log.debug("Try to add nodes : " + nodes +
                      " in universe : " + order);
        }
        for (ReplicationNode node : nodes) {
            if (order.contains(node)) {
                // can not be done
                throw new IllegalStateException(
                        "Node " + node + " is already registred : " + order);
            }
            order.add(node);
        }
    }

    public TopiaEntityEnum[] getContracts() {
        return contracts;
    }

    public String[] getTopiaIds() {
        return topiaIds;
    }

    public List<ReplicationNode> getOrder() {
        return order;
    }

    public boolean isReplicateAll() {
        return replicateAll;
    }

    @SuppressWarnings("unchecked")
    public ReplicationNode getNode(String propertyName, Class<?> propertyType) {
        if (TopiaEntity.class.isAssignableFrom(propertyType)) {
            Class<? extends TopiaEntity> t =
                    (Class<? extends TopiaEntity>) propertyType;
            TopiaEntityEnum e = getContract(t);
            if (nodes.containsKey(e)) {
                ReplicationNode dep = getNode(e);
                return dep;
            }
        }
        return null;
    }

    public void detectAssociations(TopiaEntityEnum... filter) throws TopiaException {
        for (TopiaEntityEnum type : nodes.keySet()) {
            ReplicationNode node = getNode(type);
            EntityOperator operator = node.getOperator();
            List<String> associationProperties =
                    operator.getAssociationProperties();
            if (!associationProperties.isEmpty()) {
                for (String p : associationProperties) {
                    ReplicationNode dep = getNode(
                            p, operator.getAssociationPropertyType(p));
                    if (dep != null) {
                        if (log.isDebugEnabled()) {
                            log.debug("from type - " +
                                      type.getContract().getSimpleName() +
                                      " [" + p + ":" + dep + "]");
                        }
                        node.addAssociation(p, dep);
                    }
                }
            }
        }
    }

    public void detectDirectDependencies() throws TopiaException {
        for (TopiaEntityEnum type : nodes.keySet()) {
            ReplicationNode node = getNode(type);
            EntityOperator operator = node.getOperator();
            List<String> properties = operator.getProperties();
            if (!properties.isEmpty()) {
                for (String p : properties) {
                    ReplicationNode dep = getNode(p, operator.getPropertyType(p));
                    if (dep != null) {
                        if (log.isDebugEnabled()) {
                            log.debug("from type - " +
                                      type.getContract().getSimpleName() +
                                      " [" + p + ":" + dep + "]");
                        }
                        node.addDependency(p, dep);
                    }
                }
            }
        }
    }

    public void detectDependencies() throws TopiaException {
        Set<ReplicationNode> toResolved =
                new HashSet<ReplicationNode>(nodes.values());
        Set<ReplicationNode> resolved = new HashSet<ReplicationNode>();
        List<Set<ReplicationNode>> levels =
                new ArrayList<Set<ReplicationNode>>();

        // first pass to detect the replication levels
        // only the direct dependencies are considered, not the associations
        // if A -> B then B must be in a lower level (i.e. replicated before).
        //TODO cycles must be handled (for that we need a dictionary of the
        //TODO compositions that can be null, so that a composition can be
        //TODO ignored when it is nullable; otherwise it necessarily means
        //TODO that B must be replicated before A...
        while (!toResolved.isEmpty()) {
            Set<ReplicationNode> level = new HashSet<ReplicationNode>();
            for (ReplicationNode node : toResolved) {
                if (node.hasDependency()) {
                    for (ReplicationNode n : node.getDependencies().values()) {
                        if (!resolved.contains(n)) {
                            level.add(n);
                        }
                    }
                }
            }
            Set<ReplicationNode> safeLevel = new HashSet<ReplicationNode>();
            if (level.isEmpty()) {
                safeLevel.addAll(toResolved);
            } else {
                // some dependencies were found
                for (ReplicationNode n : level) {
                    //TODO check that the type is not a dependency of level
                    safeLevel.add(n);
                }
                if (safeLevel.isEmpty()) {
                    // a dependency cycle was detected, nothing can be done for now
                    throw new IllegalStateException(
                            "un cycle dans les dependences a été détecté, " +
                            "l'algorithme necessite plus de donnes... " +
                            "\n niveau courant : " + level +
                            "\n resolus : " + getOrder());
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("level [" + levels.size() + "] resolved : " + safeLevel);
            }
            toResolved.removeAll(safeLevel);
            resolved.addAll(safeLevel);
            levels.add(safeLevel);
            level.clear();
        }

        // second pass: look for the best possible order for the types of each level
        // from now on only the associations are considered
        // if A -*> B then we try to replicate B before A
        // if a cycle is detected then no optimal order can be imposed, and B
        // will have to be detached from A during the replication, then
        // reattached to A once A and B have been replicated
        // the optimal order makes it possible to avoid doing that

        // computation of the replication orders for the sets of each level
        // to find the right order, we work on the shells of the nodes.
        HashSet<ReplicationNode> done = new HashSet<ReplicationNode>();
        for (Set<ReplicationNode> level : levels) {
            detectDependenciesOrder(level, done);
        }
    }

    public void detectDependenciesOrder(Set<ReplicationNode> safeLevel,
                                        Set<ReplicationNode> doned) {
        if (log.isDebugEnabled()) {
            log.debug("will detect " + safeLevel);
        }
        Map<ReplicationNode, Set<ReplicationNode>> dico =
                new HashMap<ReplicationNode, Set<ReplicationNode>>();
        for (ReplicationNode n : safeLevel) {
            Set<ReplicationNode> shell =
                    new HashSet<ReplicationNode>(n.getShell());
            shell.retainAll(safeLevel);
            if (log.isDebugEnabled()) {
                log.debug("shell to use for " + n + " : " + shell);
            }
            dico.put(n, shell);
        }
        List<Set<ReplicationNode>> levels =
                new ArrayList<Set<ReplicationNode>>();
        while (!dico.isEmpty()) {
            if (log.isDebugEnabled()) {
                log.debug("level [" + levels.size() + "] on " + safeLevel);
                for (Entry<ReplicationNode, Set<ReplicationNode>> entry :
                        dico.entrySet()) {
                    log.debug("node " + entry.getKey() + " : " + entry.getValue());
                }
            }
            // detect the free nodes
            Set<ReplicationNode> free = new HashSet<ReplicationNode>();
            for (Entry<ReplicationNode, Set<ReplicationNode>> e : dico.entrySet()) {
                if (e.getValue().isEmpty()) {
                    free.add(e.getKey());
                }
            }
            if (free.isEmpty()) {
                // a cycle was detected
                // nothing more can be predicted for this set
                if (log.isWarnEnabled()) {
                    log.warn("level [" + levels.size() + "] cycle detecte : " +
                             dico.keySet());
                }
                throw new IllegalStateException(
                        "un cycle n'a pas pu etre resoud entre l'ensemble " +
                        dico.keySet());
            }
            log.info("there is some free node(s) to resolve : " + free);
            for (Entry<ReplicationNode, Set<ReplicationNode>> e : dico.entrySet()) {
                Set<ReplicationNode> list = e.getValue();
                list.removeAll(free);
            }
            for (ReplicationNode n : free) {
                dico.remove(n);
            }
            if (log.isDebugEnabled()) {
                log.debug("level [" + levels.size() + "] resolved : " + free);
            }
            levels.add(free);
            doned.addAll(free);
            if (dico.isEmpty()) {
                // optimal order found
                break;
            }
        }
        for (Set<ReplicationNode> nodesForLevel : levels) {
            addDependency(new ArrayList<ReplicationNode>(nodesForLevel));
        }
        dico.clear();
        levels.clear();
    }

    public void detectObjectsToDettach() {
        Set<ReplicationNode> universe = new HashSet<ReplicationNode>();
        for (ReplicationNode node : getOrder()) {

            // detect whether the node has associations going out of the
            // already replicated universe
            // if so, the association is marked for detachment
            if (node.hasAssociation()) {
                for (Entry<String, ReplicationNode> e :
                        node.getAssociations().entrySet()) {
                    ReplicationNode nodeDst = e.getValue();
                    if (!universe.contains(nodeDst)) {
                        if (log.isDebugEnabled()) {
                            log.debug("association to dettach " + e.getKey() +
                                      " for " + node);
                        }
                        // outgoing association
                        node.addAssociationToDettach(e.getKey());
                    }
                }
            }

            // do the same for the direct dependencies
            //TODO conflict resolution on dependencies is not in place yet
            if (node.hasDependency()) {
                for (Entry<String, ReplicationNode> e :
                        node.getDependencies().entrySet()) {
                    ReplicationNode nodeDst = e.getValue();
                    if (!universe.contains(nodeDst)) {
                        if (log.isDebugEnabled()) {
                            log.debug("dependency to dettach " + e.getKey() +
                                      " for " + node);
                        }
                        // outgoing dependency
                        node.addDependencyToDettach(e.getKey());
                    }
                }
            }
            universe.add(node);
        }
    }

    public void detectOperations() {
        Set<ReplicationNode> universe = new HashSet<ReplicationNode>();
        Set<ReplicationLink> links = new HashSet<ReplicationLink>();
        Set<ReplicationLink> linksToLoad = new HashSet<ReplicationLink>();

        // first pass to collect all the associations
        for (ReplicationNode node : order) {
            if (node.hasAssociation()) {
                for (Entry<String, ReplicationNode> entry :
                        node.getAssociations().entrySet()) {
                    String name = entry.getKey();
                    // in any case, add an association link to reattach
                    ReplicationNode target = node.getAssociations().get(name);
                    ReplicationLink link =
                            new ReplicationLink(node, target, name, true);
                    if (nodes.containsValue(target)) {
                        // an association that must be handled was found
                        links.add(link);
                        if (log.isDebugEnabled()) {
                            log.debug("link to treate : " + link);
                        }
                    } else {
                    }
                }
                List<String> associationProperties =
                        node.getOperator().getAssociationProperties();
                for (String name : associationProperties) {
                    Class<?> associationPropertyType =
                            node.getOperator().getAssociationPropertyType(name);
                    TopiaEntityEnum contract = getContract(associationPropertyType);
                    if (contract == null || !nodes.containsKey(contract)) {
//                    if (!TopiaEntity.class.isAssignableFrom(associationPropertyType) ||
//                            !nodes.containsKey(associationPropertyType)) {
                        ReplicationLink link =
                                new ReplicationLink(node, null, name, true);
                        linksToLoad.add(link);
                        if (log.isDebugEnabled()) {
                            log.debug("link to load before replication : " + link);
                        }
                    }
                }
            }
        }

        // second pass to detect the operations to perform
        for (ReplicationNode node : order) {
            log.debug("------------------------------- for node " + node);

            // detect whether the node has associations going out of the
            // already replicated universe
            // if so, the association is marked for detachment
            if (node.hasAssociationsToDettach()) {
                Set<String> names = node.getAssociationsToDettach();
                // association detachment operations
                for (String name : names) {
                    addPreOperation(node, node, DettachAssociation.class, name);
                }
            }
            Set<ReplicationLink> tmpLinks = new HashSet<ReplicationLink>();

            // look for the associations to load before replication
            for (ReplicationLink link : linksToLoad) {
                if (node.equals(link.getSource())) {
                    tmpLinks.add(link);
                }
            }
            if (!tmpLinks.isEmpty()) {
                // there are associations to load before replication
                for (ReplicationLink link : tmpLinks) {
//                    addPreOperation(node, node, LoadLink.class, link);
                    addPreOperation(node, link.getSource(), LoadLink.class, link);
                }
                linksToLoad.removeAll(links);
                tmpLinks.clear();
            }

            // duplication operation
            addDuplicateOperation(node, node, Duplicate.class);
            universe.add(node);

            // reattachment operations
            for (ReplicationLink link : links) {
                if (link.canReattach(universe, node)) {
                    // reattachable link
                    tmpLinks.add(link);
                }
            }
            if (!tmpLinks.isEmpty()) {
                // some links to reattach were found
                for (ReplicationLink link : tmpLinks) {
//                    addPostOperation(node, node, AttachLink.class, link);
                    addPostOperation(node, link.getTarget(), AttachLink.class, link);
                }
                // these links no longer need to be processed
                links.removeAll(tmpLinks);
            }
        }
    }

//    List<ReplicationOperationDef> realOperations = new ArrayList<ReplicationOperationDef>();

    public void adjustOperations(TopiaEntityIdsMap data) {
        for (TopiaEntityEnum e : getContracts()) {
            List<String> ids = data.get(e.getContract());
            ReplicationNode node = getNode(e);
            if (node == null) {
                // the node is not known (this is not normal!)
                continue;
            }
            List<ReplicationOperationDef> realOperations =
                    new ArrayList<ReplicationOperationDef>();
            if (CollectionUtils.isEmpty(ids)) {
                if (log.isInfoEnabled()) {
                    log.info("skip operations on node " + node +
                             " (no data associated)");
                }
                // must only keep operations that are not directly linked with
                // the node; at the moment only attachLink is in that case
                ReplicationOperationDef[] operations = node.getOperations();
                for (ReplicationOperationDef op : operations) {
                    if (!node.equals(op.getNode())) {
                        // keep this operation
                        realOperations.add(op);
                        if (log.isInfoEnabled()) {
                            log.info(" keep " + op);
                        }
                    } else {
                        if (log.isInfoEnabled()) {
                            log.info(" skip " + op);
                        }
                    }
                }
                node.setOperations(realOperations);
            }
            // always sort operations on the phase
            node.sortOperations();
//            if (CollectionUtils.isEmpty(ids)) {
//                ReplicationOperationDef[] operations = node.getOperations();
//                log.info("skip operations on node " + node +
//                        " (no data associated)");
//                for (ReplicationOperationDef op : operations) {
//                    log.info(" skip " + op);
//                }
//                node.clearOperations();
//            } else {
//                node.sortOperations();
//            }
        }
    }

    public void detectShell() {
        for (ReplicationNode n : nodes.values()) {
            Set<ReplicationNode> shell = new HashSet<ReplicationNode>();
            getShell(n, shell);
            shell.remove(n);
            n.setShell(shell);
        }
    }

    protected void getShell(ReplicationNode node, Set<ReplicationNode> explored) {
        if (!explored.contains(node)) {
            explored.add(node);
        }
        if (node.hasAssociation()) {
            for (ReplicationNode n : node.getAssociations().values()) {
                if (!explored.contains(n)) {
                    getShell(n, explored);
                }
            }
        }
        if (node.hasDependency()) {
            for (ReplicationNode n : node.getDependencies().values()) {
                if (!explored.contains(n)) {
                    getShell(n, explored);
                }
            }
        }
    }

    protected void addPreOperation(ReplicationNode ownerNode,
                                   ReplicationNode node,
                                   Class<? extends TopiaReplicationOperation> operationClass,
                                   Object... params) {
        addOperation(ownerNode, node,
                     ReplicationOperationPhase.before,
                     operationClass, params);
    }

    protected void addDuplicateOperation(ReplicationNode ownerNode,
                                         ReplicationNode node,
                                         Class<? extends TopiaReplicationOperation> operationClass,
                                         Object... params) {
        addOperation(ownerNode, node,
                     ReplicationOperationPhase.duplicate,
                     operationClass, params);
    }

    protected void addPostOperation(ReplicationNode ownerNode,
                                    ReplicationNode node,
                                    Class<? extends TopiaReplicationOperation> operationClass,
                                    Object... params) {
        addOperation(ownerNode, node,
                     ReplicationOperationPhase.after,
                     operationClass, params);
    }

    protected void addOperation(ReplicationNode ownerNode,
                                ReplicationNode node,
                                ReplicationOperationPhase phase,
                                Class<? extends TopiaReplicationOperation> operationClass,
                                Object... params) {
        ReplicationOperationDef op;
        op = new ReplicationOperationDef(phase, operationClass, node, params);
        ownerNode.addOperation(op);
    }
}
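
A minimal usage sketch, not part of the library source above: the wrapper class name is invented, and the order of the detect* calls is only inferred from what each method reads (detectDependenciesOrder uses the node shells, detectObjectsToDettach uses the computed order, detectOperations uses the detach marks); the library's own factory code may drive the model differently.

import org.nuiton.topia.persistence.TopiaEntityEnum;
import org.nuiton.topia.persistence.TopiaException;
import org.nuiton.topia.replication.model.ReplicationModel;
import org.nuiton.topia.replication.model.ReplicationNode;

public class ReplicationModelUsageSketch {

    // builds and prepares a model that replicates all data of the given contracts
    public static ReplicationModel buildModel(TopiaEntityEnum[] contracts) throws TopiaException {
        ReplicationModel model = new ReplicationModel(contracts, true);

        model.detectAssociations();        // association links between nodes
        model.detectDirectDependencies();  // direct (composition) dependencies
        model.detectShell();               // transitive shell of each node
        model.detectDependencies();        // replication order, level by level
        model.detectObjectsToDettach();    // links leaving the already replicated universe
        model.detectOperations();          // concrete operations attached to each node

        for (ReplicationNode node : model.getOrder()) {
            System.out.println("replicate " + node);
        }
        return model;
    }
}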



