All downloads are free. Search and download functionalities use the official Maven repository.

net.automatalib.util.automata.equivalence.CharacterizingSets Maven / Gradle / Ivy

Go to download

This artifact provides various common utility operations for analyzing and manipulating automata and graphs, such as traversal, minimization and copying.

There is a newer version: 0.11.0
Show newest version
/* Copyright (C) 2013-2019 TU Dortmund
 * This file is part of AutomataLib, http://www.automatalib.net/.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.automatalib.util.automata.equivalence;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;

import javax.annotation.ParametersAreNonnullByDefault;

import com.google.common.collect.AbstractIterator;
import net.automatalib.automata.UniversalDeterministicAutomaton;
import net.automatalib.util.automata.Automata;
import net.automatalib.words.Word;

/**
 * Operations for calculating characterizing sets.
 * 

* A characterizing set for a whole automaton is a set W of words such that for every two states * s1 and s2, there exists a word w ∈ W such that w exposes a * difference between s1 and s2 (i.e., either covers a transition with differing * property (or not defined in only one case), or reaching a successor state with differing properties), or there exists * no such word at all. *

* A characterizing set for a single state s is a set W of words such that for every state t, there * exists a word w ∈ W such that w exposes a difference between s and t, or there * exists no such word at all. * * @author Malte Isberner */ @ParametersAreNonnullByDefault public final class CharacterizingSets { private CharacterizingSets() { } /** * Computes a characterizing set for the given automaton. * * @param automaton * the automaton for which to determine the characterizing set. * @param inputs * the input alphabets to consider * @param result * the collection in which to store the characterizing words */ public static void findCharacterizingSet(UniversalDeterministicAutomaton automaton, Collection inputs, Collection> result) { findIncrementalCharacterizingSet(automaton, inputs, Collections.emptyList(), result); } /** * Computes a characterizing set for a specified state in the given automaton. * * @param automaton * the automaton containing the state * @param inputs * the input alphabets to consider * @param state * the state for which to determine the characterizing set * @param result * the collection in which to store the characterizing words */ public static void findCharacterizingSet(UniversalDeterministicAutomaton automaton, Collection inputs, S state, Collection> result) { Object prop = automaton.getStateProperty(state); List currentBlock = new ArrayList<>(); boolean multipleStateProps = false; for (S s : automaton) { if (Objects.equals(s, state)) { continue; } Object sProp = automaton.getStateProperty(s); if (!Objects.equals(sProp, prop)) { multipleStateProps = true; } else { currentBlock.add(s); } } if (multipleStateProps) { result.add(Word.epsilon()); } while (!currentBlock.isEmpty()) { Iterator it = currentBlock.iterator(); Word suffix = null; while (it.hasNext() && suffix == null) { S s = it.next(); suffix = Automata.findSeparatingWord(automaton, state, s, inputs); } if (suffix == null) { return; } result.add(suffix); List trace = 
buildTrace(automaton, state, suffix); List nextBlock = new ArrayList<>(); while (it.hasNext()) { S s = it.next(); if (checkTrace(automaton, s, suffix, trace)) { nextBlock.add(s); } } currentBlock = nextBlock; } } public static Iterator> characterizingSetIterator(UniversalDeterministicAutomaton automaton, Collection inputs) { return new IncrementalCharacterizingSetIterator<>(automaton, inputs, Collections.emptyList()); } private static List buildTrace(UniversalDeterministicAutomaton automaton, S state, Word suffix) { if (suffix.isEmpty()) { Object prop = automaton.getStateProperty(state); return Collections.singletonList(prop); } List trace = new ArrayList<>(2 * suffix.length()); S curr = state; for (I sym : suffix) { T trans = automaton.getTransition(curr, sym); if (trans == null) { break; } Object prop = automaton.getTransitionProperty(trans); trace.add(prop); curr = automaton.getSuccessor(trans); prop = automaton.getStateProperty(curr); trace.add(prop); } return trace; } private static boolean checkTrace(UniversalDeterministicAutomaton automaton, S state, Word suffix, List trace) { Iterator it = trace.iterator(); S curr = state; for (I sym : suffix) { T trans = automaton.getTransition(curr, sym); if (!it.hasNext()) { return (trans == null); } Object prop = automaton.getTransitionProperty(trans); if (!Objects.equals(prop, it.next())) { return false; } curr = automaton.getSuccessor(trans); prop = automaton.getStateProperty(curr); if (!Objects.equals(prop, it.next())) { return false; } } return true; } public static boolean findIncrementalCharacterizingSet(UniversalDeterministicAutomaton automaton, Collection inputs, Collection> oldSuffixes, Collection> newSuffixes) { boolean refined = false; // We need a list to ensure a stable iteration order List> oldSuffixList = toList(oldSuffixes); Queue> blocks = buildInitialBlocks(automaton, oldSuffixList); if (!oldSuffixes.contains(Word.epsilon())) { if (epsilonRefine(automaton, blocks)) { newSuffixes.add(Word.epsilon()); 
refined = true; } } Word suffix; while ((suffix = refine(automaton, inputs, blocks)) != null) { newSuffixes.add(suffix); refined = true; } return refined; } public static Iterator> incrementalCharacterizingSetIterator(UniversalDeterministicAutomaton automaton, Collection inputs, Collection> oldSuffixes) { return new IncrementalCharacterizingSetIterator<>(automaton, inputs, oldSuffixes); } private static List toList(Collection collection) { if (collection instanceof List) { return (List) collection; } else { return new ArrayList<>(collection); } } private static Queue> buildInitialBlocks(UniversalDeterministicAutomaton automaton, List> oldSuffixes) { Map>, List> initialPartitioning = new HashMap<>(); Queue> blocks = new ArrayDeque<>(); for (S state : automaton) { List> sig = buildSignature(automaton, oldSuffixes, state); List block = initialPartitioning.get(sig); if (block == null) { block = new ArrayList<>(); blocks.add(block); initialPartitioning.put(sig, block); } block.add(state); } return blocks; } private static List> buildSignature(UniversalDeterministicAutomaton automaton, List> suffixes, S state) { List> signature = new ArrayList<>(suffixes.size()); for (Word suffix : suffixes) { List trace = buildTrace(automaton, state, suffix); signature.add(trace); } return signature; } private static boolean epsilonRefine(UniversalDeterministicAutomaton automaton, Queue> blockQueue) { int initialSize = blockQueue.size(); boolean refined = false; for (int i = 0; i < initialSize; i++) { List block = blockQueue.poll(); if (block.size() <= 1) { continue; } Map> propCluster = clusterByProperty(automaton, block); if (propCluster.size() > 1) { refined = true; } blockQueue.addAll(propCluster.values()); } return refined; } private static Word refine(UniversalDeterministicAutomaton automaton, Collection inputs, Queue> blockQueue) { List currBlock; while ((currBlock = blockQueue.poll()) != null) { if (currBlock.size() <= 1) { continue; // we cannot split further } Iterator it = 
currBlock.iterator(); S ref = it.next(); Word suffix = null; S state = null; while (it.hasNext() && suffix == null) { state = it.next(); suffix = Automata.findSeparatingWord(automaton, ref, state, inputs); } if (suffix != null) { int otherBlocks = blockQueue.size(); Map, List> buckets = new HashMap<>(); List firstBucket = new ArrayList<>(); List secondBucket = new ArrayList<>(); firstBucket.add(ref); buckets.put(buildTrace(automaton, ref, suffix), firstBucket); secondBucket.add(state); buckets.put(buildTrace(automaton, state, suffix), secondBucket); cluster(automaton, suffix, it, buckets); blockQueue.addAll(buckets.values()); // Split all other blocks that were in the queue for (int i = 0; i < otherBlocks; i++) { List otherBlock = blockQueue.poll(); if (otherBlock.size() > 2) { buckets.clear(); cluster(automaton, suffix, otherBlock.iterator(), buckets); blockQueue.addAll(buckets.values()); } } return suffix; } } return null; } private static Map> clusterByProperty(UniversalDeterministicAutomaton automaton, List states) { Map> result = new HashMap<>(); for (S state : states) { Object prop = automaton.getStateProperty(state); List block = result.computeIfAbsent(prop, k -> new ArrayList<>()); block.add(state); } return result; } private static void cluster(UniversalDeterministicAutomaton automaton, Word suffix, Iterator stateIt, Map, List> bucketMap) { while (stateIt.hasNext()) { S state = stateIt.next(); List trace = buildTrace(automaton, state, suffix); List bucket = bucketMap.computeIfAbsent(trace, k -> new ArrayList<>()); bucket.add(state); } } private static class IncrementalCharacterizingSetIterator extends AbstractIterator> { private final UniversalDeterministicAutomaton automaton; private final Collection inputs; private final List> oldSuffixes; private Queue> blocks; IncrementalCharacterizingSetIterator(UniversalDeterministicAutomaton automaton, Collection inputs, Collection> oldSuffixes) { this.automaton = automaton; this.inputs = inputs; this.oldSuffixes = 
toList(oldSuffixes); } @Override protected Word computeNext() { // first call if (blocks == null) { blocks = buildInitialBlocks(automaton, oldSuffixes); if (!oldSuffixes.contains(Word.epsilon())) { if (epsilonRefine(automaton, blocks)) { return Word.epsilon(); } } } final Word suffix = refine(automaton, inputs, blocks); if (suffix != null) { return suffix; } return endOfData(); } } }