// Source recovered from a Maven repository artifact listing for shz.core:ToMap.
// Note: a newer version of the artifact (10.3.1) is available upstream.
package shz.core;

import shz.core.model.tag.ixx.ILTag;
import shz.core.queue.p.ConcurrentMinPQueue;
import shz.core.queue.p.ConcurrentRBBSTPQueue;
import shz.core.queue.p.MinPQueue;
import shz.core.queue.p.RBBSTPQueue;

import java.lang.reflect.Field;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.*;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@SuppressWarnings("unchecked")
public final class ToMap {
    private final Map map;

    private ToMap(int initialCapacity, int idx, boolean dummy) {
        if (initialCapacity <= 0) map = dummy ? new LinkedHashMap<>() : new HashMap<>();
        else map = dummy ? new LinkedHashMap<>(reduce(initialCapacity, idx), 1.0f)
                : new HashMap<>(reduce(initialCapacity, idx), 1.0f);
    }

    private static final int MAX = 1 << 30;

    static int reduce(int cap, int idx) {
        if (idx <= 0 || cap == 1) return (int) Math.ceil(cap / 0.75f);
        int n = cap - 1;
        n |= n >>> 1;
        n |= n >>> 2;
        n |= n >>> 4;
        n |= n >>> 8;
        n |= n >>> 16;
        n = n >= MAX ? MAX : n + 1;
        while (idx-- > 0) {
            if (n < 2) break;
            n >>>= 1;
        }
        return (int) Math.ceil(n / 0.75f);
    }

    public static  ToMap get(int initialCapacity, int idx, boolean dummy) {
        return new ToMap<>(initialCapacity, idx, dummy);
    }

    public static  ToMap get(int initialCapacity, int idx) {
        return new ToMap<>(initialCapacity, idx, false);
    }

    public static  ToMap get(int initialCapacity) {
        return new ToMap<>(initialCapacity, 0, false);
    }

    public static  ToMap get() {
        return new ToMap<>(0, 0, false);
    }

    public  T build() {
        return (T) map;
    }

    public ToMap put(Map m) {
        map.putAll(m);
        return this;
    }

    public  ToMap put(KK k, VV v) {
        map.put(k, v);
        return this;
    }

    public static > Collector collector(
            Function keyMapper, Function valueMapper,
            BinaryOperator mergeFunction, Supplier mapSupplier) {
        return Collector.of(
                mapSupplier,
                //替换默认merge方法
                (map, element) -> merge(keyMapper.apply(element), valueMapper.apply(element), map, mergeFunction),
                (m1, m2) -> {
                    for (Map.Entry e : m2.entrySet()) merge(e.getKey(), e.getValue(), m1, mergeFunction);
                    return m1;
                }
        );
    }

    private static > void merge(K key, V value, M map, BinaryOperator mergeFunction) {
        V oldValue;
        if ((oldValue = map.get(key)) == null) {
            //原key的值为null则使用新值(包括null)
            map.put(key, value);
            return;
        }
        //原key的值不为null则使用具体的取值策略
        map.put(key, mergeFunction.apply(oldValue, value));
    }

    public static > Collector collector(
            Function keyMapper, Function valueMapper,
            Supplier mapSupplier) {
        return collector(keyMapper, valueMapper,
                //原key的值不为null时的取值策略,默认策略为抛出异常,这里选择若新值不为null则替换原来的值
                (oldValue, newValue) -> newValue == null ? oldValue : newValue,
                mapSupplier
        );
    }

    /**
     * 大致确定元素个数时的收集器
     *
     * @param keyMapper       键映射器
     * @param valueMapper     值映射器
     * @param initialCapacity 大致确定的初始容量
     * @param idx             初始容量衰减次数
     * @param dummy           是否排序的
     */
    public static  Collector> collector(
            Function keyMapper, Function valueMapper, int initialCapacity, int idx, boolean dummy) {
        //提供初始化容量的HashMap(若dummy为true则为LinkedHashMap)
        return collector(keyMapper, valueMapper, () -> get(initialCapacity, idx, dummy).build());
    }

    public static  Collector> collector(
            Function keyMapper, Function valueMapper, int initialCapacity, int idx) {
        return collector(keyMapper, valueMapper, initialCapacity, idx, false);
    }

    public static  Collector> collector(
            Function keyMapper, Function valueMapper, int initialCapacity) {
        return collector(keyMapper, valueMapper, initialCapacity, 0, false);
    }

    /**
     * 大致确定元素个数时的流收集
     */
    public static  Map explicitCollect(
            Stream stream, Function keyMapper, Function valueMapper, int initialCapacity, int idx, boolean dummy) {
        Map map = stream.collect(collector(keyMapper, valueMapper, initialCapacity, idx, dummy));
        return map.isEmpty() ? Collections.emptyMap() : map;
    }

    public static  Map explicitCollect(
            Stream stream, Function keyMapper, Function valueMapper, int initialCapacity) {
        return explicitCollect(stream, keyMapper, valueMapper, initialCapacity, 0, false);
    }

    /**
     * 无法确定元素个数时的收集器
     */
    public static  Collector> collector(Function keyMapper, Function valueMapper, boolean dummy) {
        return collector(keyMapper, valueMapper, dummy ? LinkedHashMap::new : HashMap::new);
    }

    public static  Collector> collector(Function keyMapper, Function valueMapper) {
        return collector(keyMapper, valueMapper, false);
    }

    /**
     * 无法确定元素个数时的流收集
     *
     * @param extra 申请额外空间
     */
    public static  Map collect(Stream stream, Function keyMapper, Function valueMapper, boolean dummy, int extra) {
        int size = Math.max(extra, 0);
        Map map = stream.collect(collector(keyMapper, valueMapper, dummy));
        if (map.isEmpty()) return size == 0 ? Collections.emptyMap() : get(size, 0, dummy).build();
        if (size == 0) return map;
        //原来的初始容量
        int oldCap = reduce(map.size(), 0);
        //申请额外空间后计算需扩容一次达到指定容量的初始容量
        int newCap = reduce(size + map.size(), 1);
        //如果添加额外元素最多导致扩容一次则直接返回
        // 考虑到申请额外空间不一定会真正的去执行,因此这一次的扩容留给具体执行的方法(不一定会扩容)
        if (oldCap >= newCap) return map;
        //下面代码基本不会执行,除非申请额外空间非常大(即需要扩容两次以上)

        //初始容量避免添加额外元素时导致频繁扩容
        newCap = reduce(size + map.size(), 0);
        Map result = dummy ? new LinkedHashMap<>(newCap, 1.0f) : new HashMap<>(newCap, 1.0f);
        result.putAll(map);
        return result;
    }

    public static  Map collect(Stream stream, Function keyMapper, Function valueMapper, boolean dummy) {
        return collect(stream, keyMapper, valueMapper, dummy, 0);
    }

    public static  Map collect(Stream stream, Function keyMapper, Function valueMapper, int extra) {
        return collect(stream, keyMapper, valueMapper, false, extra);
    }

    /**
     * 如果流中具有重复的key又不想抛异常则使用该方法,否则使用对应的Collectors.toMap()即可
     */
    public static  Map collect(Stream stream, Function keyMapper, Function valueMapper) {
        return collect(stream, keyMapper, valueMapper, false, 0);
    }

    /**
     * 分组列表
     *
     * @param list        需要分组的数据
     * @param classifiers 分组函数集
     */
    @SafeVarargs
    public static > M unique(List list, Function... classifiers) {
        if (list == null || list.isEmpty()) return (M) Collections.emptyMap();
        Collector> collector = Collectors.groupingBy(classifiers[classifiers.length - 1], Collectors.reducing(null, (a, b) -> b));
        for (int i = classifiers.length - 2; i >= 0; --i) collector = Collectors.groupingBy(classifiers[i], collector);
        return (M) list.stream().collect(collector);
    }

    @SafeVarargs
    public static > M uniqueMapper(List list, Function lastMapper, Function... classifiers) {
        if (list == null || list.isEmpty()) return (M) Collections.emptyMap();
        Collector> collector = Collectors.groupingBy(classifiers[classifiers.length - 1], Collectors.mapping(lastMapper, Collectors.reducing(null, (a, b) -> b)));
        for (int i = classifiers.length - 2; i >= 0; --i) collector = Collectors.groupingBy(classifiers[i], collector);
        return (M) list.stream().collect(collector);
    }

    public static Map getSimple(Object obj, Predicate predicate, String mapField) {
        if (obj == null) return Collections.emptyMap();
        if (NullHelp.isEmpty(mapField)) {
            if (obj instanceof Map) return toMap(obj);
            Class cls;
            if (obj instanceof Collection || obj instanceof Object[] || AccessibleHelp.isCommon(cls = obj.getClass()))
                return Collections.emptyMap();
            List fields = AccessibleCacheHelp.fields(cls);
            Map map = get(fields.size()).build();
            fields.forEach(field -> {
                if (predicate != null && !predicate.test(field)) return;
                map.put(field.getName(), AccessibleHelp.getField(field, obj));
            });
            return map.isEmpty() ? Collections.emptyMap() : map;
        }
        if (obj instanceof Map) {
            Map map = toMap(obj);
            if (map.isEmpty()) return Collections.emptyMap();
            return get(1).put(mapField, map).build();
        }
        Class cls;
        if (obj instanceof Collection || obj instanceof Object[] || AccessibleHelp.isCommon(cls = obj.getClass()))
            return get(1).put(mapField, obj).build();
        List fields = AccessibleCacheHelp.fields(cls);
        Map map = get(fields.size()).build();
        fields.forEach(field -> {
            if (predicate != null && !predicate.test(field)) return;
            map.put(mapField + "." + field.getName(), AccessibleHelp.getField(field, obj));
        });
        return map.isEmpty() ? Collections.emptyMap() : map;
    }

    public static Map toMap(Object obj) {
        if (obj == null) return Collections.emptyMap();
        try {
            Map objMap = (Map) obj;
            if (objMap.isEmpty()) return Collections.emptyMap();
            return (Map) objMap;
        } catch (Exception e) {
            return Collections.emptyMap();
        }
    }

    public static Map getSimple(Object obj) {
        return getSimple(obj, null, null);
    }

    public static Map getDeep(Object obj, Predicate predicate, String mapField, int maxLevel) {
        if (obj == null) return Collections.emptyMap();
        Map map;
        if (NullHelp.isEmpty(mapField)) {
            if (obj instanceof Map) return toMap(obj);
            Class cls;
            if (obj instanceof Collection || obj instanceof Object[] || AccessibleHelp.isCommon(cls = obj.getClass()))
                return Collections.emptyMap();
            map = new HashMap<>();
            AccessibleCacheHelp.fields(cls).forEach(field -> {
                if (predicate != null && !predicate.test(field)) return;
                getDeep0(map, AccessibleHelp.getField(field, obj), predicate, field.getName(), maxLevel, 0);
            });
        } else {
            map = new HashMap<>();
            getDeep0(map, obj, predicate, mapField, maxLevel, 0);
        }
        return map.isEmpty() ? Collections.emptyMap() : map;
    }

    private static void getDeep0(Map map, Object obj, Predicate predicate, String mapField, int maxLevel, int nested) {
        if ((maxLevel > 0 && nested > maxLevel) || NullHelp.isEmpty(obj)) return;
        Class cls;
        if (obj instanceof Map || obj instanceof Collection || obj instanceof Object[] || AccessibleHelp.isCommon(cls = obj.getClass()))
            map.put(mapField, obj);
        else AccessibleCacheHelp.fields(cls).forEach(field -> {
            if (predicate != null && !predicate.test(field)) return;
            getDeep0(map, AccessibleHelp.getField(field, obj), predicate, mapField + "." + field.getName(), maxLevel, nested + 1);
        });
    }

    public static Map getDeep(Object obj) {
        return getDeep(obj, null, null, 0);
    }

    public static Map groupDeep(Map deepMap) {
        if (NullHelp.isEmpty(deepMap)) return Collections.emptyMap();
        Map result = new HashMap<>();
        deepMap.forEach((k, v) -> groupDeep0(result, k, v));
        return result;
    }

    private static void groupDeep0(Map result, String key, Object value) {
        int idx = key.indexOf(".");
        if (idx == -1) result.put(key, value);
        else {
            String prefix = key.substring(0, idx);
            Map data = (Map) result.get(prefix);
            if (data == null) {
                data = new LinkedHashMap<>();
                result.put(prefix, data);
            }
            groupDeep0(data, key.substring(idx + 1), value);
        }
    }

    /**
     * 获取topN
     */
    public static  Map> topN(Consumer> runnable, Function> group, Predicate filter, BiFunction comparator, boolean concurrent) {
        if (concurrent) {
            Map> queues = new ConcurrentHashMap<>();
            runnable.accept(e -> {
                if (e == null || (filter != null && !filter.test(e))) return;
                ILTag ilTag = group.apply(e);
                K key = ilTag.getData();
                int count = ilTag.getTag();
                ConcurrentRBBSTPQueue queue = queues.computeIfAbsent(key, k -> ConcurrentMinPQueue.of(comparator::apply));
                while (queue.size() > count) queue.poll();
                if (queue.size() == count) {
                    if (comparator.apply(e, queue.peek()) > 0) {
                        queue.poll();
                        queue.offer(e);
                    }
                } else queue.offer(e);
            });
            return explicitCollect(queues.keySet().stream(), Function.identity(), key -> queues.get(key).reverse(), queues.size());
        }

        Map> queues = new HashMap<>();
        runnable.accept(e -> {
            if (e == null || (filter != null && !filter.test(e))) return;
            ILTag ilTag = group.apply(e);
            K key = ilTag.getData();
            int count = ilTag.getTag();
            RBBSTPQueue queue = queues.computeIfAbsent(key, k -> MinPQueue.of(comparator::apply));
            while (queue.size() > count) queue.poll();
            if (queue.size() == count) {
                if (comparator.apply(e, queue.peek()) > 0) {
                    queue.poll();
                    queue.offer(e);
                }
            } else queue.offer(e);
        });
        return explicitCollect(queues.keySet().stream(), Function.identity(), key -> queues.get(key).reverse(), queues.size());
    }

    public static  Map> topN(Consumer> runnable, Function> group, Predicate filter, BiFunction comparator) {
        return topN(runnable, group, filter, comparator, false);
    }

    public static  Map> topN(Supplier supplier, Function> group, Predicate filter, BiFunction comparator, boolean concurrent) {
        return topN(Runner.runnableNoNull(supplier), group, filter, comparator, concurrent);
    }

    public static  Map> topN(Supplier supplier, Function> group, Predicate filter, BiFunction comparator) {
        return topN(supplier, group, filter, comparator, false);
    }

    public static  Map> topN(Collection dataset, Function> group, Predicate filter, BiFunction comparator) {
        return topN(Runner.runnable(dataset), group, filter, comparator, false);
    }

    public static  Map topOne(Consumer> runnable, Function group, Predicate filter, BiFunction comparator, boolean concurrent) {
        if (concurrent) {
            Map> queues = new ConcurrentHashMap<>();
            runnable.accept(e -> {
                if (e == null || (filter != null && !filter.test(e))) return;
                K key = group.apply(e);
                ConcurrentRBBSTPQueue queue = queues.computeIfAbsent(key, k -> ConcurrentMinPQueue.of(comparator::apply));
                while (queue.size() > 1) queue.poll();
                if (queue.size() == 1) {
                    if (comparator.apply(e, queue.peek()) > 0) {
                        queue.poll();
                        queue.offer(e);
                    }
                } else queue.offer(e);
            });
            return explicitCollect(queues.keySet().stream(), Function.identity(), key -> queues.get(key).poll(), queues.size());
        }

        Map> queues = new HashMap<>();
        runnable.accept(e -> {
            if (e == null || (filter != null && !filter.test(e))) return;
            K key = group.apply(e);
            RBBSTPQueue queue = queues.computeIfAbsent(key, k -> MinPQueue.of(comparator::apply));
            while (queue.size() > 1) queue.poll();
            if (queue.size() == 1) {
                if (comparator.apply(e, queue.peek()) > 0) {
                    queue.poll();
                    queue.offer(e);
                }
            } else queue.offer(e);
        });
        return explicitCollect(queues.keySet().stream(), Function.identity(), key -> queues.get(key).poll(), queues.size());
    }

    public static  Map topOne(Consumer> runnable, Function group, Predicate filter, BiFunction comparator) {
        return topOne(runnable, group, filter, comparator, false);
    }

    public static  Map topOne(Supplier supplier, Function group, Predicate filter, BiFunction comparator, boolean concurrent) {
        return topOne(Runner.runnableNoNull(supplier), group, filter, comparator, concurrent);
    }

    public static  Map topOne(Supplier supplier, Function group, Predicate filter, BiFunction comparator) {
        return topOne(supplier, group, filter, comparator, false);
    }

    public static  Map topOne(Collection dataset, Function group, Predicate filter, BiFunction comparator) {
        return topOne(Runner.runnable(dataset), group, filter, comparator, false);
    }

    public static  Map merge(Consumer> runnable, Function group, Predicate filter, BiFunction merger, Function, E> mapper, boolean concurrent) {
        if (concurrent) {
            Map> ilTags = new ConcurrentHashMap<>();
            Lock lock = new ReentrantLock();
            runnable.accept(e -> {
                if (e == null || (filter != null && !filter.test(e))) return;
                K key = group.apply(e);
                ILTag ilTag = ilTags.computeIfAbsent(key, k -> {
                    ILTag res = new ILTag<>();
                    res.setData(e);
                    res.setTag(1);
                    return res;
                });

                lock.lock();
                try {
                    ilTag.setData(merger.apply(ilTag.getData(), e));
                    ilTag.setTag(ilTag.getTag() + 1);
                } finally {
                    lock.unlock();
                }
            });
            return explicitCollect(ilTags.keySet().stream(), Function.identity(), key -> {
                ILTag ilTag = ilTags.get(key);
                return ilTag.getTag() == 1 ? ilTag.getData() : mapper.apply(ilTag);
            }, ilTags.size());
        }

        Map> ilTags = new HashMap<>();
        runnable.accept(e -> {
            if (e == null || (filter != null && !filter.test(e))) return;
            K key = group.apply(e);
            ILTag ilTag = ilTags.computeIfAbsent(key, k -> {
                ILTag res = new ILTag<>();
                res.setData(e);
                res.setTag(1);
                return res;
            });
            ilTag.setData(merger.apply(ilTag.getData(), e));
            ilTag.setTag(ilTag.getTag() + 1);
        });
        return explicitCollect(ilTags.keySet().stream(), Function.identity(), key -> {
            ILTag ilTag = ilTags.get(key);
            return ilTag.getTag() == 1 ? ilTag.getData() : mapper.apply(ilTag);
        }, ilTags.size());
    }

    public static  Map merge(Consumer> runnable, Function group, Predicate filter, BiFunction merger, Function, E> mapper) {
        return merge(runnable, group, filter, merger, mapper, false);
    }

    public static  Map merge(Supplier supplier, Function group, Predicate filter, BiFunction merger, Function, E> mapper, boolean concurrent) {
        return merge(Runner.runnableNoNull(supplier), group, filter, merger, mapper, concurrent);
    }

    public static  Map merge(Supplier supplier, Function group, Predicate filter, BiFunction merger, Function, E> mapper) {
        return merge(supplier, group, filter, merger, mapper, false);
    }

    public static  Map merge(Collection dataset, Function group, Predicate filter, BiFunction merger, Function, E> mapper) {
        return merge(Runner.runnable(dataset), group, filter, merger, mapper, false);
    }
}




// © 2015 - 2025 Weber Informatics LLC (artifact listing page footer).