bt.data.DefaultChunkVerifier Maven / Gradle / Ivy

package bt.data;

import bt.BtException;
import bt.data.digest.Digester;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

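/**
 * Default {@link ChunkVerifier} implementation: verifies torrent data by comparing
 * the digest of each chunk's data with the expected checksum, optionally spreading
 * the work across a configurable number of hashing threads.
 */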
public class DefaultChunkVerifier implements ChunkVerifier {
    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultChunkVerifier.class);

    private Digester digester;
    private int numOfHashingThreads;

    public DefaultChunkVerifier(Digester digester, int numOfHashingThreads) {
        this.digester = digester;
        this.numOfHashingThreads = numOfHashingThreads;
    }

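    /**
     * Verifies all chunks, marking each successfully verified piece in the bitfield,
     * and reports whether the torrent data is complete (i.e. no pieces remain).
     */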
    @Override
    public boolean verify(List<ChunkDescriptor> chunks, Bitfield bitfield) {
        if (chunks.size() != bitfield.getPiecesTotal()) {
            throw new IllegalArgumentException("Bitfield has different size than the list of chunks. Bitfield size: " +
                    bitfield.getPiecesTotal() + ", number of chunks: " + chunks.size());
        }

        ChunkDescriptor[] arr = chunks.toArray(new ChunkDescriptor[chunks.size()]);
        if (numOfHashingThreads > 1) {
            collectParallel(arr, bitfield);
        } else {
            createWorker(arr, 0, arr.length, bitfield).run();
        }
        // try to purge all data that was loaded by the verifiers
        System.gc();

        return bitfield.getPiecesRemaining() == 0;
    }

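    /**
     * Verifies a single chunk by comparing the digest of its data with the expected checksum.
     */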
    @Override
    public boolean verify(ChunkDescriptor chunk) {
        byte[] expected = chunk.getChecksum();
        byte[] actual = digester.digest(chunk.getData());
        return Arrays.equals(expected, actual);
    }

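    /**
     * Splits the chunks into contiguous batches and verifies them concurrently
     * on a dedicated fixed-size thread pool; fails if any worker throws.
     */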
    private List<ChunkDescriptor> collectParallel(ChunkDescriptor[] chunks, Bitfield bitfield) {
        int n = numOfHashingThreads;
        ExecutorService workers = Executors.newFixedThreadPool(n);

        List<Future<?>> futures = new ArrayList<>();

        int batchSize = chunks.length / n;
        int i, limit = 0;
        while ((i = limit) < chunks.length) {
            if (futures.size() == n - 1) {
                // assign the remaining bits to the last worker
                limit = chunks.length;
            } else {
                limit = i + batchSize;
            }
            futures.add(workers.submit(createWorker(chunks, i, Math.min(chunks.length, limit), bitfield)));
        }

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Verifying torrent data with {} workers", futures.size());
        }

        Set<Throwable> errors = ConcurrentHashMap.newKeySet();
        futures.forEach(f -> {
            try {
                f.get();
            } catch (Exception e) {
                LOGGER.error("Unexpected error during verification of torrent data", e);
                errors.add(e);
            }
        });

        workers.shutdown();
        while (!workers.isTerminated()) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                throw new BtException("Unexpectedly interrupted");
            }
        }

        if (!errors.isEmpty()) {
            throw new BtException("Failed to verify torrent data:" +
                    errors.stream().map(this::errorToString).reduce(String::concat).get());
        }

        return Arrays.asList(chunks);
    }

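    /**
     * Creates a worker that verifies chunks in the range [from, to)
     * and marks successfully verified pieces in the bitfield.
     */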
    private Runnable createWorker(ChunkDescriptor[] chunks,
                                  int from,
                                  int to,
                                  Bitfield bitfield) {
        return () -> {
            int i = from;
            while (i < to) {
                // optimization to speedup the initial verification of torrent's data
                int[] emptyUnits = new int[]{0};
                chunks[i].getData().visitUnits((u, off, lim) -> {
                    if (u.size() == 0) {
                        emptyUnits[0]++;
                    }
                    return true;
                });

                // if any of this chunk's storage units is empty,
                // then the chunk is neither complete nor verified
                if (emptyUnits[0] == 0) {
                    boolean verified = verify(chunks[i]);
                    if (verified) {
                        bitfield.markVerified(i);
                    }
                }
                i++;
            }
        };
    }

    private String errorToString(Throwable e) {
        StringBuilder buf = new StringBuilder();
        buf.append("\n");

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        PrintStream out = new PrintStream(bos);
        e.printStackTrace(out);

        buf.append(bos.toString());
        return buf.toString();
    }
}
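
For reference, below is a minimal usage sketch. It assumes the caller already has a Digester, the torrent's list of ChunkDescriptors and the matching Bitfield (for example, obtained from a DataDescriptor); the helper class, method and variable names are illustrative, not part of the library.

import bt.data.Bitfield;
import bt.data.ChunkDescriptor;
import bt.data.ChunkVerifier;
import bt.data.DefaultChunkVerifier;
import bt.data.digest.Digester;

import java.util.List;

class VerificationExample {

    // One-off verification pass over already-known chunks and their bitfield.
    static boolean verifyAll(Digester digester, List<ChunkDescriptor> chunks, Bitfield bitfield) {
        // Four hashing threads; a value of 1 (or less) keeps verification on the calling thread.
        ChunkVerifier verifier = new DefaultChunkVerifier(digester, 4);
        return verifier.verify(chunks, bitfield); // true when no pieces remain unverified
    }
}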