
cn.wanghaomiao.seimi.def.DefaultLocalQueue Maven / Gradle / Ivy


A crawler framework with distributed support that is efficient both to develop with and to run. Its design blends the strengths of Spring and Scrapy.

package cn.wanghaomiao.seimi.def;
/*
   Copyright 2015 Wang Haomiao

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */

import cn.wanghaomiao.seimi.annotation.Queue;
import cn.wanghaomiao.seimi.core.SeimiQueue;
import cn.wanghaomiao.seimi.struct.Request;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * @author 汪浩淼 [email protected]
 * @since 2015/7/21.
 */
@Queue
public class DefaultLocalQueue implements SeimiQueue {
    private Map<String, LinkedBlockingQueue<Request>> queueMap = new HashMap<>();
    private Map<String, ConcurrentSkipListSet<String>> processedData = new HashMap<>();
    private Logger logger = LoggerFactory.getLogger(getClass());

    @Override
    public Request bPop(String crawlerName) {
        try {
            LinkedBlockingQueue<Request> queue = getQueue(crawlerName);
            // take() blocks until a Request for this crawler becomes available
            return queue.take();
        } catch (InterruptedException e) {
            logger.error(e.getMessage(),e);
        }
        return null;
    }

    @Override
    public boolean push(Request req) {
        try {
            LinkedBlockingQueue<Request> queue = getQueue(req.getCrawlerName());
            queue.put(req);
            return true;
        } catch (InterruptedException e) {
            logger.error(e.getMessage(),e);
        }
        return false;
    }

    @Override
    public long len(String crawlerName) {
        LinkedBlockingQueue<Request> queue = getQueue(crawlerName);
        return queue.size();
    }

    @Override
    public boolean isProcessed(Request req) {
        ConcurrentSkipListSet<String> set = getProcessedSet(req.getCrawlerName());
        // the MD5 hex digest of the URL is used as the dedupe signature
        String sign = DigestUtils.md5Hex(req.getUrl());
        return set.contains(sign);
    }

    @Override
    public void addProcessed(Request req) {
        ConcurrentSkipListSet<String> set = getProcessedSet(req.getCrawlerName());
        String sign = DigestUtils.md5Hex(req.getUrl());
        set.add(sign);
    }

    @Override
    public long totalCrawled(String crawlerName) {
        ConcurrentSkipListSet<String> set = getProcessedSet(crawlerName);
        return set.size();
    }

    public LinkedBlockingQueue<Request> getQueue(String crawlerName) {
        LinkedBlockingQueue<Request> queue = queueMap.get(crawlerName);
        if (queue == null) {
            // lazily create the per-crawler request queue on first use
            queue = new LinkedBlockingQueue<>();
            queueMap.put(crawlerName, queue);
        }
        return queue;
    }

    public ConcurrentSkipListSet<String> getProcessedSet(String crawlerName) {
        ConcurrentSkipListSet<String> set = processedData.get(crawlerName);
        if (set == null) {
            // lazily create the per-crawler set of processed URL digests
            set = new ConcurrentSkipListSet<>();
            processedData.put(crawlerName, set);
        }
        return set;
    }
}
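
For orientation, here is a minimal usage sketch; it is not part of the original source. It exercises the SeimiQueue contract implemented above. Note that this listing only confirms Request's getCrawlerName() and getUrl() getters, so the no-arg constructor and setCrawlerName()/setUrl() setters used below are assumed bean-style counterparts rather than a documented API.

import cn.wanghaomiao.seimi.core.SeimiQueue;
import cn.wanghaomiao.seimi.def.DefaultLocalQueue;
import cn.wanghaomiao.seimi.struct.Request;

public class LocalQueueSketch {
    public static void main(String[] args) {
        SeimiQueue queue = new DefaultLocalQueue();

        // Assumption: Request is a plain bean with a no-arg constructor and
        // setters matching the getters used by DefaultLocalQueue above.
        Request req = new Request();
        req.setCrawlerName("demo");
        req.setUrl("http://example.com/");

        queue.push(req);                                 // enqueue for crawler "demo"
        System.out.println(queue.len("demo"));           // 1
        Request next = queue.bPop("demo");               // blocking take from "demo"'s queue
        queue.addProcessed(next);                        // record md5Hex(url) as processed
        System.out.println(queue.isProcessed(req));      // true: same URL digest
        System.out.println(queue.totalCrawled("demo"));  // 1
    }
}

Because both queueMap and processedData are keyed by crawler name, several crawlers can share a single DefaultLocalQueue instance without their pending requests or dedupe sets interfering. All of that state lives in the local JVM, which is what distinguishes this default from a distributed SeimiQueue implementation.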



