Institute of Geology: Subsidence Monitoring Network Construction Project
chenhuan
2024-05-16 0fdd42e318f51f9e3c6581473416af1cca69877f
package com.javaweb.spider.backend;
 
import com.javaweb.common.core.domain.ICallBack;
import com.javaweb.spider.domain.SpiderConfig;
import com.javaweb.spider.processer.AbstractProcessor;
import com.javaweb.spider.processer.DefalutProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
 
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
 
/**
 * Runs a crawl task on a background thread from a pre-built spider configuration;
 * an optional callback receives the crawled data when the task finishes.
 */
public class FastSpiderBackendService extends Thread {
    protected final Logger logger = LoggerFactory.getLogger(FastSpiderBackendService.class);
    private ICallBack callBack;
    private SpiderConfig config;
 
    /** Builds the service with a spider configuration and no completion callback. */
    public FastSpiderBackendService(SpiderConfig config) {
        this.config = config;
    }

    /** Builds the service with a spider configuration and a callback invoked when crawling completes. */
    public FastSpiderBackendService(SpiderConfig config, ICallBack callBack) {
        this.config = config;
        this.callBack = callBack;
    }
    @Override
    public void run() {
        AbstractProcessor processor = new DefalutProcessor(config);
        Date start = new Date();
        logger.info(">>>>>>>>>>>> Spider task started >>>>>>>>>>>>");
        // Run the crawl; the processor returns one LinkedHashMap of field name -> value per crawled record
        CopyOnWriteArrayList<LinkedHashMap<String, String>> datas = processor.execute();
        Date end = new Date();
        long timeSeconds = (end.getTime() - start.getTime()) / 1000;

        // If a callback was supplied, hand the crawled data back to the caller
        if (callBack != null) {
            Map<String, CopyOnWriteArrayList<LinkedHashMap<String, String>>> rmap = new HashMap<>();
            rmap.put("datas", datas);
            callBack.setParams(rmap);
            callBack.onSuccess();
        }
        logger.info(">>>>>>>>>>>> Spider task finished, took {} seconds, crawled {} records in total", timeSeconds, datas.size());
    }
}
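
A minimal usage sketch follows. The setParams/onSuccess calls match the two calls FastSpiderBackendService makes on the callback above; the exact ICallBack interface shape and the SpiderConfig setters are assumptions about the surrounding project, not confirmed by this file.

// Hypothetical usage sketch: assumes ICallBack declares only setParams(Map) and onSuccess().
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

import com.javaweb.common.core.domain.ICallBack;
import com.javaweb.spider.backend.FastSpiderBackendService;
import com.javaweb.spider.domain.SpiderConfig;

public class SpiderDemo {

    public static void main(String[] args) {
        // Configure start URL, extraction rules, etc.; the setters are not shown in this file
        SpiderConfig config = new SpiderConfig();

        ICallBack callBack = new ICallBack() {
            private Map<String, CopyOnWriteArrayList<LinkedHashMap<String, String>>> params;

            public void setParams(Map params) { // signature assumed from the call site above
                this.params = params;
            }

            public void onSuccess() {
                // "datas" is the key FastSpiderBackendService uses when handing results back
                CopyOnWriteArrayList<LinkedHashMap<String, String>> datas = params.get("datas");
                System.out.println("Crawled " + datas.size() + " records");
            }
        };

        // FastSpiderBackendService extends Thread, so start() runs the crawl asynchronously
        new FastSpiderBackendService(config, callBack).start();
    }
}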