文档章节

爬虫

菜鸟上路中
 菜鸟上路中
发布于 2016/04/18 19:00
字数 1027
阅读 11
收藏 0

                       

1. [代码]主程序    

           

?

1
2
3
4
5
6
7
8
9
10
11
public class Demo {
     @SuppressWarnings ( "static-access" )
     public static void main(String[] args) {
         MyCrawler crawler = MyCrawler.getInstance();
         crawler.setUrl( "http://docs.oracle.com/javase/8/docs/api/" );
         crawler.setDir( "/api2" );
         crawler.setDeep( 3 );
         crawler.setThread( 1 );
         crawler.start();
     }
}

                   

                       

                       

2. [代码]数据参数处理    

           

?

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
public class MyCrawler {
     private static String url;
     private static int deep = 4 ;
     private static int topN = 10 ;
     private static int thread = 3 ;
     private static String host;
     private static String dir = System.getProperty( "user.dir" );
     private static MyCrawler crawler = new MyCrawler();
     public static MyCrawler getInstance(){
         return crawler;
     }
     private MyCrawler(){}
     public static int getDeep() {
         return deep;
     }
     public static void setDeep( int deep) {
         MyCrawler.deep = deep;
     }
     public static int getTopN() {
         return topN;
     }
     public static void setTopN( int topN) {
         MyCrawler.topN = topN;
     }
     public static String getUrl() {
         return url;
     }
     public static void setUrl(String url) {
         MyCrawler.url = url;
         if (url.endsWith( ".html" )){
             host = url.substring( 0 , url.lastIndexOf( "/" ));
         } else {
             MyCrawler.host = url;
         }
     }
     public static String getHost() {
         return host;
     }
     public static String getDir() {
         return dir;
     }  
     public void start() {
         UrlObject obj = new UrlObject(url);
         obj.setIdeep( 1 );
         QueryCrawler.push(obj);
         CrawlerWriterFiles writer = new CrawlerWriterFiles();
         writer.open();
     }
     public static void setDir(String dir) {
         MyCrawler.dir += dir+ "\\" ;
     }
     public static int getThread() {
         return MyCrawler.thread;
     }
     public static void setThread( int thread) {
         MyCrawler.thread = thread;
     }
}

                   

                       

                       

3. [代码]url对象    

           

?

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
public class UrlObject {
     private String url;
     private int ideep;
     public UrlObject(String url) {
         this .url = url;
     }
     public String getUrl() {
         return url;
     }
     public void setUrl(String url) {
         this .url = url;
     }
     public int getIdeep() {
         return ideep;
     }
     public void setIdeep( int ideep) {
         this .ideep = ideep;
     }
     public UrlObject(String url, int ideep) {
         this .url = url;
         this .ideep = ideep;
     }  
}

                   

                       

                       

4. [代码]url任务队列    

           

?

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
public class QueryCrawler {
     private static QueryCrawler query = new QueryCrawler();
     private static ArrayList<UrlObject> list = new ArrayList<UrlObject>();
     private QueryCrawler(){}
     public static QueryCrawler getInstance() {
         return query;
     }
     public synchronized static void push(UrlObject obj) {
         list.add(obj);
     }
     public synchronized static void push(List<UrlObject> objs) {
         list.addAll(objs);
     }
     public synchronized static UrlObject pop() {
         if (list.size() < 1 )
             return null ;
         return list.remove( 0 );
     }
}

                   

                       

                       

5. [代码]线程遍历抓取,存储    

           

?

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
/**
 * Starts the crawler worker threads. Each worker loops forever: it pops a
 * URL from the shared queue, fetches it over HTTP, queues any links found
 * on the page, and hands the page body to a short-lived writer thread.
 */
public class CrawlerWriterFiles {
     /**
      * Spawns MyCrawler.getThread() fetcher threads. The threads never
      * terminate; when the queue is empty they sleep 5s and retry.
      */
     public void open() {
         for ( int i = 0 ; i < MyCrawler.getThread(); i++) {
             new Thread( new Runnable() {
                 public void run() {
                     while ( true ){
                         try {
                             // NOTE(review): a new HTTP client is created on every
                             // iteration and never shut down; reusing one client per
                             // thread would avoid leaking connection managers.
                             DefaultHttpClient client = new SystemDefaultHttpClient();
                             final UrlObject obj = QueryCrawler.pop();
                             if (obj != null ){
                                 // NOTE(review): pages are fetched with POST; a GET
                                 // would normally be expected for a crawl — confirm.
                                 HttpPost httpPost = new HttpPost(obj.getUrl());
                                 HttpResponse response = client.execute(httpPost);
                                 final String result = EntityUtils.toString(response.getEntity(), "UTF-8" );
                                 // Only extract links while below the depth limit,
                                 // and never parse stylesheets for links.
                                 if (obj.getIdeep() < MyCrawler.getDeep() && !obj.getUrl().endsWith( ".css" )){
                                     CrawlerUtil.addUrlObject(obj, result);
                                 }
                                 // Write the page to disk on a separate thread so the
                                 // fetcher can immediately move on to the next URL.
                                 new Thread( new Runnable() {
                                     public void run() {
                                         try {
                                             CrawlerUtil.writer(obj.getUrl(), result);
                                         } catch (IOException e) {
                                             System.err.println( "输出错误url:" +obj.getUrl());
                                         }
                                     }
                                 }).start();
                             } else {
                                 // Queue empty: back off before polling again.
                                 System.out.println( "--------暂时没有任务!!" );
                                 Thread.sleep( 5000 );
                             }
                         } catch (Exception e) {
                             // Keep the worker alive on any fetch/parse failure.
                             e.printStackTrace();
                             System.err.println( "error" );
                         }
                     }
                 }

             }).start();
         }
     }
}

                   

                       

                       

6. [代码]抓取url,存储页面数据    

           

?

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
public class CrawlerUtil {
     private static List<String> arrays = new ArrayList<String>();
     private static List<String> filearrays = new ArrayList<String>();
     static {
         String a = ",[]'\"+:;{}" ;
         String[] as = a.split( "" );
         for ( int i = 0 ; i < as.length; i++) {
             if (as[i].equals( "" )){
                 continue ;
             }
             arrays.add(as[i]);
         }
         filearrays.add( "?" );
         filearrays.add( "=" );
         //filearrays.add(".");
     }
     public static void writer(String url, String data) throws IOException {
         File file = null ;
         if (url.toLowerCase().endsWith( ".css" )){
             file = new File(getPathCSS(url));
         } else {
             file = new File(getPathHTML(url));
         }
         System.out.println(file.getPath());
         if (!file.getParentFile().exists()){
             file.getParentFile().mkdirs();
         }
         if (!file.exists()){
             byte [] datab = data.getBytes();
             FileOutputStream f = new FileOutputStream(file);
             f.write(datab, 0 , datab.length);
             f.close();
         }
     }
 
     private static String getPathHTML(String url) {
         if (url.equals(MyCrawler.getHost())){
             url += "index" ;
         }
         if (!url.endsWith( "html" )){
             if (url.endsWith( "/" )){
                 url+= "index.html" ;
             } else if (url.lastIndexOf( "/" ) < url.lastIndexOf( "." )) {
                 url = url.substring( 0 , url.lastIndexOf( "." )) + ".html" ;
             } else {
                 url += ".html" ;
             }
         }
         if (url.startsWith( "http://" )){
             url = MyCrawler.getDir() + url.replace(MyCrawler.getHost(), "" );
         }      
         for ( int i = 0 ; i < filearrays.size(); i++) {
             url = url.replaceAll( "\\" +filearrays.get(i)+ "" , "_" );
         }
         return url;
     }
     private static String getPathCSS(String url) {     
         if (url.startsWith( "http://" )){
             url = MyCrawler.getDir() + url.replace(MyCrawler.getHost(), "" );
         }      
         return url;
     }
 
     public static void addUrlObject(UrlObject obj, String result) {
         //"<a\\s+href\\s*=\\s*\"?(.*?)[\"|>]"
         Pattern pcss =Pattern.compile( "<link.*href\\s*=\\s*\"?(.*?)[\"|>]" ,Pattern.CASE_INSENSITIVE);
         addUrlObjToPattern(pcss, obj, result);
         Pattern pa =Pattern.compile( "<a\\s+href\\s*=\\s*\"?(.*?)[\"|>]" ,Pattern.CASE_INSENSITIVE);
         addUrlObjToPattern(pa, obj, result);
         Pattern pframe =Pattern.compile( "<frame\\s+src\\s*=\\s*\"?(.*?)[\"|>]" ,Pattern.CASE_INSENSITIVE);
         addUrlObjToPattern(pframe, obj, result);
     }
     private static void addUrlObjToPattern(Pattern p, UrlObject obj,
             String result) {
         Matcher m = p.matcher(result);
         ArrayList<UrlObject> urlobjs = new ArrayList<UrlObject>();
         while (m.find()){
             String link = m.group( 1 ).trim();
             //urlobjs.add(new UrlObject(link, 1+obj.getIdeep()));
             if (!isLink(link)){
                 continue ;
             }
             if (link.startsWith(MyCrawler.getHost())){
                 urlobjs.add( new UrlObject(link, 1 +obj.getIdeep()));
             } else if (!link.contains( "://" )){
                 urlobjs.add( new UrlObject(MyCrawler.getHost() + link, 1 +obj.getIdeep()));
             }
         }
         QueryCrawler.push(urlobjs);
         show(urlobjs);
     }
 
     private static void show(ArrayList<UrlObject> urlobjs) {
         /*for (int i = 0; i < urlobjs.size(); i++) {
             System.out.println(urlobjs.get(i).getUrl());
         }*/    
     }
 
     private static boolean isLink(String link) {
         if ( null == link) return false ;
         link = link.replace(MyCrawler.getHost(), "" );
         for ( int i = 0 ; i < arrays.size(); i++) {
             if (link.contains(arrays.get(i))){
                 return false ;
             }
         }
         return true ;
     }
}

                   

                       

                       

7. [图片] 官网.png    

           

                       

                       

                       

8. [图片] 自己抓取得.png    

           

                       


本文转载自:

共有 人打赏支持
菜鸟上路中
粉丝 1
博文 14
码字总数 7161
作品 0
浦东

暂无文章

一个可能的NEO链上安全随机数解决方案

0x00 困境 链上安全随机数生成应该算是一个比较蛋疼的问题,哪怕你的系统再牛逼,合约程序困在小小的虚拟机里,哪怕天大的本事也施展不开。 更悲催的是,交易执行的时候,是在每一个节点都执...

暖冰
今天
1
0
【大福利】极客时间专栏返现二维码大汇总

我已经购买了如下专栏,大家通过我的二维码你可以获得一定额度的返现! 然后,再给大家来个福利,只要你通过我的二维码购买,并且关注了【飞鱼说编程】公众号,可以加我微信或者私聊我,我再...

飞鱼说编程
今天
1
0
Spring5对比Spring3.2源码之容器的基本实现

最近看了《Spring源码深度解析》,该书是基于Spring3.2版本的,其中关于第二章容器的基本实现部分,目前spring5的实现方式已有较大改变。 Spring3.2的实现: public void testSimpleLoad(){...

Ilike_Java
今天
1
0
【王阳明心学语录】-001

1.“破山中贼易,破心中贼难。” 2.“夫万事万物之理不外于吾心。” 3.“心即理也。”“心外无理,心外无物,心外无事。” 4.“人心之得其正者即道心;道心之失其正者即人心。” 5.“无...

卯金刀GG
今天
2
0
OSChina 周三乱弹 —— 我们无法成为野兽

Osc乱弹歌单(2018)请戳(这里) 【今日歌曲】 @ _刚刚好: 霸王洗发水这波很骚 手机党少年们想听歌,请使劲儿戳(这里) hahahahahahh @嘻酱:居然忘了喝水。 让你喝可乐的话, 你准忘不了...

小小编辑
今天
15
0

没有更多内容

加载失败,请刷新页面

加载更多

返回顶部
顶部