
Crawling Page Data with HttpClient + Jsoup

2021-10-22 18:33:01



Why not use a crawler framework?

  I originally used the WebMagic framework, but it kept throwing the TLS protocol-version exception below, and none of the fixes I found online solved it. The alert means the client and server could not agree on a TLS version. Since this was just a hobby project, I dropped the framework and instead pinned the enabled TLS protocols on HttpClient's SSL socket factory myself (see the static block in the code):

javax.net.ssl.SSLException: Received fatal alert: protocol_version

 

Maven dependencies:

        <!-- parse page data -->
        <dependency>
            <groupId>org.jsoup</groupId>
            <artifactId>jsoup</artifactId>
            <version>1.10.2</version>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.2</version>
        </dependency>
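
Before the full crawler, here is a minimal sketch of how the two libraries hand off to each other: HttpClient downloads the raw HTML, Jsoup parses it into a queryable DOM. The URL https://example.com is just a stand-in, not part of the original project:

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class FetchDemo {
    public static void main(String[] args) throws Exception {
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(new HttpGet("https://example.com"))) {
            // HttpClient fetches the page; Jsoup turns the HTML string into a DOM
            String html = EntityUtils.toString(response.getEntity(), "utf-8");
            Document document = Jsoup.parse(html);
            System.out.println(document.title());
        }
    }
}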

 

Code:

import com.maxinhai.world.utils.JdbcUtils;
import org.apache.http.HttpEntity;
import org.apache.http.StatusLine;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.ssl.SSLContexts;
import org.apache.http.util.EntityUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.sql.*;
import java.sql.Date;
import java.util.*;

/**
 * @program: world
 * @description: Yalayi image crawler
 * @author: XinHai.Ma
 * @create: 2021-10-21 20:33
 */
public class YaLaYiImageReptile {

    // Works around: javax.net.ssl.SSLException: Received fatal alert: protocol_version
    private static final PoolingHttpClientConnectionManager HTTP_CLIENT_CONNECTION_MANAGER;
    private static final CloseableHttpClient HTTP_CLIENT;
    static {
        SSLContext ctx = SSLContexts.createSystemDefault();
        // Explicitly offer SSLv2Hello + TLSv1.2 so version negotiation succeeds.
        // ALLOW_ALL_HOSTNAME_VERIFIER skips hostname verification (deprecated and
        // insecure, but tolerable for a throwaway crawler).
        SSLConnectionSocketFactory fac =
                new SSLConnectionSocketFactory(ctx, new String[]{"SSLv2Hello", "TLSv1.2"}, null, SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);

        HTTP_CLIENT_CONNECTION_MANAGER = new PoolingHttpClientConnectionManager(RegistryBuilder
                .<ConnectionSocketFactory> create().register("http", PlainConnectionSocketFactory.getSocketFactory())
                .register("https", fac).build());
        HTTP_CLIENT_CONNECTION_MANAGER.setDefaultMaxPerRoute(100);
        HTTP_CLIENT_CONNECTION_MANAGER.setMaxTotal(200);
        RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(60000).setConnectTimeout(60000)
                .setSocketTimeout(60000).build();

        HTTP_CLIENT = HttpClientBuilder.create().setConnectionManager(HTTP_CLIENT_CONNECTION_MANAGER)
                .setDefaultRequestConfig(requestConfig).build();
    }

    /**
     * Without the socket-factory setup above, this throws:
     * javax.net.ssl.SSLException: Received fatal alert: protocol_version
     * @param args
     */
    public static void main(String[] args) {
        // Pages to crawl; the first gallery page has no "index_N" suffix
        for(int i=1; i<=10; i++) {
            if(i != 1) {
                reptile("https://www.yalayi.com/gallery/index_" + i + ".html");
            } else {
                reptile("https://www.yalayi.com/gallery/");
            }
        }
    }

    private static void reptile(String url) {
        HttpGet httpGet = new HttpGet(url);
        // try-with-resources releases the pooled connection even on non-200 responses
        try (CloseableHttpResponse httpResponse = HTTP_CLIENT.execute(httpGet)) {
            StatusLine statusLine = httpResponse.getStatusLine();
            if(statusLine.getStatusCode() == 200) {
                HttpEntity httpEntity = httpResponse.getEntity();
                String result = EntityUtils.toString(httpEntity, "utf-8");
                //System.out.println(statusLine + "=>" + result);

                Document document = Jsoup.parse(result);
                Elements urlElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > a");
                Elements imageElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > a > img");
                Elements titleElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.text-box > p > a");
                Elements sizeElements = document.select("body > div.main.bgf6 > div.gallery-list.list-box > div > ul > li > div.img-box > em");
                List<Map<String, String>> dataList = new ArrayList<>();
                for (int i = 0; i < titleElements.size(); i++) {
                    Element urlElement = urlElements.get(i);
                    Element imageElement = imageElements.get(i);
                    Element titleElement = titleElements.get(i);
                    Element sizeElement = sizeElements.get(i);
                    String href = urlElement.attr("href");
                    String src = imageElement.attr("src");
                    String title = titleElement.text();
                    String size = sizeElement.text();
                    System.out.println("标题:" + title + " 封面:" + src + " 画质:" + size + " 详情页:" + href);

                    Map<String, String> data = new HashMap<>();
                    data.put("pageUrl", href);
                    data.put("imageUrl", src);
                    data.put("title", title);
                    data.put("size", size);
                    dataList.add(data);
                }
                saveData(dataList);
            }

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Persist the crawled records, skipping titles that already exist.
     * @param dataList
     */
    private static void saveData(List<Map<String, String>> dataList) {
        List<String> titleList = new ArrayList<>();
        for (int i = 0; i < dataList.size(); i++) {
            Map<String, String> data = dataList.get(i);
            String title = data.get("title");
            titleList.add(title);
        }

        // Look up titles that were already crawled and drop the duplicates
        List<String> resultList = query(titleList);
        Iterator<Map<String, String>> iterator = dataList.iterator();
        while (iterator.hasNext()) {
            Map<String, String> next = iterator.next();
            if(resultList.contains(next.get("title"))) {
                iterator.remove();
            }
        }

        // Get the shared database connection (deliberately not closed; see note 1 below)
        Connection connection = JdbcUtils.getConnection();

        // Insert the remaining rows in one batch
        String insert_novel = "INSERT INTO `world`.`yalayi_image_reptile`(`is_active`, `is_delete`, `create_time`, `title`, `size`, `cover_url`, `page_url`) " +
                "VALUES (?, ?, ?, ?, ?, ?, ?)";

        try (PreparedStatement preparedStatement = connection.prepareStatement(insert_novel)) {
            for (int i = 0; i < dataList.size(); i++) {
                Map<String, String> data = dataList.get(i);
                preparedStatement.setInt(1, 0);
                preparedStatement.setInt(2, 0);
                preparedStatement.setDate(3, new Date(System.currentTimeMillis()));
                preparedStatement.setString(4, data.get("title"));
                preparedStatement.setString(5, data.get("size"));
                preparedStatement.setString(6, data.get("imageUrl"));
                preparedStatement.setString(7, data.get("pageUrl"));
                preparedStatement.addBatch();
            }

            int[] row = preparedStatement.executeBatch();
            System.out.println("插入" + row.length + "行");
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }


    /**
     * SELECT ... WHERE title IN (...) range query
     * @param titleList
     * @return
     */
    private static List<String> query(List<String> titleList) {
        List<String> titles = new ArrayList<>();
        String sql = "SELECT title FROM `yalayi_image_reptile` where is_delete=0 and title in (?)";
        String stringFromList = getStringFromList(titleList);
        // Caution: splicing values into the SQL string breaks if a title contains a
        // quote; queryWithParams below sketches a bound-parameter alternative.
        String formatSql = String.format(sql.replace("?", "%s"), stringFromList);
        try {
            Connection connection = JdbcUtils.getConnection();
            PreparedStatement preparedStatement = connection.prepareStatement(formatSql);
            ResultSet resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                String title = resultSet.getString(1);
                titles.add(title);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return titles;
    }

    /**
     * Quote and comma-join the parameters: [a, b] -> 'a','b'
     * @param paramList
     * @return
     */
    private static String getStringFromList(List<String> paramList) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < paramList.size(); i++) {
            sb.append("'").append(paramList.get(i)).append("'");
            if (i < paramList.size() - 1) {
                sb.append(",");
            }
        }
        return sb.toString();
    }
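
    // Alternative sketch (not in the original post): build the IN clause with one "?"
    // per title and bind the values through PreparedStatement, so quotes inside a
    // title can neither break the statement nor inject SQL.
    private static List<String> queryWithParams(List<String> titleList) {
        List<String> titles = new ArrayList<>();
        if (titleList.isEmpty()) {
            return titles;
        }
        // Build "?,?,...,?" -- one placeholder per title
        StringBuilder placeholders = new StringBuilder();
        for (int i = 0; i < titleList.size(); i++) {
            placeholders.append(i == 0 ? "?" : ",?");
        }
        String sql = "SELECT title FROM `yalayi_image_reptile` WHERE is_delete=0 AND title IN (" + placeholders + ")";
        try (PreparedStatement preparedStatement = JdbcUtils.getConnection().prepareStatement(sql)) {
            for (int i = 0; i < titleList.size(); i++) {
                preparedStatement.setString(i + 1, titleList.get(i));
            }
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                while (resultSet.next()) {
                    titles.add(resultSet.getString(1));
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return titles;
    }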

}
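
JdbcUtils is the author's own helper (com.maxinhai.world.utils.JdbcUtils) and its source isn't shown. A minimal sketch of what it might look like, assuming a local MySQL database with mysql-connector-java on the classpath; the URL, user, and password are placeholders:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class JdbcUtils {

    // Placeholders -- point these at your own database
    private static final String URL = "jdbc:mysql://localhost:3306/world?useUnicode=true&characterEncoding=utf8";
    private static final String USER = "root";
    private static final String PASSWORD = "root";

    // One cached connection, matching note 1 below: callers share it and never close it
    private static Connection connection;

    public static synchronized Connection getConnection() {
        try {
            if (connection == null || connection.isClosed()) {
                connection = DriverManager.getConnection(URL, USER, PASSWORD);
            }
            return connection;
        } catch (SQLException e) {
            throw new RuntimeException("Failed to obtain a JDBC connection", e);
        }
    }
}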

 

Ps: 1. The code above never closes the database connection, because other code reuses the same connection object;

  2. If you can't write the CSS for document.select(), don't worry: find the DOM node you want on the page, right-click it and choose Inspect to see the HTML, select the tag you want to crawl, then right-click and Copy → Copy selector. The sketch below shows how such a selector is used.
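
A minimal, self-contained sketch of that workflow. The HTML snippet here is made up to resemble one gallery list item, not the site's real markup:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class SelectorDemo {
    public static void main(String[] args) {
        // Hypothetical stand-in for a gallery list item
        String html = "<div class=\"img-box\">"
                + "<a href=\"/gallery/1.html\"><img src=\"/cover/1.jpg\"></a>"
                + "<em>1920x1080</em></div>";
        Document document = Jsoup.parse(html);
        // "div.img-box > a" is the kind of string DevTools' Copy selector produces
        Element link = document.select("div.img-box > a").first();
        System.out.println(link.attr("href"));                                   // /gallery/1.html
        System.out.println(link.select("img").first().attr("src"));              // /cover/1.jpg
        System.out.println(document.select("div.img-box > em").first().text());  // 1920x1080
    }
}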

Source: https://www.cnblogs.com/mxh-java/p/15440227.html
