Copied from: http://www.ayulong.cn/types/2  Video tutorial: https://www.bilibili.com/video/BV1cE411u7RA?p=1
The knowledge covered earlier looks fairly scattered, and on my first contact with crawlers I could not help wondering whether it really had anything to do with crawling. In fact it is exactly the core of a crawler: once those pieces are stitched together into a program that fetches and saves data from the internet, you have a crawler.
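To make that concrete, here is a minimal sketch of the fetch-parse-save loop using only Jsoup, separate from the project built below; the URL and output file name are just examples:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

import java.nio.file.Files;
import java.nio.file.Paths;

public class MiniCrawler {

    public static void main(String[] args) throws Exception {
        // Fetch: download a page (example URL)
        Document doc = Jsoup.connect("https://www.jd.com/").get();
        // Parse: pull something out of the HTML
        String title = doc.title();
        // Save: persist what was extracted (here a text file instead of a database)
        Files.write(Paths.get("crawl-result.txt"), title.getBytes("UTF-8"));
        System.out.println("saved title: " + title);
    }
}

The project below does exactly this, only with HttpClient for fetching, Jsoup for parsing, and Spring Data JPA plus MySQL for saving.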
-- Create the crawler database first, then create the table
CREATE TABLE `jd_item` (
  `id`      bigint(10)   NOT NULL AUTO_INCREMENT COMMENT 'primary key id',
  `spu`     bigint(15)   DEFAULT NULL COMMENT 'spu (product group) id',
  `sku`     bigint(15)   DEFAULT NULL COMMENT 'sku (smallest stock-keeping unit) id',
  `title`   varchar(100) DEFAULT NULL COMMENT 'product title',
  `price`   bigint(10)   DEFAULT NULL COMMENT 'product price',
  `pic`     varchar(200) DEFAULT NULL COMMENT 'product image',
  `url`     varchar(200) DEFAULT NULL COMMENT 'product detail URL',
  `created` datetime     DEFAULT NULL COMMENT 'created time',
  `updated` datetime     DEFAULT NULL COMMENT 'updated time',
  PRIMARY KEY (`id`),
  KEY `sku` (`sku`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='JD product table';
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.2.RELEASE</version>
    </parent>

    <groupId>cn.ayulong</groupId>
    <artifactId>ayulong-crawler-jd</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <!-- SpringMVC -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <!-- Spring Data JPA -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
        </dependency>
        <!-- MySQL connector -->
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.11</version>
        </dependency>
        <!-- HttpClient -->
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
        </dependency>
        <!-- Jsoup -->
        <dependency>
            <groupId>org.jsoup</groupId>
            <artifactId>jsoup</artifactId>
            <version>1.10.3</version>
        </dependency>
        <!-- Apache Commons utilities -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>
    </dependencies>
</project>
spring:
  # Database configuration (MySQL 8)
  datasource:
    driver-class-name: com.mysql.cj.jdbc.Driver
    url: jdbc:mysql:///crawler?useSSL=false&serverTimezone=Asia/Shanghai
    username: root
    password: mima
  # JPA configuration
  jpa:
    database: MySQL
    show-sql: true
package com.ayulong.jd.pojo;

import javax.persistence.*;
import java.util.Date;

@Entity
@Table(name = "jd_item")
public class Item {

    // Primary key
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
    // Standard product unit (product group)
    private Long spu;
    // Stock keeping unit (smallest product category unit)
    private Long sku;
    // Product title
    private String title;
    // Product price
    private Double price;
    // Product image
    private String pic;
    // Product detail URL
    private String url;
    // Created time
    private Date created;
    // Updated time
    private Date updated;

    // Getters and setters omitted; generate them in the IDE
}
Create a DAO interface that extends the JPA repository interface
package com.ayulong.jd.dao;

import com.ayulong.jd.pojo.Item;
import org.springframework.data.jpa.repository.JpaRepository;

// DAO interface for Item
public interface ItemDao extends JpaRepository<Item, Long> {
}
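JpaRepository already gives ItemDao the save and findAll(Example) methods used by the service layer below, so no methods need to be declared. Purely as an optional variant that is not part of the tutorial, a derived query method could express the "already saved?" check done later in ItemTask; the interface and method name here are hypothetical:

package com.ayulong.jd.dao;

import com.ayulong.jd.pojo.Item;
import org.springframework.data.jpa.repository.JpaRepository;

import java.util.List;

// Hypothetical alternative DAO: Spring Data derives the query from the method name
public interface ItemDaoBySku extends JpaRepository<Item, Long> {

    // Roughly SELECT ... FROM jd_item WHERE sku = ?
    List<Item> findBySku(Long sku);
}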
Create the Service interface
package com.ayulong.jd.service;

import com.ayulong.jd.pojo.Item;

import java.util.List;

// Service interface for Item
public interface ItemService {

    /**
     * Save a product
     * @param item
     */
    public void save(Item item);

    /**
     * Query products by example conditions
     * @param item
     * @return
     */
    public List<Item> findAll(Item item);
}
Implement the Service interface
package com.ayulong.jd.service.impl;

import com.ayulong.jd.dao.ItemDao;
import com.ayulong.jd.pojo.Item;
import com.ayulong.jd.service.ItemService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Example;
import org.springframework.stereotype.Service;

import java.util.List;

@Service
public class ItemServiceImpl implements ItemService {

    @Autowired
    private ItemDao itemDao;

    @Override
    public void save(Item item) {
        this.itemDao.save(item);
    }

    @Override
    public List<Item> findAll(Item item) {
        // Declare the query conditions; the non-null fields of the probe are matched
        Example<Item> example = Example.of(item);
        // Query the data with the example conditions
        List<Item> list = this.itemDao.findAll(example);
        return list;
    }
}
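A minimal usage sketch of the query-by-example lookup above, assuming an ItemService instance is available; the helper class is hypothetical and only illustrates how ItemTask later checks whether a sku was already stored:

package com.ayulong.jd.service;

import com.ayulong.jd.pojo.Item;

import java.util.List;

// Hypothetical helper showing how the query-by-example lookup is used
public class ItemLookupExample {

    public static boolean alreadySaved(ItemService itemService, long sku) {
        // Only the non-null fields of the probe (here sku) become query conditions
        Item probe = new Item();
        probe.setSku(sku);
        List<Item> matches = itemService.findAll(probe);
        return !matches.isEmpty();
    }
}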
package com.ayulong.jd;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;

@SpringBootApplication
// Scheduled tasks are used later, so scheduling must be enabled with this annotation
@EnableScheduling
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
Note: before running, create the directory where the images will be stored; the code below writes to an absolute path directly.
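If you prefer not to create the folder by hand, a minimal sketch (not part of the original code) that creates it once at startup; the path is the same absolute path hard-coded in HttpUtils below:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class ImageDirInit {

    public static void main(String[] args) throws IOException {
        // Creates the image directory and any missing parents; does nothing if it already exists
        Files.createDirectories(Paths.get(
                "D:\\ideaWorkspace\\pachong\\ayulong-crawler-jd\\src\\main\\resources\\static\\images"));
    }
}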
package com.ayulong.jd.util;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.UUID;

@Component
public class HttpUtils {

    private PoolingHttpClientConnectionManager cm;

    public HttpUtils() {
        this.cm = new PoolingHttpClientConnectionManager();
        // Maximum number of connections in the pool
        this.cm.setMaxTotal(100);
        // Maximum number of connections per host
        this.cm.setDefaultMaxPerRoute(10);
    }

    /**
     * Download the page at the given URL
     * @param url
     * @return page content
     */
    public String doGetHtml(String url) {
        // Get an HttpClient backed by the connection pool
        CloseableHttpClient httpClient = HttpClients.custom().setConnectionManager(this.cm).build();

        // Create the HttpGet request and set the URL
        HttpGet httpGet = new HttpGet(url);

        // Set the request configuration (timeouts)
        httpGet.setConfig(getConfig());

        // Set request headers to look like a normal browser
        setHeaders(httpGet);

        CloseableHttpResponse response = null;
        try {
            // Execute the request and get the response
            response = httpClient.execute(httpGet);

            // Parse the response and return the result
            if (response.getStatusLine().getStatusCode() == 200) {
                // EntityUtils can only be used if the response entity is not null
                if (response.getEntity() != null) {
                    String content = EntityUtils.toString(response.getEntity(), "utf8");
                    return content;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close the response
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        // Return an empty string on failure
        return "";
    }

    /**
     * Download an image
     * @param url
     * @return image file name
     */
    public String doGetImage(String url) {
        // Get an HttpClient backed by the connection pool
        CloseableHttpClient httpClient = HttpClients.custom().setConnectionManager(this.cm).build();

        // Create the HttpGet request and set the URL
        HttpGet httpGet = new HttpGet(url);

        // Set the request configuration (timeouts)
        httpGet.setConfig(getConfig());

        // Set request headers to look like a normal browser
        setHeaders(httpGet);

        CloseableHttpResponse response = null;
        try {
            // Execute the request and get the response
            response = httpClient.execute(httpGet);

            // Parse the response and return the result
            if (response.getStatusLine().getStatusCode() == 200) {
                // EntityUtils can only be used if the response entity is not null
                if (response.getEntity() != null) {
                    // Get the image extension
                    String extName = url.substring(url.lastIndexOf("."));
                    // Rename the image with a random UUID
                    String picName = UUID.randomUUID().toString() + extName;

                    // Download the image to disk (try-with-resources closes the stream)
                    try (OutputStream outputStream = new FileOutputStream(new File(
                            "D:\\ideaWorkspace\\pachong\\ayulong-crawler-jd\\src\\main\\resources\\static\\images\\" + picName))) {
                        response.getEntity().writeTo(outputStream);
                    }

                    // Return the image file name
                    return picName;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close the response
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        // Return an empty string if the download failed
        return "";
    }

    // Request configuration
    private RequestConfig getConfig() {
        RequestConfig config = RequestConfig.custom()
                .setConnectTimeout(1000)            // maximum time to establish a connection
                .setConnectionRequestTimeout(500)   // maximum time to obtain a connection from the pool
                .setSocketTimeout(10000)            // maximum time for data transfer
                .build();
        return config;
    }

    // Request headers, pretending to be a normal browser
    private void setHeaders(HttpGet httpGet) {
        httpGet.setHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:6.0.2) Gecko/20100101 Firefox/6.0.2");
    }
}
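Before wiring HttpUtils into the crawl task, it can be smoke-tested in isolation. This runner class is a hypothetical addition, not part of the tutorial; it executes once at startup and reports whether a page could be downloaded (the URL is only an example):

package com.ayulong.jd.util;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

// Hypothetical smoke test for HttpUtils
@Component
public class HttpUtilsSmokeTest implements CommandLineRunner {

    @Autowired
    private HttpUtils httpUtils;

    @Override
    public void run(String... args) {
        // An empty string means the request failed or did not return HTTP 200
        String html = httpUtils.doGetHtml("https://www.jd.com/");
        System.out.println("downloaded " + html.length() + " characters");
    }
}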
Note: when crawling with HttpClient, set request headers to disguise the client as a normal browser; otherwise the site may block the requests.
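Beyond the User-Agent set in setHeaders above, a few more browser-like headers can be added the same way. The helper below is only illustrative, and the specific values are assumptions rather than anything the target site requires:

import org.apache.http.client.methods.HttpGet;

public class HeaderExamples {

    // Illustrative extra headers; setHeader overwrites any existing header with the same name
    static void addBrowserHeaders(HttpGet httpGet) {
        httpGet.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");
        httpGet.setHeader("Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8");
        httpGet.setHeader("Referer", "https://www.jd.com/");
    }
}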
package com.ayulong.jd.task;

import com.ayulong.jd.pojo.Item;
import com.ayulong.jd.service.ItemService;
import com.ayulong.jd.util.HttpUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.Date;
import java.util.List;

@Component
public class ItemTask {

    @Autowired
    private HttpUtils httpUtils;

    @Autowired
    private ItemService itemService;

    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Wait this long after a download task finishes before starting the next one
    @Scheduled(fixedDelay = 100 * 1000)
    public void itemTask() throws Exception {
        // Initial search URL to parse (keyword "手机", i.e. mobile phone; the page number is appended below)
        String url = "https://search.jd.com/Search?keyword=%E6%89%8B%E6%9C%BA&enc=utf-8"
                + "&pvid=f112521d94c04007826aa41adcbb0658&page=";

        // Walk through the search result pages for mobile phones
        for (int i = 1; i < 2; i = i + 2) {
            String html = httpUtils.doGetHtml(url + i);
            // Parse the page, extract the products and store them
            this.parse(html);
        }

        System.out.println("Finished crawling mobile phone data!");
    }

    // Parse the page, extract the products and store them
    private void parse(String html) throws Exception {
        // Parse the HTML into a Document
        Document doc = Jsoup.parse(html);

        // Get the spu elements
        Elements spuEles = doc.select("div#J_goodsList > ul > li");
        for (Element spuEle : spuEles) {
            // Skip ads, which have no data-spu value
            if (StringUtils.isNotEmpty(spuEle.attr("data-spu"))) {
                // Get the spu
                long spu = Long.parseLong(spuEle.attr("data-spu"));

                // Get the sku elements
                Elements skuEles = spuEle.select("li.ps-item");
                for (Element skuEle : skuEles) {
                    // Get the sku
                    long sku = Long.parseLong(skuEle.select("[data-sku]").first().attr("data-sku"));

                    // Look up the product by sku
                    Item item = new Item();
                    item.setSku(sku);
                    List<Item> list = this.itemService.findAll(item);
                    if (list.size() > 0) {
                        // The product already exists, skip it
                        continue;
                    }

                    // Set the product's spu
                    item.setSpu(spu);

                    // Product detail URL
                    String itemUrl = "https://item.jd.com/" + sku + ".html";
                    item.setUrl(itemUrl);

                    // Product image
                    String picUrl = "https:" + skuEle.select("img[data-sku]").first().attr("data-lazy-img");
                    picUrl = picUrl.replace("/n7/", "/n0/");
                    String picName = this.httpUtils.doGetImage(picUrl);
                    item.setPic(picName);

                    // Product price
                    String priceJson = this.httpUtils.doGetHtml("https://p.3.cn/prices/mgets?skuIds=J_" + sku);
                    double price = MAPPER.readTree(priceJson).get(0).get("p").asDouble();
                    item.setPrice(price);

                    // Product title
                    String itemInfo = this.httpUtils.doGetHtml(item.getUrl());
                    String title = Jsoup.parse(itemInfo).select("div.sku-name").text();
                    item.setTitle(title);

                    item.setCreated(new Date());
                    item.setUpdated(item.getCreated());

                    // Save the product to the database
                    this.itemService.save(item);
                }
            }
        }
    }
}
Original post: https://www.cnblogs.com/zouhong/p/13871510.html