
Crawler + Elasticsearch + Vue: a small demo of data crawling and search


Overview

This post uses jsoup to parse web pages and crawl the data, indexes all of it into Elasticsearch (ES) for search, exchanges data between the front end and back end with axios, and renders the results with Vue.
The sample code has been pushed to GitHub.

Add the dependencies

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.5.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.cy</groupId>
    <artifactId>elsticsearch-jd</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>elsticsearch-jd</name>
    <description>Demo project for Spring Boot</description>

    <properties>
        <java.version>1.8</java.version>
        <elasticsearch.version>7.5.2</elasticsearch.version>
    </properties>

    <dependencies>
        <!--jsoup解析网页-->
        <dependency>
            <groupId>org.jsoup</groupId>
            <artifactId>jsoup</artifactId>
            <version>1.10.2</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-thymeleaf</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-devtools</artifactId>
            <scope>runtime</scope>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-configuration-processor</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
            <exclusions>
                <exclusion>
                    <groupId>org.junit.vintage</groupId>
                    <artifactId>junit-vintage-engine</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>

For installing Elasticsearch, see:
1. Install Elasticsearch with Docker
2. Download Elasticsearch from the official website

Write the ES configuration class

@Configuration
public class ESConfig {

    @Bean
    public RestHighLevelClient restHighLevelClient() {
        return new RestHighLevelClient(RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")));
    }
}
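
With the client registered as a Spring bean, it is worth confirming the connection before indexing anything. Below is a minimal standalone sketch (not from the original post) that assumes a single ES node listening on 127.0.0.1:9200:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class EsPingCheck {
    public static void main(String[] args) throws Exception {
        // Same connection settings as ESConfig above (illustrative only)
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")))) {
            // ping() returns true when the cluster responds
            System.out.println("ES reachable: " + client.ping(RequestOptions.DEFAULT));
        }
    }
}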

Extract a utility class to parse the page

@Component
public class HtmlParseUtil {
    public static void main(String[] args) throws Exception {
        new HtmlParseUtil().parseJD("编程").forEach(System.out::println);
    }

    public List<Content> parseJD(String keyWords)throws Exception{
        // Build the request URL; "&enc=utf-8" prevents garbled Chinese characters
        String url = "https://search.jd.com/Search?keyword=" + keyWords +"&enc=utf-8";
        // Parse the page and return a jsoup Document (DOM)
        Document document = Jsoup.parse(new URL(url), 30000);
        Element element = document.getElementById("J_goodsList");
        Elements elements = element.getElementsByTag("li");
        // Pick selectors/classes that uniquely identify each field
        List<Content> goodList = new ArrayList<>();
        for (Element el : elements) {
            String img = el.getElementsByTag("img").eq(0).attr("src");
            String price = el.select("div.p-price > strong").eq(0).text();
            String title = el.getElementsByClass("p-name").eq(0).text();
            String shop = el.getElementsByClass("p-shop").eq(0).text();
            if (shop.isEmpty()){  // workaround: some products use the p-shopnum class instead of p-shop
                shop = el.getElementsByClass("p-shopnum").eq(0).text();
            }
            Content content = new Content(title, img, price, shop);  // argument order must match the field order in Content
            goodList.add(content);
        }
        return goodList;
    }
}
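
If the plain URL fetch above returns a page without the J_goodsList container (the site may serve a different page to non-browser clients), fetching with a browser-like User-Agent usually helps. A small sketch, separate from the utility class and using jsoup's connect API; the keyword and User-Agent string are only placeholders:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class JsoupFetchSketch {
    public static void main(String[] args) throws Exception {
        String url = "https://search.jd.com/Search?keyword=java&enc=utf-8";
        Document document = Jsoup.connect(url)
                .userAgent("Mozilla/5.0")   // present a browser-like User-Agent
                .timeout(30000)             // same 30-second timeout as parseJD
                .get();
        // Print the page title to verify a real search-result page came back
        System.out.println(document.title());
    }
}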

A unified data type

@Data
@AllArgsConstructor
@NoArgsConstructor
public class Content {
    private String title;
    private String img;
    private String price;
    private String shop;
}
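
Each Content object is serialized with fastjson before being indexed, so the ES document carries exactly the title, img, price and shop fields. A quick sketch with made-up values shows what ends up in the index:

import com.alibaba.fastjson.JSON;

public class ContentJsonSketch {
    public static void main(String[] args) {
        // Illustrative values only
        Content content = new Content("Thinking in Java", "https://img.example.com/1.jpg", "99.00", "Example Bookstore");
        // Prints a JSON object with title/img/price/shop keys, i.e. what gets stored in jd_goods
        System.out.println(JSON.toJSONString(content));
    }
}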

Write the service layer

@Service
public class ContentService {

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    // Index the crawled data into ES
    public Boolean parseContent(String Keywords) throws Exception {
        /**
         * Create the jd_goods index first, either from code or with a manual PUT request:
         * PUT /jd_goods
         * {
         *   "settings": {
         *     "number_of_shards": "5",
         *     "number_of_replicas": "1"
         *   },
         *   "mappings": {
         *     "properties": {
         *       "img":   { "type": "text", "analyzer": "ik_max_word" },
         *       "title": { "type": "text", "analyzer": "ik_max_word" },
         *       "price": { "type": "text", "analyzer": "ik_max_word" },
         *       "shop":  { "type": "text", "analyzer": "ik_max_word" }
         *     }
         *   }
         * }
         */
        List<Content> contents = new HtmlParseUtil().parseJD(Keywords);
        // Bulk-index the crawled data into ES
        BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.timeout("2m");
        for (int i = 0; i <contents.size() ; i++) {
            bulkRequest.add(new IndexRequest("jd_goods").source(JSON.toJSONString(contents.get(i)), XContentType.JSON));

        }
        BulkResponse bulk = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
        return !bulk.hasFailures();
    }

    // Query ES to implement the paged search
    public List<Map<String,Object>> searchPage(String keyword,int pageNo,int pageSize) throws IOException {
        if (pageNo <= 1){
            pageNo = 1;
        }

        // Build the search request against the jd_goods index
        SearchRequest searchRequest = new SearchRequest("jd_goods");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

        // Pagination: convert the 1-based page number into a result offset
        searchSourceBuilder.from((pageNo - 1) * pageSize);
        searchSourceBuilder.size(pageSize);

        // Exact (term) match on the title field
        TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("title", keyword);
        searchSourceBuilder.query(termQueryBuilder);
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));

        // Execute the search
        searchRequest.source(searchSourceBuilder);
        SearchResponse search = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);

        // Collect the source of each hit
        ArrayList<Map<String,Object>> list = new ArrayList<>();
        for (SearchHit documentFields : search.getHits().getHits()) {
            list.add(documentFields.getSourceAsMap());
        }
        return list;
    }


    // Search with highlighted results
    public List<Map<String,Object>> searchPageHighlightBuilder(String keyword,int pageNo,int pageSize) throws IOException {
        if (pageNo <= 1){
            pageNo = 1;
        }
        keyword= URLDecoder.decode(keyword, "UTF-8");
        // Build the search request against the jd_goods index
        SearchRequest searchRequest = new SearchRequest("jd_goods");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        // Pagination: convert the 1-based page number into a result offset
        searchSourceBuilder.from((pageNo - 1) * pageSize);
        searchSourceBuilder.size(pageSize);
        // Exact (term) match on the title field
        TermQueryBuilder termQueryBuilder = QueryBuilders.termQuery("title", keyword);
        searchSourceBuilder.query(termQueryBuilder);
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));

        // Highlight the title field
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        highlightBuilder.field("title");
        highlightBuilder.requireFieldMatch(true);  // only highlight the field the query actually matched
        highlightBuilder.preTags("<span style='color:red'>");
        highlightBuilder.postTags("</span>");
        searchSourceBuilder.highlighter(highlightBuilder);

        // Execute the search
        searchRequest.source(searchSourceBuilder);
        SearchResponse search = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);

        // Parse the hits
        ArrayList<Map<String,Object>> list = new ArrayList<>();
        for (SearchHit documentFields : search.getHits().getHits()) {

            // Swap the plain title for the highlighted fragments, if any
            Map<String, HighlightField> highlightFields = documentFields.getHighlightFields();
            HighlightField title = highlightFields.get("title");
            Map<String, Object> sourceAsMap = documentFields.getSourceAsMap();
            if (title != null){
                Text[] fragments = title.fragments();
                String n_title = "";
                for (Text text : fragments) {
                    n_title += text;
                }
                sourceAsMap.put("title",n_title);
            }
            list.add(sourceAsMap);
        }
        return list;
    }
}
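
The mapping sketched in the comment of parseContent can also be created from code before the first bulk insert. A minimal sketch (not part of the original post) using the same RestHighLevelClient; it assumes the ik_max_word analyzer plugin is installed on the node:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.xcontent.XContentType;

public class CreateJdGoodsIndex {
    public static void main(String[] args) throws Exception {
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("127.0.0.1", 9200, "http")))) {
            CreateIndexRequest request = new CreateIndexRequest("jd_goods");
            // Settings and mappings mirror the PUT body in the parseContent comment
            request.source("{"
                    + "\"settings\":{\"number_of_shards\":\"5\",\"number_of_replicas\":\"1\"},"
                    + "\"mappings\":{\"properties\":{"
                    + "\"img\":{\"type\":\"text\",\"analyzer\":\"ik_max_word\"},"
                    + "\"title\":{\"type\":\"text\",\"analyzer\":\"ik_max_word\"},"
                    + "\"price\":{\"type\":\"text\",\"analyzer\":\"ik_max_word\"},"
                    + "\"shop\":{\"type\":\"text\",\"analyzer\":\"ik_max_word\"}"
                    + "}}}", XContentType.JSON);
            client.indices().create(request, RequestOptions.DEFAULT);
            System.out.println("jd_goods index created");
        }
    }
}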


Write the controller layer

@RestController
public class ContentController {

    @Autowired
    private ContentService contentService;

    @GetMapping("/parse/{keyword}")
    public Boolean parse(@PathVariable ("keyword")String keyword) throws Exception {
        return contentService.parseContent(keyword);
    }

    @GetMapping("/search/{keyword}/{pageNo}/{pageSize}")
    public List<Map<String,Object>> search(@PathVariable ("keyword")String keyword,
                                           @PathVariable ("pageNo")int pageNo,
                                           @PathVariable("pageSize") int pageSize) throws Exception {
        contentService.parseContent(keyword);  // crawl first so the keyword has fresh data in ES before searching
        return contentService.searchPageHighlightBuilder(keyword,pageNo,pageSize);
    }
}
-------------------------------------------------------------------
@Controller
public class IndexController {


    @GetMapping({"/","/index"})
    public String index(){
        return "index";
    }
}
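
The post does not show the Spring Boot entry class; a standard one (class name assumed from the elsticsearch-jd artifact, adjust to the actual package) would look like this:

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class ElsticsearchJdApplication {
    public static void main(String[] args) {
        SpringApplication.run(ElsticsearchJdApplication.class, args);
    }
}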

Write the front end

<!DOCTYPE html>
<html xmlns:th="http://www.thymeleaf.org">

<head>
    <meta charset="utf-8"/>
    <title>elasticsearch-jd-demo</title>
    <link rel="stylesheet" th:href="@{/css/style.css}"/>
</head>

<body class="pg">
<!-- Vue root element -->
<div class="page" id="app">
    <div id="mallPage" class=" mallist tmall- page-not-market ">

        <!-- Header: search bar -->
        <div id="header" class=" header-list-app">
            <div class="headerLayout">
                <div class="headerCon ">

                    <div class="header-extra" style="width: 600px">
                        <!-- Search box -->
                        <div id="mallSearch" class="mall-search" >
                            <form name="searchTop" class="mallSearch-form clearfix">
                                <fieldset>
                                    <div class="mallSearch-input clearfix">
                                        <div class="s-combobox" id="s-combobox-685">
                                            <div class="s-combobox-input-wrap">
                                                <!-- Bind the search keyword -->
                                                <input v-model="keyword" type="text" autocomplete="off" value="dd" id="mq"
                                                       class="s-combobox-input" aria-haspopup="true">
                                            </div>
                                        </div>
                                        <!-- Bind the search event -->
                                        <button type="submit" @click.prevent="searchKey()" id="searchbtn">Search</button>
                                    </div>
                                </fieldset>
                            </form>

                        </div>

                    </div>
                    <!-- Crawl data -->
                    <div style="float: right;margin-top: -45px">
                        <form >
                            <!-- Bind the crawl keyword -->
                            <input v-model="keyword1" type="text" autocomplete="off"  >
                            <!-- Bind the crawl event -->
                            <button type="submit" @click.prevent="scrapyKey()" >Crawl</button>
                        </form>
                    </div>

                </div>
            </div>
        </div>

        <!-- Product listing -->
        <div id="content">
            <div class="main">
                <!-- Product cards -->
                <div class="view grid-nosku">

                    <div class="product" v-for="result in results">
                        <div class="product-iWrap">
                            <!-- Product image -->
                            <div class="productImg-wrap">
                                <a class="productImg">
                                    <img :src="result.img">
                                </a>
                            </div>
                            <!-- Price -->
                            <p class="productPrice">
                                <em>{{result.price}}</em>
                            </p>
                            <!-- Title (v-html so the highlight span renders) -->
                            <p class="productTitle">
                                <a v-html="result.title"></a>
                            </p>
                            <!-- Shop name -->
                            <div class="productShop">
                                <span>{{result.shop}} </span>
                            </div>
                        </div>
                    </div>
                </div>
            </div>
        </div>
    </div>
</div>

<!-- Import the JS dependencies -->
<script th:src="@{/js/axios.min.js}"></script>
<script th:src="@{/js/vue.min.js}"></script>

<script>
    new Vue({
        el: '#app',
        data: {
            keyword: '',
            keyword1: '',
            results: []
        },
        methods: {
            // Search event
            searchKey(){
                let keyword = this.keyword;
                console.log(keyword);
                axios.get('search/'+keyword+"/1/30").then(response=>{
                    console.log(response);
                    this.results = response.data;
                })
            },
            // Crawl event: index the keyword, then reload the page
            scrapyKey(){
                let keyword1 = this.keyword1;
                // console.log(keyword);
                axios.get('parse/'+keyword1).then(response=>{
                    location.reload();
                })
            }

        }
    })
</script>
</body>
</html>

Test
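
Start the application and open http://localhost:8080/ (Spring Boot's default port, assuming server.port is not overridden). Enter a keyword in the crawl box to pull data into the jd_goods index, then search for the same keyword; matched terms in the product titles are wrapped in the red highlight span produced by the service layer.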



Reposted from: https://blog.csdn.net/qq_42252844/article/details/105989550