Designing a Strategy Pattern That Satisfies the Open-Closed Principle

The open-closed principle says that existing code should not be modified: new functionality is added by supplying new implementations of an interface (open for extension, closed for modification).

The strategy pattern abstracts an object's behavior into a common interface method; each concrete class overrides that same method with its own logic, which removes the tangled if/else branching that would otherwise pile up at the call site.
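
To make that concrete, here is a minimal sketch (not taken from the project code below) of the if/else dispatch that the strategy pattern replaces; the method shape and datasource values are illustrative only:

// Naive dispatch: every new datasource means editing this method,
// which is exactly what the open-closed principle tells us to avoid.
public CrawlerDTO crawlerAddress(CrawlerDTO dto) {
    if ("weixin".equals(dto.getUrlDatasource())) {
        // WeChat-specific crawling ...
    } else if ("jianshu".equals(dto.getUrlDatasource())) {
        // Jianshu-specific crawling ...
    } else {
        // default handling ...
    }
    return dto;
}

The project below removes this branching by resolving a Spring bean named "<datasource>_crawler" at runtime, so supporting a new site only means adding one new implementation class.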

The full code is shown below, starting with the project's pom.xml.

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.5.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>

    <groupId>com.ip</groupId>
    <artifactId>ip-service</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <java.version>1.8</java.version>
        <skipTests>true</skipTests>
    </properties>

    <dependencyManagement>

        <dependencies>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-dependencies</artifactId>
                <version>Hoxton.SR3</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>

            <dependency>
                <groupId>com.alibaba.cloud</groupId>
                <artifactId>spring-cloud-alibaba-dependencies</artifactId>
                <version>2.1.0.RELEASE</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>



        </dependencies>
    </dependencyManagement>


    <dependencies>

        <!-- Eureka client -->
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
        </dependency>
        <!-- Embedded Tomcat / web container -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <!-- fastjson dependency -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.7</version>
        </dependency>
        <!-- Lombok dependency -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.16</version>
        </dependency>

        <!-- MySQL driver -->
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.6</version>
        </dependency>
        <!-- Spring Boot integration with MyBatis -->
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>2.1.2</version>
        </dependency>
        <!-- Hot-reload (devtools) module -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-devtools</artifactId>
            <optional>true</optional> <!-- must be true for hot reload to take effect -->
        </dependency>
        <!-- jsoup, used for the Java crawler -->
        <dependency>
            <groupId>org.jsoup</groupId>
            <artifactId>jsoup</artifactId>
            <version>1.12.2</version>
        </dependency>
        <!-- commons-lang, used for null/blank checks -->
        <dependency>
            <groupId>commons-lang</groupId>
            <artifactId>commons-lang</artifactId>
            <version>2.6</version>
        </dependency>

        <!-- commons-lang3, date/time utilities -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.7</version>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.8</version>
        </dependency>


        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
        <dependency>
            <groupId>com.squareup.okhttp3</groupId>
            <artifactId>okhttp</artifactId>
            <version>3.10.0</version>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>

        <dependency>
            <groupId>org.lionsoul</groupId>
            <artifactId>ip2region</artifactId>
            <version>1.7.2</version>
        </dependency>
        <!-- Guava from Google; its RateLimiter is implemented on top of the token-bucket algorithm -->
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>28.0-jre</version>
        </dependency>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.6</version>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>

        </plugins>
        <finalName>server</finalName>
    </build>


</project>

CrawlerController.java, the entry point that resolves a strategy bean by name:

package com.ip.controller.front;

import com.ip.bean.CrawlerDTO;
import com.ip.config.SpringBeanUtil;
import com.ip.handle.strategy.CrawlerHandleStrategy;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

/**
 * @author yourheart
 * @Description
 * @create 2022-04-26 21:40
 */

@Controller
@RequestMapping("/crawler")
@Slf4j
public class CrawlerController {


    /**
     * Automatically fetch the title of the page at the given URL.
     * @param crawlerDTO request carrying the target URL and its datasource
     * @return the DTO filled in by the selected crawler strategy
     */
    @PostMapping("/crawlerAddress")
    @ResponseBody
    public CrawlerDTO crawlerAddress(@RequestBody CrawlerDTO crawlerDTO){
        log.info("[crawlerAddress] request: {}", crawlerDTO);
        // Build the bean name from the datasource, e.g. "weixin" -> "weixin_crawler",
        // then look up the matching strategy from the Spring context and delegate to it.
        String serverName = crawlerDTO.getUrlDatasource() + "_crawler";
        CrawlerHandleStrategy bean = SpringBeanUtil.getBean(serverName, CrawlerHandleStrategy.class);
        CrawlerDTO crawlerDto = bean.getCrawlerDto(crawlerDTO);
        return crawlerDto;
    }
}
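
For reference, a request to this endpoint would look roughly as follows. The JSON field names (urlDatasource, url) are inferred from the DTO's getters, since CrawlerDTO itself is not shown in this post, and the URL is only a placeholder:

POST /crawler/crawlerAddress
Content-Type: application/json

{"urlDatasource": "weixin", "url": "https://example.com/some-article"}

With this body the controller builds the bean name "weixin_crawler" and delegates to the WeiXinCrawlerHandleStrategy shown further below.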

SpringBeanUtil.java, a helper for fetching beans from the Spring ApplicationContext:

package com.ip.config;

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

/**
 * @Description:
 * @Author: Yourheart
 * @Create: 2022/12/8 18:42
 */
@Component
public class SpringBeanUtil implements ApplicationContextAware {

    private static ApplicationContext context;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        if (context==null){
            context=applicationContext;
        }
    }

    public static ApplicationContext getContext(){
        return context;
    }

    /**
     * Look up a bean instance by its name.
     * @param name bean name registered in the Spring context
     * @return the bean instance (as Object, so the caller casts)
     */
    public static Object getBean(String name){
        return getContext().getBean(name);
    }

    /**
     * Look up a bean instance by its type.
     * @param tClass the bean's class
     * @param <T> the bean type
     * @return the bean instance
     */
    public static <T> T getBean(Class<T> tClass){
        return getContext().getBean(tClass);
    }

    /**
     * Look up a bean instance by name and type; no cast is needed by the caller.
     */
    public static <T> T getBean(String name, Class<T> tClass){
        return getContext().getBean(name, tClass);
    }


}
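
The two lookup styles this helper exposes differ only in whether the caller has to cast; a couple of illustrative lines, using the crawler beans defined below:

// Name-only lookup returns Object, so the caller has to cast:
CrawlerHandleStrategy byName = (CrawlerHandleStrategy) SpringBeanUtil.getBean("weixin_crawler");
// Name-plus-type lookup is what CrawlerController uses; no cast is needed:
CrawlerHandleStrategy typed = SpringBeanUtil.getBean("weixin_crawler", CrawlerHandleStrategy.class);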

CrawlerHandleStrategy.java, the strategy interface:

package com.ip.handle.strategy;

import com.ip.bean.CrawlerDTO;

/**
 * @Description: Strategy interface for crawler handling
 * @Author: Yourheart
 * @Create: 2022/12/8 15:15
 */
public interface CrawlerHandleStrategy {

    /**
     * Fetch the page title of the given URL via the crawler.
     * @param crawlerDTO request carrying the target URL
     * @return the DTO with the crawled result
     */
    CrawlerDTO getCrawlerDto(CrawlerDTO crawlerDTO);

}
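
This interface is what keeps the design open-closed: supporting another site later only means adding one more implementation whose bean name follows the "<datasource>_crawler" convention, and none of the code above has to change. As a hypothetical example (not part of the original project), a "csdn" datasource could be handled like this:

package com.ip.handle.strategy.impl;

import com.ip.bean.CrawlerDTO;
import com.ip.handle.strategy.CrawlerHandleStrategy;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * Hypothetical example only: handles requests whose urlDatasource is "csdn".
 */
@Slf4j
@Service("csdn_crawler")
public class CsdnCrawlerHandleStrategy implements CrawlerHandleStrategy {

    @Override
    public CrawlerDTO getCrawlerDto(CrawlerDTO crawlerDTO) {
        log.info("Crawling a CSDN page...");
        // Site-specific parsing would go here, just like the WeChat strategy further below.
        return crawlerDTO;
    }
}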

DefaultCrawlerHandleStrategy.java, the strategy for the "default" datasource:

package com.ip.handle.strategy.impl;

import com.ip.bean.CrawlerDTO;
import com.ip.handle.strategy.CrawlerHandleStrategy;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * @Description:
 * @Author: Yourheart
 * @Create: 2022/12/8 15:38
 */
@Slf4j
@Service("default_crawler")
public class DefaultCrawlerHandleStrategy implements CrawlerHandleStrategy {

    /**
     * Fetch the page title of the given URL via the crawler.
     *
     * @param crawlerDTO request carrying the target URL
     * @return the DTO with the crawled result
     */
    @Override
    public CrawlerDTO getCrawlerDto(CrawlerDTO crawlerDTO) {
        log.info("Fetching the page title with the default strategy...");
        // Handles the "default" datasource; no site-specific crawling is implemented in this example.
        return null;
    }
}

JianShuCrawlerHandleStrategy.java, the strategy for the "jianshu" datasource:

package com.ip.handle.strategy.impl;

import com.ip.bean.CrawlerDTO;
import com.ip.handle.strategy.CrawlerHandleStrategy;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

/**
 * @Description:
 * @Author: Yourheart
 * @Create: 2022/12/8 15:37
 */
@Slf4j
@Service("jianshu_crawler")
public class JianShuCrawlerHandleStrategy implements CrawlerHandleStrategy {
    /**
     * Fetch the page title of the given URL via the crawler.
     *
     * @param crawlerDTO request carrying the target URL
     * @return the DTO with the crawled result
     */
    @Override
    public CrawlerDTO getCrawlerDto(CrawlerDTO crawlerDTO) {
        log.info("Crawling a Jianshu page...");
        // Handles the "jianshu" datasource; the Jianshu-specific parsing is left unimplemented here.
        return null;
    }
}

WeiXinCrawlerHandleStrategy.java, the strategy for the "weixin" datasource:

package com.ip.handle.strategy.impl;

import com.ip.bean.CrawlerDTO;
import com.ip.handle.strategy.CrawlerHandleStrategy;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.Arrays;

/**
 * @Description:
 * @Author: Yourheart
 * @Create: 2022/12/8 15:37
 */
@Slf4j
@Service("weixin_crawler")
public class WeiXinCrawlerHandleStrategy implements CrawlerHandleStrategy {
    /**
     * Fetch the page title of the given URL via the crawler.
     *
     * @param crawlerDTO request carrying the target URL
     * @return the DTO with the crawled result
     */
    @Override
    public CrawlerDTO getCrawlerDto(CrawlerDTO crawlerDTO) {
        log.info("Crawling a WeChat article page...");
        String url = crawlerDTO.getUrl();
        try {
            // Download and parse the page, then select the <meta property="twitter:title"> tag,
            // which carries the article title on these pages.
            Document doc = Jsoup.connect(url).get();
            Elements select = doc.select("meta[property=twitter:title]");
            StringBuilder stringBuilder = new StringBuilder();
            for (Element element : select) {
                // Strip the tag markup and split on '=' so that the last token is the title text.
                String string = element.toString();
                string = string.replace("<", "");
                string = string.replace(">", "");
                string = string.replace("\"", "");
                String[] split = string.split("=");
                Arrays.stream(split).forEach(a -> log.info("token: {}", a));
                stringBuilder.append(split[split.length - 1]);
            }
            // The extracted title accumulates in stringBuilder; in a complete DTO it would be
            // copied onto crawlerDTO here (CrawlerDTO's fields are not shown in this post).
        } catch (IOException e) {
            log.error("WeChat crawler failed", e);
        }
        return null;
    }
}
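
A small aside on the parsing above: jsoup can read an attribute's value directly, which avoids the string replace/split steps. A minimal sketch of the same extraction inside the same try/catch, assuming the page exposes the same twitter:title meta tag:

Document doc = Jsoup.connect(url).get();
// attr("content") returns the attribute value from the first matching element, or "" if there is none.
String title = doc.select("meta[property=twitter:title]").attr("content");
log.info("extracted title: {}", title);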

  
