javaCV

示例一:调用本地摄像头

参考地址:https://www.jianshu.com/p/9920d1636787

1.环境准备

<dependency>
	<groupId>org.bytedeco</groupId>
	<artifactId>javacv-platform</artifactId>
	<version>1.5</version>
</dependency>
<dependency>
	<groupId>cn.hutool</groupId>
	<artifactId>hutool-all</artifactId>
	<version>5.8.18</version>
</dependency>

2.调用本地摄像头并且显示在CanvasFrame里面

import cn.hutool.core.util.RuntimeUtil;
import org.bytedeco.javacv.CanvasFrame;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.OpenCVFrameGrabber;

import javax.swing.*;

public class T {

    /** Frame grabber reading from the local camera. */
    private OpenCVFrameGrabber grabber;

    /** Window that displays the captured frames. */
    private CanvasFrame canvas;

    /**
     * @param title        window title
     * @param deviceNumber camera device index (0 = system default camera)
     */
    public T(String title, Integer deviceNumber) {
        grabber = new OpenCVFrameGrabber(deviceNumber);
        canvas = new CanvasFrame(title);

        // Closing the window terminates the whole program.
        canvas.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

        // Graceful shutdown: release the camera when the JVM exits.
        RuntimeUtil.addShutdownHook(() -> {
            try {
                grabber.stop();
                grabber.release();
                System.out.println("######################### \t[ 释放资源 ]\t #########################");
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
    }

    /**
     * Starts the capture loop: grabs camera frames and paints them on the window.
     *
     * Fixes over the original: bail out when the grabber fails to start (no frames
     * will ever arrive), stop when {@code grab()} returns null (stream ended or
     * device disconnected), and stop instead of spinning forever when grabbing
     * throws repeatedly — the original {@code while (true)} never terminated.
     */
    public void start() {
        try {
            // Begin pulling data from the camera.
            grabber.start();
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
            // The grabber never started; there is nothing to display.
            return;
        }

        while (true) {
            try {
                Frame frame = grabber.grab();
                if (frame == null) {
                    // End of stream / device gone — leave the loop instead of busy-spinning.
                    break;
                }
                // Paint the captured frame onto the window.
                canvas.showImage(frame);
            } catch (Exception e) {
                e.printStackTrace();
                // A failing grabber would throw on every iteration; stop instead of spinning.
                break;
            }
        }
    }

    public static void main(String[] args) {
        T t = new T("测试javaCV", 0);
        t.start();
    }
}

示例二:javacv实现直播流

参考地址:https://www.jianshu.com/p/238e52bc16c4

pom依赖

<dependencies>
    <!-- 需要注意,javacv主要是一组API为主,还需要加入对应的实现 -->
    <dependency>
        <groupId>org.bytedeco</groupId>
        <artifactId>javacv</artifactId>
        <version>1.5.6</version>
    </dependency>

    <!-- 用到了 ffmpeg 需要把 ffmpeg 的平台实现依赖引入 -->
    <dependency>
        <groupId>org.bytedeco</groupId>
        <artifactId>ffmpeg-platform</artifactId>
        <version>4.4-1.5.6</version>
    </dependency>

    <!--所有平台实现,依赖非常大,几百MB吧-->
    <!--<dependency>
        <groupId>org.bytedeco</groupId>
        <artifactId>javacv-platform</artifactId>
        <version>1.5.6</version>
    </dependency>-->

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-controls</artifactId>
        <version>17.0.2</version>
    </dependency>

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-base</artifactId>
        <version>17.0.2</version>
    </dependency>

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-graphics</artifactId>
        <version>17.0.2</version>
    </dependency>

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-fxml</artifactId>
        <version>17.0.2</version>
    </dependency>

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-swing</artifactId>
        <version>17.0.2</version>
    </dependency>

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-web</artifactId>
        <version>17.0.2</version>
    </dependency>

    <dependency>
        <groupId>org.openjfx</groupId>
        <artifactId>javafx-media</artifactId>
        <version>17.0.2</version>
    </dependency>

</dependencies>

测试类

import cn.hutool.core.io.FileUtil;
import javafx.application.Application;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import javafx.stage.WindowEvent;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.*;

import javax.sound.sampled.*;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Timer;
import java.util.TimerTask;


public class T extends Application {


    /**
     * Recording frame rate in frames per second.
     */
    private static final int FRAME_RATE = 24;

    /** Global stop flag shared by the UI, the audio timer and the video thread. */
    private static boolean isStop = false;

    /** Microphone capture line. */
    private static TargetDataLine line;

    private static final String TITLE = "桌面录屏";

    private static final Integer fitWidth = 1920;

    private static final Integer fitHeight = 1080;


    /** Preview area mirroring the frames being recorded. */
    private static ImageView imageVideo;

    /** Screen frame grabber. */
    private static FrameGrabber grabber;

    /** Encoder/muxer writing the captured frames to the output file. */
    private static FFmpegFrameRecorder recorder;

    /**
     * Builds the UI: a preview ImageView plus a "stop recording" button.
     */
    private void configWindow(Stage primaryStage) {
        primaryStage.setTitle(TITLE);
        imageVideo = new ImageView();
        // Size of the preview area.
        imageVideo.setFitWidth(fitWidth);
        imageVideo.setFitHeight(fitHeight);
        // Stop button: flips the global flag; the worker threads observe it.
        Button button = new Button("停止录制");
        button.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                isStop = true;
                if (line != null) {// Stop audio capture immediately.
                    try {
                        line.close();
                    } catch (Exception e) {
                        // Best-effort close; nothing useful to do on failure here.
                    }
                }
                Alert alert = new Alert(Alert.AlertType.INFORMATION);
                alert.setTitle("信息");
                alert.setHeaderText("已经停止录制");
                alert.setOnCloseRequest(event1 -> alert.hide());
                alert.showAndWait();
            }
        });

        VBox box = new VBox();
        box.getChildren().addAll(button, imageVideo);
        primaryStage.setScene(new Scene(box));
        primaryStage.setWidth(fitWidth);
        primaryStage.setHeight(fitHeight);
        primaryStage.show();
        // Closing the window stops recording and exits the program.
        primaryStage.setOnCloseRequest(new EventHandler<WindowEvent>() {
            @Override
            public void handle(WindowEvent event) {
                isStop = true;
                System.exit(0);
            }
        });
    }

    /**
     * Configures the screen grabber.
     * NOTE(review): "gdigrab" is the Windows screen-capture device — this example
     * presumably only works on Windows; confirm before running elsewhere.
     */
    private void configGrabber() {
        grabber = new FFmpegFrameGrabber("desktop");
        grabber.setFormat("gdigrab");
        // Frame capture interval.
        grabber.setFrameRate(FRAME_RATE);
        // Capture region; full screen if unset.
        grabber.setImageWidth(fitWidth);
        grabber.setImageHeight(fitHeight);
        // The size must be set before the region origin; see ffmpeg gdigrab options.
        grabber.setOption("offset_x", "0");
        grabber.setOption("offset_y", "0");
        try {
            grabber.start();
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Configures the frame recorder (H.264 video + AAC audio into an AVI file).
     */
    private void configRecorder() {
        // Option 1: live push to an RTMP server
        // recorder = new FFmpegFrameRecorder("rtmp://10.8.4.191/live/livestream", grabber.getImageWidth(), grabber.getImageHeight(), 2);

        // Option 2: record to a local file
        // recorder = new FFmpegFrameRecorder("video.flv", grabber.getImageWidth(), grabber.getImageHeight(), 2);
        // Container format: flv/avi

        String videoFormat = "avi";
        File file = FileUtil.newFile("video." + videoFormat);
        if (file.exists()) {
            file.delete();
        }

        // Option 3: default recorder — the file appears after stop()/release().
        try {
            recorder = FFmpegFrameRecorder.createDefault(file, grabber.getImageWidth(), grabber.getImageHeight());
        } catch (FrameRecorder.Exception e) {
            e.printStackTrace();
        }
        recorder.setInterleaved(true);
        // https://trac.ffmpeg.org/wiki/StreamingGuide
        // Minimize encoder latency.
        recorder.setVideoOption("tune", "zerolatency");

        // https://trac.ffmpeg.org/wiki/Encode/H.264
        recorder.setVideoOption("preset", "ultrafast");

        // Frame rate — important, must match the grabber.
        recorder.setFrameRate(FRAME_RATE);

        // Key frame interval: one key frame every 2 seconds (FRAME_RATE * 2 frames).
        recorder.setGopSize(FRAME_RATE * 2);

        // H.264 keeps the output small; pick another codec if needed.
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);

        // https://trac.ffmpeg.org/wiki/Encode/H.264
        recorder.setVideoOption("crf", "28");

        // 2000 kb/s, roughly 720P quality.
        recorder.setVideoBitrate(2000000);

        recorder.setFormat(videoFormat);

        // Audio settings.
        // Constant-quality audio.
        recorder.setAudioOption("crf", "0");
        // Highest quality.
        recorder.setAudioQuality(0);
        // 192 Kbps
        recorder.setAudioBitrate(192000);
        recorder.setSampleRate(44100);
        recorder.setAudioChannels(2);
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);

        try {
            recorder.start();
        } catch (FrameRecorder.Exception e) {
            e.printStackTrace();
        }

    }

    /**
     * Starts audio capture on a background thread; a timer periodically drains the
     * microphone line and feeds the samples to the recorder.
     *
     * Fixes over the original: the timer task now returns right after cancelling
     * itself (the original fell through and kept reading from the closed line),
     * and the read length is clamped to the buffer size (line.available() can
     * exceed audioBytes.length and would throw ArrayIndexOutOfBoundsException).
     */
    private void startRecordAudio() {
        // 44.1 kHz, 16-bit, stereo, signed, little-endian.
        AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
        DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        // Obtain a capture line matching the format.
        try {
            line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
        } catch (LineUnavailableException e) {
            e.printStackTrace();
        }
        // Capture thread.
        new Thread(() -> {
            try {
                line.open(audioFormat);
                line.start();

                final int sampleRate = (int) audioFormat.getSampleRate();
                final int numChannels = audioFormat.getChannels();

                // Capture buffer.
                final int audioBufferSize = sampleRate * numChannels;
                final byte[] audioBytes = new byte[audioBufferSize];
                Timer timer = new Timer();
                timer.schedule(new TimerTask() {
                    @Override
                    public void run() {
                        try {
                            if (isStop) {// Stop audio capture.
                                line.stop();
                                line.close();
                                System.out.println("已经停止!");
                                timer.cancel();
                                // Fix: leave immediately — cancel() only prevents future
                                // runs; the original kept reading from the closed line.
                                return;
                            }

                            // Read available audio without blocking; clamp to the buffer
                            // size so line.available() can never overflow audioBytes.
                            int readLength = 0;
                            while (readLength == 0) {
                                readLength = line.read(audioBytes, 0, Math.min(line.available(), audioBytes.length));
                            }
                            // The format is 16-bit, so convert byte[] to short[]
                            // (FFmpegFrameRecorder.recordSamples expects AV_SAMPLE_FMT_S16).
                            int sampleCount = readLength / 2;
                            short[] samples = new short[sampleCount];

                            // byte[] -> short[] -> ShortBuffer
                            ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
                            ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, sampleCount);

                            // Feed the samples to the recorder.
                            recorder.recordSamples(sampleRate, numChannels, sBuff);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }, 1000, 1000 / FRAME_RATE);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }).start();
    }

    /**
     * Starts screen capture on a background thread: grabs frames, records them,
     * and mirrors them in the preview ImageView until isStop is set.
     */
    private void startRecordVideo() {
        new Thread(() -> {
            try {
                // One captured screen frame at a time.
                Frame frame = null;
                // Audio is recorded separately via recordSamples; timestamps are managed
                // by the recorder so the output is not stretched to twice its length.
                while ((frame = grabber.grab()) != null) {
                    if (isStop) {
                        try {
                            // Stop both ends.
                            recorder.stop();
                            grabber.stop();
                            // Release the native (C/C++) resources explicitly.
                            recorder.release();
                            grabber.release();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        break;
                    }

                    // Record the frame and mirror it in the preview.
                    recorder.record(frame);
                    Image convert = new JavaFXFrameConverter().convert(frame);
                    imageVideo.setImage(convert);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }).start();
    }

    @Override
    public void start(Stage primaryStage) {
        // Build the UI.
        configWindow(primaryStage);
        // Configure the screen grabber.
        configGrabber();
        // Configure the recorder.
        configRecorder();
        // Start audio capture.
        startRecordAudio();
        // Start screen capture.
        startRecordVideo();
    }

    public static void main(String[] args) {
        launch(args);
    }
}

什么是javaFX应用???

JavaFx类型应用启动引导类,解决启动报错

错误: 缺少 JavaFX 运行时组件, 需要使用该组件来运行此应用程序。

原因详解:https://www.cnblogs.com/hhddd-1024/p/16634369.html

/**
 * Bootstrap class for launching a JavaFX application; works around the console
 * error "错误: 缺少 JavaFX 运行时组件, 需要使用该组件来运行此应用程序。"
 * (missing JavaFX runtime components) raised when the main class itself
 * extends {@code Application}.
 *
 * @author JHL
 * @version 1.0
 * @date 2022/8/25 18:03
 * @since : JDK 11
 */
public class JavaFXBootstrap {
    public static void main(String[] args) {
        // Delegates to Test.main. NOTE(review): the JavaFX Application class in this
        // article is named T — confirm that Test is the intended launch target.
        Test.main(args);
    }
}

然后用docker起一个srs进行推流播放。

# 先启动
docker run -p 1935:1935 -p 1985:1985 -p 8080:8080 \
    ccr.ccs.tencentyun.com/ossrs/srs:4

最后消费者端即可拉流

示例三:javaCV 视频工具—截取视频缩略图、获取视频属性

https://www.jianshu.com/p/d691f0b68060

前言

通过javaCV 视频工具—截取视频缩略图、获取视频属性

依赖引入

<!--javaCV 视频工具-->
        <dependency>
            <groupId>org.bytedeco</groupId>
            <artifactId>javacv-platform</artifactId>
            <version>1.5</version>
        </dependency>

实现

@Slf4j
public class VideoUtils {

    private static final String IMAGEMAT = "png";
    private static final String ROTATE = "rotate";

    /**
     * Default divisor: the middle frame (frame count / MOD) is used as the cover.
     */
    public static final int MOD = 2;

    /**
     * Thumbnail file suffix.
     */
    private static final String VIDEO_THUMBNAIL_SUF = "th.png";

    /**
     * Thumbnail path prefix inside the bucket.
     */
    private static final String VIDEO_THUMBNAIL_PRE = "video/thumbnail/";

    private static final String SYMBOL = ".";

    /**
     * Grabs one frame of the video as its thumbnail and uploads it.
     *
     * @param filePath video path
     * @param mod      the frame at (frame count / mod) is captured
     * @return the uploaded thumbnail path, or "" on failure
     */
    public static String randomGrabberFFmpegImage(String filePath, int mod) {
        String targetFilePath = "";
        FFmpegFrameGrabber ff = null;
        try {
            ff = FFmpegFrameGrabber.createDefault(filePath);
            ff.start();
            // Rotation metadata: some videos must be rotated to display upright.
            String rotate = ff.getVideoMetadata(ROTATE);
            // Total number of frames.
            int ffLength = ff.getLengthInFrames();
            Frame f;
            int i = 0;
            // Index of the frame to capture.
            int index = ffLength / mod;
            while (i < ffLength) {
                f = ff.grabImage();
                if (i == index) {
                    if (null != rotate && rotate.length() > 1) {
                        OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();
                        IplImage src = converter.convert(f);
                        f = converter.convert(rotate(src, Integer.parseInt(rotate)));
                    }
                    targetFilePath = getImagePath(filePath, i);
                    doExecuteFrame(f, targetFilePath);
                    break;
                }
                i++;
            }
        } catch (Exception e) {
            log.error("获取视频缩略图异常:" + e.getMessage());
        } finally {
            // Fix: the original only called ff.stop() inside the try block,
            // leaking the grabber whenever an exception occurred.
            if (ff != null) {
                try {
                    ff.stop();
                } catch (FrameGrabber.Exception e) {
                    log.error("获取视频缩略图异常:" + e.getMessage());
                }
            }
        }
        return targetFilePath;
    }

    /**
     * Builds the storage path for the generated thumbnail.
     *
     * @param filePath video path
     * @param index    frame index used for the thumbnail
     * @return the thumbnail storage path
     */
    private static String getImagePath(String filePath, int index) {
        String fileName = FileUtils.getName(filePath);
        // Strip the extension.
        fileName = fileName.substring(0, fileName.indexOf(SYMBOL));
        return TencentCosConfig.baseUrl + VIDEO_THUMBNAIL_PRE + DateUtils.datePath() + "/" + fileName + "_" + index +  VIDEO_THUMBNAIL_SUF;
    }

    /**
     * Rotates an image by transposing and then flipping it.
     *
     * @param src   source image
     * @param angle flip mode passed to cvFlip
     * @return the rotated image
     */
    public static IplImage rotate(IplImage src, int angle) {
        IplImage img = IplImage.create(src.height(), src.width(), src.depth(), src.nChannels());
        opencv_core.cvTranspose(src, img);
        opencv_core.cvFlip(img, img, angle);
        return img;
    }

    /**
     * Writes the frame as a PNG and uploads it to Tencent COS.
     *
     * @param f               frame to upload; ignored when null or without image data
     * @param targerFilePath  destination key of the cover image
     */
    public static void doExecuteFrame(Frame f, String targerFilePath) {
        // Fix: validate the frame before creating the COS client — the original
        // created the client first and leaked it when returning early.
        if (null == f || null == f.image) {
            return;
        }
        COSClient cosClient = TencentCosUtils.initCosClient();

        Java2DFrameConverter converter = new Java2DFrameConverter();
        BufferedImage bi = converter.getBufferedImage(f);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try {
            ImageIO.write(bi, IMAGEMAT, out);
            // Obtain the encoded bytes as a stream.
            InputStream bufferedImage = new ByteArrayInputStream(out.toByteArray());
            int length = out.size();
            ObjectMetadata objectMetadata = new ObjectMetadata();
            // Content length must be set when uploading from a stream; otherwise the
            // http client may buffer everything in memory and OOM.
            objectMetadata.setContentLength(length);
            // Downloads infer content type from the key suffix; setting it here would override that.
            PutObjectRequest putObjectRequest = new PutObjectRequest(TencentCosConfig.bucket, targerFilePath, bufferedImage, objectMetadata);
            PutObjectResult putObjectResult = cosClient.putObject(putObjectRequest);
            log.info("腾讯COS上传视频缩略图成功:{}", putObjectResult.getETag());
            // Close the streams.
            bufferedImage.close();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            cosClient.shutdown();
        }
    }

    /**
     * Generates a sorted list of distinct random frame indices in [0, baseNum).
     *
     * @param baseNum upper bound (exclusive), here the video length
     * @param length  number of indices to generate
     * @return sorted list of distinct random indices
     * @throws IllegalArgumentException if length exceeds baseNum — the original
     *                                  looped forever in that case, since only
     *                                  baseNum distinct values exist
     */
    public static List<Integer> random(int baseNum, int length) {
        if (length > baseNum) {
            throw new IllegalArgumentException("length must not exceed baseNum");
        }
        List<Integer> list = new ArrayList<Integer>(length);
        while (list.size() < length) {
            Integer next = (int) (Math.random() * baseNum);
            if (list.contains(next)) {
                continue;
            }
            list.add(next);
        }
        Collections.sort(list);
        return list;
    }

    /**
     * Returns the video duration in seconds.
     *
     * @param video video file
     * @return duration in seconds, 0 on failure
     */
    public static long getVideoDuration(File video) {
        long duration = 0L;
        FFmpegFrameGrabber ff = new FFmpegFrameGrabber(video);
        try {
            ff.start();
            // getLengthInTime is in microseconds.
            duration = ff.getLengthInTime() / (1000 * 1000);
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: stop in finally so the grabber is released even on failure.
            try {
                ff.stop();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
        }
        return duration;
    }

    /**
     * Returns the video duration in seconds.
     *
     * @param inputStream input stream of the video
     * @return duration in seconds, 0 on failure
     */
    public static long getVideoDuration(InputStream inputStream) {
        long duration = 0L;
        FFmpegFrameGrabber ff = new FFmpegFrameGrabber(inputStream);
        try {
            ff.start();
            // getLengthInTime is in microseconds.
            duration = ff.getLengthInTime() / (1000 * 1000);
        } catch (FrameGrabber.Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: stop in finally so the grabber is released even on failure.
            try {
                ff.stop();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
        }
        return duration;
    }

    /**
     * Converts a video file to mp4 (H.264) next to the original.
     *
     * @param file source video file
     * @return the output file path, or null when conversion never started
     */
    public static String convertToMp4(File file) {
        FFmpegFrameGrabber frameGrabber = new FFmpegFrameGrabber(file);
        String fileName = null;
        Frame capturedFrame;
        FFmpegFrameRecorder recorder = null;

        try {
            frameGrabber.start();
            fileName = file.getAbsolutePath() + "__.mp4";
            recorder = new FFmpegFrameRecorder(fileName, frameGrabber.getImageWidth(), frameGrabber.getImageHeight(), frameGrabber.getAudioChannels());
            recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // or AV_CODEC_ID_MPEG4
            recorder.setFormat("mp4");
            recorder.setFrameRate(frameGrabber.getFrameRate());
            //recorder.setSampleFormat(frameGrabber.getSampleFormat());
            recorder.setSampleRate(frameGrabber.getSampleRate());
            recorder.setAudioChannels(frameGrabber.getAudioChannels());
            recorder.start();
            while ((capturedFrame = frameGrabber.grabFrame()) != null) {
                try {
                    recorder.setTimestamp(frameGrabber.getTimestamp());
                    recorder.record(capturedFrame);
                } catch (FrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        } catch (Exception e) {
            // Fix: the original "catch (Exception | FrameRecorder.Exception e)" does
            // not compile — multi-catch alternatives must not be related by subclassing.
            e.printStackTrace();
        } finally {
            // Fix: release both ends even when conversion fails; the original
            // leaked the recorder and grabber on any exception.
            try {
                if (recorder != null) {
                    recorder.stop();
                    recorder.release();
                }
            } catch (FrameRecorder.Exception e) {
                e.printStackTrace();
            }
            try {
                frameGrabber.stop();
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
            }
        }
        return fileName;
    }
}

示例四 javaCV将socket获取的视频流推到流媒体(RTMP)服务器(未测试,有待验证)

原文地址:https://blog.csdn.net/xiaojie11800/article/details/79043494

<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv-platform</artifactId>
    <version>1.5.7</version>
</dependency>
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>ffmpeg-platform</artifactId>
    <version>5.0-1.5.7</version>
</dependency>
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;

import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;

/**
 * @author JHL
 * @version 1.0
 * @date 2022/10/8 16:15
 * @since : JDK 11
 */
public class Test {
    /**
     * Repeatedly connects to the camera, requests its stream over a raw HTTP GET,
     * and pushes the response stream to the RTMP server.
     */
    public static void main(String[] args) throws Exception {
        try {
            int count = 0;
            while (true) {
                // Fix: try-with-resources closes the socket (and its streams) even when
                // an exception escapes the loop body — the original leaked it then.
                try (Socket socket = new Socket("192.168.0.80", 8080)) {
                    // Send the request for the camera stream.
                    OutputStream outputStream = socket.getOutputStream();
                    // NOTE(review): "YWRtaW46OTk5OQ====" is over-padded Base64 —
                    // confirm the credential string against the camera's docs.
                    outputStream.write("GET /ipcam/avc.cgi HTTP/1.1\r\nAuthorization: Basic YWRtaW46OTk5OQ====\r\n\r\n".getBytes());
                    outputStream.flush();
                    // Half-close: nothing more will be sent on this connection.
                    socket.shutdownOutput();
                    // Read the response stream and push it to the RTMP endpoint.
                    InputStream inputStream = socket.getInputStream();
                    String outputFile = "rtmp://localhost:1935/live/test2";
                    Player.frameRecord(inputStream, outputFile, 1);
                    count++;
                    System.out.println("================================" + count + "次数");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

class Player {
    /**
     * Records a video stream frame by frame.
     *
     * @param inputFile    input stream of the source — a network live/VOD address
     *                     or a remote/local file
     * @param outputFile   output address; pushing to a media server requires the
     *                     codec/format configured below
     * @param audioChannel whether to record audio (0: no / 1: yes)
     */
    public static void frameRecord(InputStream inputFile, String outputFile, int audioChannel) throws Exception {
        // Recommended as a global control flag so recording can be stopped externally.
        boolean isStart = true;
        // Video source.
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputFile);
        grabber.setOption("rtsp_transport", "tcp");
        // Output address, resolution (width, height), audio channel count.
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outputFile, 1280, 720, audioChannel);
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.setFormat("flv");
        // Start pulling from the source.
        recordByFrame(grabber, recorder, isStart);
    }

    /**
     * Pumps frames from the grabber into the recorder until the stream ends or
     * the status flag is false. Best run on a dedicated thread.
     */
    private static void recordByFrame(FFmpegFrameGrabber grabber, FFmpegFrameRecorder recorder, Boolean status) throws Exception {
        try {
            grabber.start();
            recorder.start();
            Frame frame;
            while (status && (frame = grabber.grabFrame()) != null) {
                recorder.record(frame);
            }
        } finally {
            // Fix: the original never stopped/released the recorder on failure,
            // stopped the grabber twice, and never released either. Release both
            // ends exactly once here.
            try {
                recorder.stop();
                recorder.release();
            } catch (Exception e) {
                e.printStackTrace();
            }
            grabber.stop();
            grabber.release();
        }
    }
}
posted @ 2022-08-30 12:05  黄河大道东  阅读(747)  评论(0编辑  收藏  举报