GStreamer Basic Tutorial 11 - Integrating with Qt (Repost)
Abstract
A playback engine usually needs to be integrated with a GUI. When using GStreamer, GStreamer handles media playback and control, while the GUI handles user interaction and creates the display window. In this example we use Qt to show how to tell GStreamer to render video into a specific window, and how to use the information reported by GStreamer to update the GUI.
Integrating with a GUI
There are two aspects to take care of when integrating with a GUI:
- Managing the display window.
Since the display window is usually created by the GUI toolkit, we need to pass the window information to GStreamer. Because each platform passes window handles in a different way, GStreamer provides an abstract interface (GstVideoOverlay) that hides these platform differences, so we can hand the window ID created by the GUI directly to GStreamer (see the sketch after this list).
- Updating the GUI.
Most GUI toolkits require UI updates to be performed on the main thread, while GStreamer may create several internal threads. All messages produced by GStreamer therefore need to be forwarded to the GUI main thread through the GstBus and the toolkit's own communication mechanism, and the main thread then updates the interface.
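As a minimal sketch of the first point (assuming a QWidget that will host the video already exists; the attach_video_output helper name is made up for illustration), passing the native window handle through GstVideoOverlay could look like this:

#include <gst/video/videooverlay.h>
#include <QWidget>

// Minimal sketch: hand a Qt widget's native window ID to an overlay-capable sink.
// QWidget::winId() forces creation of a native window and returns its handle,
// which GstVideoOverlay accepts as a guintptr on every platform.
static void attach_video_output(GstElement *video_sink, QWidget *video_widget)
{
    guintptr handle = static_cast<guintptr>(video_widget->winId());
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(video_sink), handle);
}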
Below we use Qt as an example to see how to integrate GStreamer with a GUI toolkit.
Sample Code
qtoverlay.h
#ifndef _QTOVERLAY_
#define _QTOVERLAY_

#include <gst/gst.h>
#include <QWidget>
#include <QPushButton>
#include <QHBoxLayout>
#include <QVBoxLayout>
#include <QSlider>
#include <QTimer>

class PlayerWindow : public QWidget
{
    Q_OBJECT
public:
    PlayerWindow(GstElement *p);
    WId getVideoWId() const;
    static gboolean postGstMessage(GstBus * bus, GstMessage * message, gpointer user_data);

private slots:
    void onPlayClicked();
    void onPauseClicked();
    void onStopClicked();
    void onAlbumAvaiable(const QString &album);
    void onState(GstState st);
    void refreshSlider();
    void onSeek();
    void onEos();

signals:
    void sigAlbum(const QString &album);
    void sigState(GstState st);
    void sigEos();

private:
    GstElement *pipeline;
    QPushButton *playBt;
    QPushButton *pauseBt;
    QPushButton *stopBt;
    QWidget *videoWindow;
    QSlider *slider;
    QHBoxLayout *buttonLayout;
    QVBoxLayout *playerLayout;
    QTimer *timer;
    GstState state;
    gint64 totalDuration;
};

#endif
qtoverlay.cpp
#include <gst/video/videooverlay.h>
#include <QApplication>
#include "qtoverlay.h"

PlayerWindow::PlayerWindow(GstElement *p)
    :pipeline(p)
    ,state(GST_STATE_NULL)
    ,totalDuration(GST_CLOCK_TIME_NONE)
{
    playBt = new QPushButton("Play");
    pauseBt = new QPushButton("Pause");
    stopBt = new QPushButton("Stop");
    videoWindow = new QWidget();
    slider = new QSlider(Qt::Horizontal);
    timer = new QTimer();

    connect(playBt, SIGNAL(clicked()), this, SLOT(onPlayClicked()));
    connect(pauseBt, SIGNAL(clicked()), this, SLOT(onPauseClicked()));
    connect(stopBt, SIGNAL(clicked()), this, SLOT(onStopClicked()));
    connect(slider, SIGNAL(sliderReleased()), this, SLOT(onSeek()));

    buttonLayout = new QHBoxLayout;
    buttonLayout->addWidget(playBt);
    buttonLayout->addWidget(pauseBt);
    buttonLayout->addWidget(stopBt);
    buttonLayout->addWidget(slider);

    playerLayout = new QVBoxLayout;
    playerLayout->addWidget(videoWindow);
    playerLayout->addLayout(buttonLayout);

    this->setLayout(playerLayout);

    connect(timer, SIGNAL(timeout()), this, SLOT(refreshSlider()));
    connect(this, SIGNAL(sigAlbum(QString)), this, SLOT(onAlbumAvaiable(QString)));
    connect(this, SIGNAL(sigState(GstState)), this, SLOT(onState(GstState)));
    connect(this, SIGNAL(sigEos()), this, SLOT(onEos()));
}

WId PlayerWindow::getVideoWId() const {
    return videoWindow->winId();
}

void PlayerWindow::onPlayClicked() {
    GstState st = GST_STATE_NULL;
    gst_element_get_state (pipeline, &st, NULL, GST_CLOCK_TIME_NONE);
    if (st < GST_STATE_PAUSED) {
        // Pipeline stopped, we need to set the overlay again
        GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
        g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);
        WId xwinid = getVideoWId();
        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
}

void PlayerWindow::onPauseClicked() {
    gst_element_set_state (pipeline, GST_STATE_PAUSED);
}

void PlayerWindow::onStopClicked() {
    gst_element_set_state (pipeline, GST_STATE_NULL);
}

void PlayerWindow::onAlbumAvaiable(const QString &album) {
    setWindowTitle(album);
}

void PlayerWindow::onState(GstState st) {
    if (state != st) {
        state = st;
        if (state == GST_STATE_PLAYING){
            timer->start(1000);
        }
        if (state < GST_STATE_PAUSED){
            timer->stop();
        }
    }
}

void PlayerWindow::refreshSlider() {
    gint64 current = GST_CLOCK_TIME_NONE;
    if (state == GST_STATE_PLAYING) {
        if (!GST_CLOCK_TIME_IS_VALID(totalDuration)) {
            if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &totalDuration)) {
                slider->setRange(0, totalDuration/GST_SECOND);
            }
        }
        if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &current)) {
            g_print("%ld / %ld\n", current/GST_SECOND, totalDuration/GST_SECOND);
            slider->setValue(current/GST_SECOND);
        }
    }
}

void PlayerWindow::onSeek() {
    gint64 pos = slider->sliderPosition();
    g_print("seek: %ld\n", pos);
    gst_element_seek_simple (pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                             pos * GST_SECOND);
}

void PlayerWindow::onEos() {
    gst_element_set_state (pipeline, GST_STATE_NULL);
}

gboolean PlayerWindow::postGstMessage(GstBus * bus, GstMessage * message, gpointer user_data) {
    PlayerWindow *pw = NULL;
    if (user_data) {
        pw = reinterpret_cast<PlayerWindow*>(user_data);
    }
    switch (GST_MESSAGE_TYPE(message)) {
        case GST_MESSAGE_STATE_CHANGED: {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (message, &old_state, &new_state, &pending_state);
            pw->sigState(new_state);
            break;
        }
        case GST_MESSAGE_TAG: {
            GstTagList *tags = NULL;
            gst_message_parse_tag(message, &tags);
            gchar *album = NULL;
            if (gst_tag_list_get_string(tags, GST_TAG_ALBUM, &album)) {
                pw->sigAlbum(album);
                g_free(album);
            }
            gst_tag_list_unref(tags);
            break;
        }
        case GST_MESSAGE_EOS: {
            pw->sigEos();
            break;
        }
        default:
            break;
    }
    return TRUE;
}

int main(int argc, char *argv[])
{
    gst_init (&argc, &argv);
    QApplication app(argc, argv);
    app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit()));

    // prepare the pipeline
    GstElement *pipeline = gst_parse_launch ("playbin uri=file:///home/john/video/sintel_trailer-480p.webm", NULL);

    // prepare the ui
    PlayerWindow *window = new PlayerWindow(pipeline);
    window->resize(900, 600);
    window->show();

    // set window id to gstreamer
    GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
    WId xwinid = window->getVideoWId();
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
    g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);

    // connect to interesting signals
    GstBus *bus = gst_element_get_bus(pipeline);
    gst_bus_add_watch(bus, &PlayerWindow::postGstMessage, window);
    gst_object_unref(bus);

    // run the pipeline
    GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (sret == GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (pipeline);
        // Exit application
        QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
    }

    int ret = app.exec();

    window->hide();
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return ret;
}
qtoverlay.pro
QT += core gui widgets

TARGET = qtoverlay

INCLUDEPATH += /usr/include/glib-2.0
INCLUDEPATH += /usr/lib/x86_64-linux-gnu/glib-2.0/include
INCLUDEPATH += /usr/include/gstreamer-1.0
INCLUDEPATH += /usr/lib/x86_64-linux-gnu/gstreamer-1.0/include

LIBS += -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0 -lgstvideo-1.0

SOURCES += qtoverlay.cpp
HEADERS += qtoverlay.h
Save the content above into the corresponding files and run the commands below to build the executable. If the header files or libraries cannot be found, adjust the paths in qtoverlay.pro to match your installation (or see the pkg-config variant after the build commands).
qmake -o Makefile qtoverlay.pro
make
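If pkg-config is available on the system, the hard-coded include paths in qtoverlay.pro can be replaced by qmake's pkg-config support. This is an optional variation, not part of the original project file:

QT += core gui widgets
TARGET = qtoverlay

# Let pkg-config resolve the GStreamer/GLib compile and link flags
CONFIG += link_pkgconfig
PKGCONFIG += gstreamer-1.0 gstreamer-video-1.0

SOURCES += qtoverlay.cpp
HEADERS += qtoverlay.h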
Source Code Analysis
// prepare the pipeline
GstElement *pipeline = gst_parse_launch ("playbin uri=file:///home/john/video/sintel_trailer-480p.webm", NULL);

// prepare the ui
PlayerWindow *window = new PlayerWindow(pipeline);
window->resize(900, 600);
window->show();
In the main function, after initializing GStreamer and creating the Qt application object, we build the pipeline and construct the GUI window object. The PlayerWindow constructor initializes the buttons and the video window, and creates the timer used to periodically refresh the progress bar.
// set window id to gstreamer
GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
WId xwinid = window->getVideoWId();
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);
...
gst_bus_add_watch(bus, &PlayerWindow::postGstMessage, window);
...
GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
...
int ret = app.exec();
...
Next we create a separate ximagesink for video rendering and pass the ID of the video window created by Qt to GStreamer, so GStreamer knows which window to render into. We then use g_object_set() to install this custom sink into playbin through its "video-sink" property.
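Note that ximagesink only works under X11. Other sinks that implement GstVideoOverlay (for example glimagesink, or waylandsink on Wayland) can be installed the same way. The sketch below is only a hedged variation of the code above and assumes the corresponding plugins are installed:

// Hedged variation: prefer an OpenGL sink and fall back to the X11 sink used here.
GstElement *vsink = gst_element_factory_make("glimagesink", "vsink");
if (!vsink)   // plugin not installed, fall back to ximagesink
    vsink = gst_element_factory_make("ximagesink", "vsink");
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(vsink), window->getVideoWId());
g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);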
We also install GStreamer's message handler: every message on the bus is forwarded to the postGstMessage function. To be able to call back into the GUI object later, we pass the window pointer as user_data and cast it back to the GUI object inside postGstMessage.
Then we set the pipeline to the PLAYING state to start playback.
Finally we enter the GUI toolkit's event loop; exec() keeps running until the window is closed.
Because GStreamer's GstBus watch relies on the GLib main loop and its event handling by default, the GLib default MainLoop must be running in some thread. In this example, Qt on Linux automatically uses the GLib main loop, so no extra work is needed.
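On platforms where Qt is not built on top of the GLib main loop, a common workaround is to poll the bus from a Qt timer instead of calling gst_bus_add_watch. The sketch below only illustrates that idea under this assumption; the 50 ms interval is arbitrary and it reuses the postGstMessage handler from this example:

// Poll the bus on the GUI thread instead of relying on a GLib main loop watch.
QTimer *busTimer = new QTimer(window);
QObject::connect(busTimer, &QTimer::timeout, [pipeline, window]() {
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg;
    while ((msg = gst_bus_pop(bus)) != NULL) {      // drain pending messages without blocking
        PlayerWindow::postGstMessage(bus, msg, window);
        gst_message_unref(msg);
    }
    gst_object_unref(bus);
});
busTimer->start(50);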
gboolean PlayerWindow::postGstMessage(GstBus * bus, GstMessage * message, gpointer user_data) {
    PlayerWindow *pw = NULL;
    if (user_data) {
        pw = reinterpret_cast<PlayerWindow*>(user_data);
    }
    switch (GST_MESSAGE_TYPE(message)) {
        case GST_MESSAGE_STATE_CHANGED: {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (message, &old_state, &new_state, &pending_state);
            pw->sigState(new_state);
            break;
        }
        case GST_MESSAGE_TAG: {
            GstTagList *tags = NULL;
            gst_message_parse_tag(message, &tags);
            gchar *album = NULL;
            if (gst_tag_list_get_string(tags, GST_TAG_ALBUM, &album)) {
                pw->sigAlbum(album);
                g_free(album);
            }
            gst_tag_list_unref(tags);
            break;
        }
        case GST_MESSAGE_EOS: {
            pw->sigEos();
            break;
        }
        default:
            break;
    }
    return TRUE;
}
After casting user_data back to the GUI object, we handle the message according to its type. We do not update the GUI directly inside postGstMessage, because the bus handler may run on a different thread than the GUI main thread, and touching the GUI from another thread would fail or have no effect. Instead, we use Qt's signal-slot mechanism and update the GUI in the corresponding slots. Only three message types are handled here: STATE_CHANGED (state changes), TAG (media metadata and codec information) and EOS (end of stream); the full list of messages supported by GStreamer can be found in the official GstMessage documentation.
void PlayerWindow::onPlayClicked() {
    GstState st = GST_STATE_NULL;
    gst_element_get_state (pipeline, &st, NULL, GST_CLOCK_TIME_NONE);
    if (st < GST_STATE_PAUSED) {
        // Pipeline stopped, we need to set the overlay again
        GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
        g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);
        WId xwinid = getVideoWId();
        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
When the Play button is clicked, onPlayClicked() is invoked and we call the GStreamer API directly to change the pipeline state. When playback finishes or Stop is clicked, GStreamer releases all resources as the state changes to NULL, so here we need to set playbin's video-sink again and re-attach the video output window.
Pause and Stop are handled similarly: we simply call gst_element_set_state() to move the pipeline to the corresponding state.
void PlayerWindow::refreshSlider() {
    gint64 current = GST_CLOCK_TIME_NONE;
    if (state == GST_STATE_PLAYING) {
        if (!GST_CLOCK_TIME_IS_VALID(totalDuration)) {
            if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &totalDuration)) {
                slider->setRange(0, totalDuration/GST_SECOND);
            }
        }
        if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &current)) {
            g_print("%ld / %ld\n", current/GST_SECOND, totalDuration/GST_SECOND);
            slider->setValue(current/GST_SECOND);
        }
    }
}

void PlayerWindow::onSeek() {
    gint64 pos = slider->sliderPosition();
    g_print("seek: %ld\n", pos);
    gst_element_seek_simple (pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                             pos * GST_SECOND);
}
In the constructor we created a timer that refreshes the progress bar every second. When refreshSlider() is called, we obtain the total duration and the current position with gst_element_query_duration() and gst_element_query_position(), and update the slider. Since GStreamer reports time in nanoseconds, we divide by GST_SECOND to convert it to seconds for display.
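As a side note, when only a log string is needed, GStreamer's GST_TIME_FORMAT / GST_TIME_ARGS macros print a nanosecond value as HH:MM:SS.nnnnnnnnn directly. A small sketch, independent of the slider logic above:

gint64 current = 0, total = 0;
if (gst_element_query_position(pipeline, GST_FORMAT_TIME, &current) &&
    gst_element_query_duration(pipeline, GST_FORMAT_TIME, &total)) {
    // Prints e.g. "Position 0:00:12.345678901 / 0:00:52.250000000"
    g_print("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\n",
            GST_TIME_ARGS(current), GST_TIME_ARGS(total));
}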
We also handle the user's seek operation: when the slider is released at some position, we read the target position and call gst_element_seek_simple() to jump there. We do not need to care which thread these GStreamer calls are made from; GStreamer handles that internally.
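gst_element_seek_simple() also accepts additional flags from the GstSeekFlags enum. As a hedged variation of the call in onSeek(), GST_SEEK_FLAG_KEY_UNIT snaps to the nearest keyframe (fast but slightly imprecise), while GST_SEEK_FLAG_ACCURATE asks for an exact position (can be slower for some formats):

// Variation of the seek in onSeek(): flush and snap to the nearest keyframe.
gst_element_seek_simple(pipeline, GST_FORMAT_TIME,
                        (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
                        pos * GST_SECOND);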
Summary
In this tutorial we have learned:
- How to pass the GUI's window handle to GStreamer with gst_video_overlay_set_window_handle().
- How to forward messages to the GUI main thread using signals and slots.
- How to refresh the GUI periodically with a timer.
References
https://gstreamer.freedesktop.org/documentation/video/gstvideooverlay.html?gi-language=c
https://gstreamer.freedesktop.org/documentation/tutorials/basic/toolkit-integration.html?gi-language=c
https://doc.qt.io/qt-5/qmake-manual.html