以下程序:获取一个媒体文件中流的数目以及流的信息,并且可以切换音轨。
可以理解为:使用playbin播放媒体,然后从playbin中获取以上信息
#include "pch.h"
#include<string.h>
#include<stdio.h>
#include <gst/gst.h>
/* Shared application state, passed as user data to every callback so that
 * they can reach the pipeline and the main loop. */
typedef struct _CustomData{
GstElement *playbin; /* the playbin element handling all playback */
gint n_video; /* number of embedded video streams */
gint n_audio; /* number of embedded audio streams */
gint n_text; /* number of embedded subtitle streams */
gint current_video; /* index of the currently playing video stream */
gint current_audio; /* index of the currently playing audio stream */
gint current_text; /* index of the currently playing subtitle stream */
GMainLoop *main_loop; /* GLib main loop driving bus and keyboard watches */
}CustomData;
/* Local mirror of playbin's "flags" property bits.  playbin's own
 * GstPlayFlags type lives inside the plugin and is not exported in a
 * public header, so applications re-declare the bit values they need. */
typedef enum {
	GST_PLAY_FLAG_VIDEO = (1 << 0), /* enable video output */
	GST_PALY_FLAG_AUDIO = (1 << 1), /* enable audio output (historical misspelling kept for compatibility) */
	GST_PALY_FLAG_TEXT  = (1 << 2), /* enable subtitle rendering (historical misspelling kept for compatibility) */
	/* Correctly spelled aliases for the misspelled constants above;
	 * new code should use these. */
	GST_PLAY_FLAG_AUDIO = GST_PALY_FLAG_AUDIO,
	GST_PLAY_FLAG_TEXT  = GST_PALY_FLAG_TEXT
} GstPlayFlags;
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data);
/*
 * Entry point: builds a playbin pipeline, configures its uri/flags/
 * connection-speed properties, attaches a bus watch and a keyboard watch,
 * then runs a GLib main loop until a callback calls g_main_loop_quit().
 * Returns 0 on success, -1 on setup failure.
 */
int main(int argc, char *argv[]) {
	CustomData data;
	GstBus *bus;
	GstStateChangeReturn ret;
	gint flags;
	GIOChannel *io_stdin;

	gst_init(&argc, &argv);

	data.playbin = gst_element_factory_make("playbin", "playbin");
	if (!data.playbin) {
		g_printerr("could not create playbin\n");
		return -1;
	}

	/* URI of the media to play; file URIs use forward slashes even on
	 * Windows (file:///C:/...). */
	g_object_set(data.playbin, "uri",
		"file:///C:/Users/lenovo/Desktop/testVideo/[PGS][Tintin-004][DTS-AC3][5PGS].mkv",
		NULL);

	/* Enable audio and video rendering, disable subtitle rendering. */
	g_object_get(data.playbin, "flags", &flags, NULL);
	flags |= GST_PLAY_FLAG_VIDEO | GST_PALY_FLAG_AUDIO;
	flags &= ~GST_PALY_FLAG_TEXT;
	g_object_set(data.playbin, "flags", flags, NULL);

	/* Assumed maximum network bandwidth (kbps); lets playbin pick a
	 * suitable version when a server offers multiple bitrates.
	 * These properties could also be set with a single g_object_set(). */
	g_object_set(data.playbin, "connection-speed", 56, NULL);

	bus = gst_element_get_bus(data.playbin);
	gst_bus_add_watch(bus, (GstBusFunc)handle_message, &data);

	/* Hook stdin (the keyboard) into the GLib main loop so the user can
	 * switch audio streams during playback; this is GLib machinery, not
	 * GStreamer. */
#ifdef G_OS_WIN32
	io_stdin = g_io_channel_win32_new_fd(_fileno(stdin));
#else
	io_stdin = g_io_channel_unix_new(fileno(stdin));
#endif
	g_io_add_watch(io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

	ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
	if (ret == GST_STATE_CHANGE_FAILURE) {
		g_printerr("could not set state to playing\n");
		/* Release everything acquired so far (bus and io_stdin were
		 * leaked on this path before). */
		g_io_channel_unref(io_stdin);
		gst_object_unref(bus);
		gst_object_unref(data.playbin);
		return -1;
	}

	/* Instead of polling the bus manually, run a GLib main loop; it
	 * returns only when g_main_loop_quit() is called from a callback.
	 * Note: FALSE (GLib), not the C++ literal `false`, which plain C
	 * does not define without <stdbool.h>. */
	data.main_loop = g_main_loop_new(NULL, FALSE);
	g_main_loop_run(data.main_loop);

	/* Free resources. */
	g_main_loop_unref(data.main_loop);
	g_io_channel_unref(io_stdin);
	gst_object_unref(bus);
	gst_element_set_state(data.playbin, GST_STATE_NULL);
	gst_object_unref(data.playbin);
	return 0;
}
/*
 * Queries playbin for the number of video/audio/subtitle streams, prints
 * each stream's metadata (codec, language, bitrate), and reports which
 * streams are currently selected.  Called from handle_message() once the
 * pipeline reaches PLAYING.
 */
static void analyze_streams(CustomData *data) {
	gint i;
	GstTagList *tags;
	gchar *str;
	guint rate;

	/* Read the stream counts from playbin. */
	g_object_get(data->playbin, "n-video", &data->n_video, NULL);
	g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
	g_object_get(data->playbin, "n-text", &data->n_text, NULL);
	g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
		data->n_video, data->n_audio, data->n_text);
	g_print("\n");

	for (i = 0; i < data->n_video; i++) {
		tags = NULL;
		/* Per-stream metadata lives in a GstTagList.  playbin exposes
		 * three action signals to fetch it (get-video-tags,
		 * get-audio-tags, get-text-tags); the individual tags are then
		 * read with the gst_tag_list_get_* accessors.  Here we care
		 * about GST_TAG_LANGUAGE_CODE and GST_TAG_*_CODEC. */
		g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);
		if (tags) {
			g_print("video stream %d:\n", i);
			/* Check the return value: on failure `str` is left
			 * unwritten, so reading it unconditionally (as the
			 * original did) was undefined behavior. */
			if (gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str)) {
				g_print("codec:%s\n", str);
				g_free(str);
			} else {
				g_print("codec:unknown\n");
			}
			gst_tag_list_free(tags);
		}
	}
	g_print("\n");

	for (i = 0; i < data->n_audio; i++) {
		tags = NULL;
		g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
		if (tags) {
			g_print("audio stream %d:\n", i);
			if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
				g_print("codec:%s\n", str);
				g_free(str);
			}
			if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
				g_print(" language: %s\n", str);
				g_free(str);
			}
			if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
				g_print(" bitrate: %u\n", rate); /* %u matches guint */
			}
			gst_tag_list_free(tags);
		}
	}
	g_print("\n");

	for (i = 0; i < data->n_text; i++) {
		tags = NULL;
		/* Retrieve the stream's subtitle tags. */
		g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
		if (tags) {
			g_print("subtitle stream %d:\n", i);
			if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
				g_print(" language: %s\n", str);
				g_free(str);
			}
			gst_tag_list_free(tags);
		}
	}

	/* Report which streams playbin is currently playing. */
	g_object_get(data->playbin, "current-video", &data->current_video, NULL);
	g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
	g_object_get(data->playbin, "current-text", &data->current_text, NULL);
	g_print("\n");
	g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
		data->current_video, data->current_audio, data->current_text);
	g_print("Type any number and hit ENTER to select a different audio stream\n");
}
/*
 * Bus watch callback: the GLib main loop invokes it for every message
 * posted on the pipeline's bus.  Quits the main loop on ERROR or EOS,
 * and triggers stream analysis when playbin itself reaches PLAYING.
 * Returns TRUE so the watch stays installed.
 */
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
	GError *err;
	gchar *debug_info;

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &err, &debug_info);
		g_printerr("Error received from element %s:%s\n",
			GST_OBJECT_NAME(msg->src), err->message);
		g_printerr("debugging information:%s\n", debug_info ? debug_info : "none");
		g_clear_error(&err);
		g_free(debug_info);
		g_main_loop_quit(data->main_loop);
		break;
	case GST_MESSAGE_EOS:
		g_print("end of stream reached\n");
		g_main_loop_quit(data->main_loop);
		break;
	case GST_MESSAGE_STATE_CHANGED: {
		GstState old_state, new_state, pending_state;
		gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
		/* Only react to state changes of playbin itself (child elements
		 * post their own state-changed messages too), and analyze the
		 * streams exactly when playback starts. */
		if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin)) {
			if (new_state == GST_STATE_PLAYING) {
				analyze_streams(data);
			}
		}
		break;
	}
	default:
		/* Ignore all other message types. */
		break;
	}
	return TRUE;
}
/*
 * Keyboard watch callback: reads one line from stdin and, if it is a
 * valid audio stream index, switches playbin's "current-audio" property.
 * Returns TRUE so the watch stays installed.
 */
static gboolean handle_keyboard(GIOChannel *source, GIOCondition cond, CustomData *data) {
	gchar *str = NULL;

	if (g_io_channel_read_line(source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
		gchar *end = NULL;
		/* Parse as a signed value: the original g_ascii_strtoull()
		 * returned an unsigned result, so negative input wrapped and
		 * the `index < 0` guard was unreliable.  Also reject lines
		 * that contain no digits at all (they used to parse as 0 and
		 * silently select stream 0). */
		gint64 index = g_ascii_strtoll(str, &end, 10);
		if (end == str || index < 0 || index >= data->n_audio) {
			g_printerr("Index out of bounds\n");
		} else {
			/* Valid audio stream index: make it the current stream. */
			g_print("Setting current audio stream to %d\n", (gint)index);
			g_object_set(data->playbin, "current-audio", (gint)index, NULL);
		}
	}
	g_free(str);
	return TRUE;
}
代码分解:
1、定义了一个结构体,包括playbin组件,音轨、视频、字幕的数目等,以及一个main_loop。把这些信息作为一个结构体来定义,是为了方便访问,比如作为回调函数的参数来传递就很方便。
typedef struct _CustomData{
GstElement *playbin;
gint n_video;
gint n_audio;
gint n_text;
gint current_video;
gint current_audio;
gint current_text;
GMainLoop *main_loop;
}CustomData;
2、定义一个枚举类型,包括playbin的一些标志(flag),1<<0 就代表把1左移0位。之所以要在应用程序里重新定义这个枚举类型,是因为playbin自己的GstPlayFlags定义在插件内部,并没有通过公共头文件导出给应用程序,所以这里按相同的位值重新声明一份,方便用位运算来操作flags属性。
typedef enum {
GST_PLAY_FLAG_VIDEO = (1 << 0), //we want video output
GST_PALY_FLAG_AUDIO = (1 << 1),
GST_PALY_FLAG_TEXT = (1 << 2)
}GstPlayFlags;
3、main函数,首先初始化,然后创建playbin
CustomData data;
GstBus *bus;
GstStateChangeReturn ret;
gint flags;
GIOChannel *io_stdin;
gst_init(&argc, &argv);
data.playbin = gst_element_factory_make("playbin", "playbin");
if (!data.playbin) {
g_printerr("could not create playbin\n");
return -1;
}
4、设置playbin中的一些属性:uri 、flags、connection-speed。
uri就是媒体地址啦,可以是网络地址,也可以是本地地址。
flags:一个位掩码属性,用来开关playbin的各种功能,每一位对应一个开关,常用标志的含义如下表:
GST_PLAY_FLAG_VIDEO | 允许视频渲染,如果这个标志没有设置,则没有视频输出 |
GST_PLAY_FLAG_AUDIO | 允许音频渲染,如果这个标志没有设置,则没有音频输出 |
GST_PLAY_FLAG_TEXT | 允许字幕渲染,如果这个标志没有设置,则没有字幕显示 |
GST_PLAY_FLAG_VIS | 允许在没有视频流时进行可视化渲染,后面教程会讲到 |
GST_PLAY_FLAG_DOWNLOAD | 参见《GStreamer基础教程12——流》以及后续教程 |
GST_PLAY_FLAG_BUFFERING | 参见《GStreamer基础教程12——流》以及后续教程 |
GST_PLAY_FLAG_DEINTERLACE | 如果视频是隔行扫描的,那么在显示时改成逐行扫描 |
connection-speed:设置网络的最大连接速度(单位kbps)。当服务器对同一媒体提供多个码率版本时,playbin会参考这个值,选择码率不超过该带宽的合适版本。
g_object_set(data.playbin, "uri", "file:///C:/Users/lenovo/Desktop/testVideo/[PGS][Tintin-004][DTS-AC3][5PGS].mkv", NULL);
g_object_get(data.playbin, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PALY_FLAG_AUDIO;
flags &= ~GST_PALY_FLAG_TEXT;
g_object_set(data.playbin, "flags", flags, NULL);
//connection-speed设置网络的最大连接速度
g_object_set(data.playbin, "connection-speed", 56, NULL);
当然也可以使用一个g_object_set函数来设置所有的属性:
g_object_set(data->playbin, "uri", "file:///c:/filename", "flags", flags, "connection-speed", 56, NULL);
5、监听总线,设置回调函数handle_message。
bus = gst_element_get_bus(data.playbin);
gst_bus_add_watch(bus, (GstBusFunc)handle_message, &data);
6、看一下回调函数handle_message。
有三个参数:bus、msg、data。bus和data就是gst_bus_add_watch中传入的bus和&data;msg则是总线上当前待处理的那条消息——GLib主循环在监视总线,每当总线上出现新消息,就把该消息作为msg参数传给这个回调函数。
使用switch判断msg,如果是GST_MESSAGE_ERROR或者GST_MESSAGE_EOS就调用g_main_loop_quit()结束loop。
如果是GST_MESSAGE_STATE_CHANGED,也就是状态改变到playing,相当于初次打开媒体文件,并且开始播放。就调用analyze_streams()来分析流信息。
analyze_stream()接下来分析。g_main_loop_quit()接下来也会解释。
static gboolean handle_message(GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("Error recived from element %s:%s\n", GST_OBJECT_NAME(msg->src), err->message);
g_printerr("debugging information:%s\n", debug_info ? debug_info : "none");
g_clear_error(&err);
g_free(debug_info);
g_main_loop_quit(data->main_loop);
break;
case GST_MESSAGE_EOS:
g_print("end of stream reched\n");
g_main_loop_quit(data->main_loop);
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->playbin)) {
if (new_state == GST_STATE_PLAYING) {
analyze_streams(data);
}
}
break;
}
}
return TRUE;
}
7、这几行连接了一个标准输入(键盘)和一个回调函数。这里使用的机制是GLib的,并非是基于GStreamer的.
最后是设置了回调函数handle_keyboard。
主要是这样的:在播放过程中随时等待用户在命令行输入内容。handle_keyboard的作用就是在获取到输入以后做出判断并执行相应操作。
#ifdef G_OS_WIN32
io_stdin = g_io_channel_win32_new_fd(_fileno(stdin));
#else
io_stdin = g_io_channel_unix_new(fileno(stdin));
#endif
g_io_add_watch(io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
8、将状态设置为playing,并且设置了main_loop。使用g_main_loop_run的意思就是一直循环着,直到遇到g_main_loop_quit()才结束。
ret = gst_element_set_state(data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("could not set sate to playing\n");
gst_object_unref(data.playbin);
return -1;
}
//为了交互,不再手动轮询gstreamer总线,我们创建main_loop,并且使用了g_main_loop_run函数让它运行起来。
//,直到调用g_main_loop_quit()才被返回
data.main_loop = g_main_loop_new(NULL,false);
g_main_loop_run(data.main_loop);
9、最后就是释放资源
g_main_loop_unref(data.main_loop);
g_io_channel_unref(io_stdin);
gst_object_unref(bus);
gst_element_set_state(data.playbin, GST_STATE_NULL);
g_object_unref(data.playbin);
10、analyze_stream()就是真正获取流信息的地方。
主要是用g_object_get()函数来获取流数目,当前流。
用g_signal_emit_by_name()来获取流的tags,然后用gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)来获取tags中的语言等信息。
static void analyze_streams(CustomData *data) {
gint i;
GstTagList *tags;
gchar *str;
guint rate;
g_object_get(data->playbin, "n-video", &data->n_video, NULL);
g_object_get(data->playbin, "n-audio", &data->n_audio, NULL);
g_object_get(data->playbin, "n-text", &data->n_text, NULL);
g_print("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
data->n_video, data->n_audio, data->n_text);
g_print("\n");
for (i = 0; i < data->n_video; i++) {
tags = NULL;
//现在,对于每一个流来说,我们需要获得它的元数据。元数据是存在一个GstTagList的结构体里面,
//这个GstTagList通过g_signal_emit_by_name()可以把流里面对应的tag都取出来。
//然后可以用gst_tag_list_get_*这一类函数来访问这些tag,这个例子中用的就是gst_tag_list_get_string()方法。
//playbin定义了2个action信号来获得元数据:get-video-tags,get-audio-tags和get-text-tags。
//在这个例子中我们关注的是GST_TAG_LANGUAGE_CODE这个tag和GST_TAG_ * _CODEC(audio,video和text)。
g_signal_emit_by_name(data->playbin, "get-video-tags", i, &tags);
if (tags) {
g_print("video stream %d:\n", i);
gst_tag_list_get_string(tags, GST_TAG_VIDEO_CODEC, &str);
g_print("codec:%s\n", str ? str : "unknown");
g_free(str);
gst_tag_list_free(tags);
}
}
g_print("\n");
for (i = 0; i < data->n_audio; i++) {
tags = NULL;
g_signal_emit_by_name(data->playbin, "get-audio-tags", i, &tags);
if (tags) {
g_print("audio stream %d:\n", i);
if (gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &str)) {
g_print("codec:%s\n", str);
g_free(str);
}
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print(" language: %s\n", str);
g_free(str);
}
if (gst_tag_list_get_uint(tags, GST_TAG_BITRATE, &rate)) {
g_print(" bitrate: %d\n", rate);
}
gst_tag_list_free(tags);
}
}
g_print("\n");
for (i = 0; i < data->n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
g_signal_emit_by_name(data->playbin, "get-text-tags", i, &tags);
if (tags) {
g_print("subtitle stream %d:\n", i);
if (gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print(" language: %s\n", str);
g_free(str);
}
gst_tag_list_free(tags);
}
}
g_object_get(data->playbin, "current-video", &data->current_video, NULL);
g_object_get(data->playbin, "current-audio", &data->current_audio, NULL);
g_object_get(data->playbin, "current-text", &data->current_text, NULL);
g_print("\n");
g_print("Currently playing video stream %d, audio stream %d and text stream %d\n",
data->current_video, data->current_audio, data->current_text);
g_print("Type any number and hit ENTER to select a different audio stream\n");
}