GStreamer Notes: Simple Use of the v4l2src Plugin
Contents
- Preface
- Personal Summary
- Simple Use of the v4l2src Plugin
Preface
The official GStreamer documentation already covers the details in depth (link: GStreamer官网), so they are not repeated here. What follows is only my personal understanding, written down for later reference.
Personal Summary
1) Understand how a pipeline is used. Simply put, it is a channel concept: because data usually has to be duplicated and fanned out, GStreamer provides the tee element, and by binding tee to downstream branches the same data is reused. This is very similar to the VPSS channel concept in many SoC codec chips: one capture can be fanned out into a video-encoding channel, a JPEG channel, and an image-processing channel (see the tee sketch right after this list).
2) The key is to understand Availability in the Pad Templates, because it determines how pads get linked; also pay attention to the settings under Element Properties.
3) At work the most common use is not as a media player but to grab the captured data for further image processing, so the code below shows how to pull data out of the v4l2src plugin.
4) The extensibility that makes GStreamer so reusable also makes it complex, and it additionally requires learning GLib, so the learning cost is fairly high. Even so, most mainstream streaming-media and edge chips use GStreamer, for example NVIDIA and Rockchip; Rockchip in particular adopts GStreamer directly as its framework and adds its own hardware codecs on top.
5) Many GStreamer elements lend themselves well to extension; for example, streaming over rtp/rtsp/rtmp/webrtc can be developed very quickly (see the RTP one-liner after this list), but going through the C API means a much longer debugging and learning cycle.
I only spent three days studying this, so this post is simply a record.
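To make points 1) and 2) concrete, here is a minimal fan-out sketch, not taken from the article itself (it assumes a camera at /dev/video0 and that the jpegenc plugin is installed): tee duplicates one capture into a JPEG-file branch and a display branch.

gst-launch-1.0 v4l2src device=/dev/video0 ! videoconvert ! tee name=t \
    t. ! queue ! jpegenc ! multifilesink location=frame-%05d.jpg \
    t. ! queue ! autovideosink

The queue at the head of each branch matters: it gives every branch its own streaming thread so a slow branch cannot stall the others. Note also that tee's src pad template is src_%u with Availability: Request, which is exactly the point of 2): in C code each branch pad must be requested explicitly, e.g. with gst_element_request_pad_simple() (gst_element_get_request_pad() before GStreamer 1.20), rather than relying on a plain gst_element_link().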
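For point 5), a one-line sender sketch (again only an illustration, assuming the x264enc and rtph264pay plugins are installed) shows how quickly an RTP/H.264 stream can be put together; building the same graph through the C API takes noticeably more code:

gst-launch-1.0 v4l2src ! videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000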
Simple Use of the v4l2src Plugin
/**
 * @brief Pull data out of v4l2src so it can be processed afterwards,
 *        e.g. fed into a hardware re-encoder.
 *
 * Key appsink property: emit-signals (disabled by default)
 *   emit-signals : Emit new-preroll and new-sample signals
 *                  flags: readable, writable
 *                  Boolean. Default: false
 *
 * Element Signals:
 *   "eos"         : void          user_function (GstElement* object, gpointer user_data);
 *   "new-preroll" : GstFlowReturn user_function (GstElement* object, gpointer user_data);
 *   "new-sample"  : GstFlowReturn user_function (GstElement* object, gpointer user_data);
 *
 * Element Actions:
 *   "pull-preroll"     : GstSample * user_function (GstElement* object);
 *   "pull-sample"      : GstSample * user_function (GstElement* object);
 *   "try-pull-preroll" : GstSample * user_function (GstElement* object, guint64 arg0);
 *   "try-pull-sample"  : GstSample * user_function (GstElement* object, guint64 arg0);
 */
#include <unistd.h>
#include <gst/gst.h>
#include <pthread.h>
#include <time.h>
#include <sys/time.h>
#include <string.h> /* memset */

#define TEST_MODULE
//#define VIDEO_X_RAW

typedef struct ExtraCtrl_
{
    int s32brightness;
    int s32Contrast;
    int s32Saturation;
    int s32Hue;
    int s32Sharpness;
} Extra_Ctrl_S;

/*
 * @brief  Set the ISP image quality (mainly image-enhancement controls).
 * @params[in] source    - the SRC plugin
 *             extraCtrl - the caller's image preferences
 * @return none
 * @test   1. Use gst-inspect-1.0 to see which image properties the plugin supports.
 */
void setCameraQuality(GstElement *source, const Extra_Ctrl_S *extraCtrl)
{
    g_object_set(source, "do-timestamp", TRUE, NULL);
#if 0
    GstStructure *extraCtrls = gst_structure_new("logitech_controls",
        "brightness", G_TYPE_INT, extraCtrl->s32brightness,
        "contrast",   G_TYPE_INT, extraCtrl->s32Contrast,
        "saturation", G_TYPE_INT, extraCtrl->s32Saturation,
        "hue",        G_TYPE_INT, extraCtrl->s32Hue,
        "sharpness",  G_TYPE_INT, extraCtrl->s32Sharpness,
        // "focus_auto", G_TYPE_BOOLEAN, FALSE,
        // "white_balance_temperature_auto", G_TYPE_BOOLEAN, FALSE,
        // "white_balance_temperature", G_TYPE_INT, 3500,
        NULL);
#else
    /* gst-1.0: no sharpness control; the defaults here are brightness 0,
     * hue 0, contrast 36, saturation 54 */
    int brightness = 1, hue = 1, contrast = 1, saturation = 1;
    g_object_get(source, "brightness", &brightness, "hue", &hue,
                 "contrast", &contrast, "saturation", &saturation, NULL);
    g_print("get: brightness:%d, hue:%d, contrast:%d, saturation:%d\n",
            brightness, hue, contrast, saturation);

    GstStructure *extraCtrls = gst_structure_new("logitech_controls",
        "brightness", G_TYPE_INT, brightness,
        "hue",        G_TYPE_INT, hue,
        "contrast",   G_TYPE_INT, contrast,
        "saturation", G_TYPE_INT, saturation, // 160
        NULL);
    g_object_set(source, "extra-controls", extraCtrls, NULL);
    gst_structure_free(extraCtrls); /* g_object_set() stores a copy */

    g_object_get(source, "brightness", &brightness, "hue", &hue,
                 "contrast", &contrast, "saturation", &saturation, NULL);
    g_print("get: brightness:%d, hue:%d, contrast:%d, saturation:%d\n",
            brightness, hue, contrast, saturation);
#endif
}

TEST_MODULE void *ispCtlProc(void *arg)
{
    while (1) {
        GstElement *element = (GstElement *)arg;
        if (element) {
            static int rec = 0;
            g_print("rec:%d\n", rec++);

            Extra_Ctrl_S s;
            memset(&s, 0, sizeof(s));
            static int count = 0;
            s.s32brightness = count++ % 255; /* sweep brightness for testing */
            s.s32Contrast   = 128;
            s.s32Saturation = 128;
            s.s32Hue        = 128;
            s.s32Sharpness  = 128;
            setCameraQuality(element, &s);
        }
        usleep(1000 * 100); /* 100 ms */
    }
    return NULL;
}

/*
 * @brief Raw-data callback: invoked for every new sample from appsink.
 */
GstFlowReturn cb_appsink_new_sample(GstElement *appsink, gpointer user_data)
{
    // LOG_INFO_MSG ("cb_appsink_new_sample called, user data: %p", user_data);
#if 0
    SinkPipeline *sp = reinterpret_cast<SinkPipeline *>(user_data);
#endif
    GstSample *sample = NULL;
    GstBuffer *buffer = NULL;
    GstMapInfo map;
    gboolean mapped = FALSE;
    const GstStructure *info = NULL;
    GstCaps *caps = NULL;
    int sample_width = 0;
    int sample_height = 0;

    /* equals gst_app_sink_pull_sample (GST_APP_SINK_CAST (appsink), sample);
     * the "pull-sample" action takes no arguments and returns the sample
     * through the trailing out pointer */
    g_signal_emit_by_name(appsink, "pull-sample", &sample);
    if (sample == NULL) {
        g_print("can't pull GstSample.\n");
        return GST_FLOW_OK;
    }

    buffer = gst_sample_get_buffer(sample);
    if (buffer == NULL) {
        g_print("get buffer is null\n");
        goto exit;
    }
    mapped = gst_buffer_map(buffer, &map, GST_MAP_READ);
    if (!mapped) {
        goto exit;
    }

    caps = gst_sample_get_caps(sample);
    if (caps == NULL) {
        g_print("get caps is null\n");
        goto exit;
    }
    info = gst_caps_get_structure(caps, 0);
    if (info == NULL) {
        g_print("get info is null\n");
        goto exit;
    }

    /* -------- Read the frame parameters --------
     * Width / height are resolved from the caps. */
    gst_structure_get_int(info, "width", &sample_width);
    gst_structure_get_int(info, "height", &sample_height);
    g_print("format:%s, width:%d, height:%d\n",
            gst_structure_get_string(info, "format"), sample_width, sample_height);
    if (NULL == map.data) {
        g_print("appsink buffer data empty\n");
        goto exit;
    }

    /* Measure the frame rate once per second */
    {
        static struct timeval s, e;
        static int flag = 1;
        static int frameNo = 0;
        if (flag) {
            gettimeofday(&s, 0);
            e = s;
            flag = 0;
        }
        gettimeofday(&e, 0);
        frameNo++;
        unsigned int timespan = (e.tv_sec - s.tv_sec) * 1000 * 1000 + e.tv_usec - s.tv_usec;
        if (timespan >= 1000000) {
            g_print("frame siz:%" G_GSIZE_FORMAT ", max_siz:%" G_GSIZE_FORMAT ", %02d FPS\n",
                    map.size, map.maxsize, frameNo);
            frameNo = 0;
            gettimeofday(&s, 0);
        }
    }
    // FILE *fp =
#if 0
    /* customized user action: wrap the mapped data in a cv::Mat (deep copy);
     * sometimes you may get an empty buffer */
    {
        if (map.data == NULL) {
            LOG_ERROR_MSG("appsink buffer data empty\n");
            return GST_FLOW_OK;
        }
        cv::Mat img(sample_height, sample_width, CV_8UC3,
                    (unsigned char *)map.data, cv::Mat::AUTO_STEP);
        img = img.clone();
        /* redirect the data to the outside: for decoupling */
        if (sp->m_putDataFunc) {
            sp->m_putDataFunc(std::make_shared<cv::Mat>(img), sp->m_putDataArgs);
        } else {
            goto exit;
        }
    }
#endif

exit:
    if (mapped) {
        gst_buffer_unmap(buffer, &map);
    }
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}
/*
 * @brief Spawn the thread that keeps updating the image controls (TEST module).
 */
TEST_MODULE int ispInit(GstElement *source)
{
    pthread_t ispControlThread;
    return pthread_create(&ispControlThread, 0, ispCtlProc, (void *)source);
}

int main(int argc, char *argv[])
{
    GstElement *pipeline, *source;
    GstElement *converter, *appsink;
    GstBus *bus;
    GstMessage *msg;

    /* Initialize GStreamer */
    gst_init(&argc, &argv);

    /* Create the elements */
    /* src (Availability: always) */
    source = gst_element_factory_make("v4l2src", "v4l2src");
    converter = gst_element_factory_make("videoconvert", "converter01");
    /* sink */
    appsink = gst_element_factory_make("appsink", "appsink");

    /* Create the empty pipeline */
    pipeline = gst_pipeline_new("test-pipeline");
    if (!pipeline || !source || !converter || !appsink) {
        g_printerr("Not all elements could be created.\n");
        return -1;
    }

    /* Set up appsink: emit-signals is off by default */
    g_object_set(appsink, "emit-signals", TRUE, NULL);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(cb_appsink_new_sample), NULL);

    /* Link all elements that can be automatically linked because they have "Always" pads */
    gst_bin_add_many(GST_BIN(pipeline), source, converter, appsink, NULL);
    // TEST_MODULE ispInit(source);
    /*
     * @note:
     * 1. Must be called after gst_bin_add_many.
     *    First find out which Pixel Formats the camera supports:
     *        v4l2-ctl --list-formats-ext --device /dev/video0
     *    Here the only supported formats are YUYV and MJPG; the command also
     *    lists the exact resolutions and frame rates.
     */
#if defined(VIDEO_X_RAW)
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
        // "format", G_TYPE_STRING, "YV12", //<<< IF THIS IS SET TO ARGB (THE FORMAT I WANT) IT FAILS ON LINKING
        // "format", G_TYPE_STRING, "NV21",
        "framerate", GST_TYPE_FRACTION, 25, 1,
        // "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
        "width", G_TYPE_INT, 320,
        "height", G_TYPE_INT, 240,
        NULL);
#else
    GstCaps *caps = gst_caps_new_simple("video/mpeg",
        "mpegversion", G_TYPE_INT, 2,
        "systemstream", G_TYPE_BOOLEAN, TRUE,
        NULL);
#endif
    /* A conversion happens here, so the elements are linked one by one.
     * If the camera rejects the filtered caps (as with the video/mpeg caps
     * above), fall back to an unfiltered link and let the pipeline negotiate;
     * that is how the sample run below ended up with 1920x1080 YUY2. */
    if (!gst_element_link_filtered(source, converter, caps)) {
        if (!gst_element_link(source, converter)) {
            g_printerr("source and converter could not be linked.\n");
            gst_caps_unref(caps);
            gst_object_unref(pipeline);
            return -1;
        }
    }
    gst_caps_unref(caps);
    if (!gst_element_link(converter, appsink)) {
        g_printerr("converter and appsink could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    /* Start playing the pipeline */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
                                     GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (msg != NULL) {
        GError *err;
        gchar *debug_info;
        switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR:
            gst_message_parse_error(msg, &err, &debug_info);
            g_printerr("Error received from element %s: %s\n",
                       GST_OBJECT_NAME(msg->src), err->message);
            g_printerr("Debugging information: %s\n",
                       debug_info ? debug_info : "none");
            g_clear_error(&err);
            g_free(debug_info);
            break;
        case GST_MESSAGE_EOS:
            g_print("End-Of-Stream reached.\n");
            break;
        default:
            /* We should not reach here because we only asked for ERRORs and EOS */
            g_printerr("Unexpected message received.\n");
            break;
        }
        gst_message_unref(msg); /* unref exactly once */
    } else {
        g_printerr("error.");
    }

    /* Free resources */
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
Build:
cmake_minimum_required(VERSION 3.0)
project(test_project)

find_package(PkgConfig REQUIRED)
# Find gstreamer-1.0 and expose its variables under the prefix GST1 (GST1 is a name we choose)
pkg_search_module(GST1 REQUIRED gstreamer-1.0)

add_executable(a.out v4l2src_appsink.c)
target_include_directories(a.out PRIVATE ${GST1_INCLUDE_DIRS})
target_link_libraries(a.out ${GST1_LIBRARIES} -lpthread)

message("GST1_LIBRARIES: ${GST1_LIBRARIES}")
Sample output:
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
frame siz:4147200, max_siz:4147200, 07 FPS
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080
format:YUY2, width:1920, height:1080