Yellowish video from second session
I have a very simple and straightforward C++ app that captures camera frames and dumps the encoded stream to a file. Within a single run, the first session produces a good video with no color problem, but from the second session onwards the dumped file has a color cast and looks yellowish.

If I stop the app and run it again, the pattern is the same: the first video is good, and the following videos have the color issue.
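By a "session" I mean one pass of the loop in main() below: open a new output file, build the pipeline, run it until it is stopped, and tear it down again. Roughly (simplified from the full listing that follows):

Code:
// One capture "session"; main() repeats this three times
fpVideo = fopen(filename, "w+");  // new output file per session
init();      // build rkcamsrc -> capsfilter -> mpph264enc -> appsink
start();     // run the GLib main loop until it is stopped (Ctrl+C / Ctrl+Z)
release();   // tear the pipeline down again
fclose(fpVideo);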

The full code is given below:

Code:
#include <errno.h>
#include <signal.h>
#include <stdio.h>

#include "gstreamer-1.0/gst/gst.h"
#include "gstreamer-1.0/gst/app/app.h"

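// Shared pipeline objects; init() creates a fresh set for every session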
GstElement *pipeline;
GstBus     *bus;
GMainLoop  *loop;

GstElement *cameraSource;
GstElement *camCapsFilter;
GstElement *encoder;
GstElement *appsink;
FILE* fpVideo = NULL;

bool init();
bool release();
bool start();
bool stop();

GstFlowReturn new_frame_callback(GstAppSink *appsink, gpointer data);
gboolean GstMessageParser(GstBus* bus, GstMessage* msg, gpointer udata);

void stopHandler(int sigCode)
{
 printf("Program received exit command, code %d\n", sigCode);
 stop();
}

int main()
{
   signal(SIGINT, stopHandler);
   signal(SIGTSTP, stopHandler);
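   // SIGINT/SIGTSTP only end the current session: stop() quits the main loop,
   // so the while loop below moves on to the next recording.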

   int i = 0;
   char filename[32];

   while(i++ < 3) {

     sprintf(filename, "video_%d.h264", i);
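     // Produces video_1.h264, video_2.h264 and video_3.h264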

     fpVideo = fopen(filename, "w+");
     if(fpVideo == NULL) {
         printf("Failed to open file, error %d\n", errno);
         continue;   // skip this session if the output file cannot be created
     }

     init();
     start();
     release();

     fclose(fpVideo);
   }

   return 0;
}

bool init()
{
   //Init pipeline
   gst_init(NULL, NULL);
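   // gst_init() is called once per session; calls after the first are no-ops.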

   pipeline = gst_pipeline_new("VideoPipeline");
   cameraSource = gst_element_factory_make("rkcamsrc", NULL);
   camCapsFilter = gst_element_factory_make("capsfilter", NULL);
   encoder = gst_element_factory_make("mpph264enc", NULL);
   appsink = gst_element_factory_make("appsink", NULL);

   //Config
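   // rkcamsrc: select the V4L2 device, buffer I/O mode, ISP mode and the
   // IQ tuning XML used for the IMX219 sensor.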
   g_object_set(G_OBJECT(cameraSource), "device", "/dev/video0", NULL);
   g_object_set(G_OBJECT(cameraSource), "io-mode", 4, NULL);
   g_object_set(G_OBJECT(cameraSource), "isp-mode", 1, NULL);
   g_object_set(G_OBJECT(cameraSource), "tuning-xml-path", "/etc/cam_iq/IMX219.xml", NULL);

   g_object_set(G_OBJECT(appsink), "name", "sink", NULL);

   // NV12 640x480 @ 30 fps raw caps between the camera and the encoder
   GstCaps *camCaps = gst_caps_from_string("video/x-raw,format=NV12,width=640,height=480,framerate=30/1");
   g_object_set(G_OBJECT(camCapsFilter), "caps", camCaps, NULL);
   gst_caps_unref(camCaps);

   gst_bin_add_many(GST_BIN(pipeline), cameraSource, camCapsFilter, encoder, appsink, NULL);

   GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");

   gst_app_sink_set_emit_signals((GstAppSink *)sink, true);
   gst_app_sink_set_max_buffers((GstAppSink *)sink, 1);

   GstAppSinkCallbacks callbacks = {NULL, NULL, new_frame_callback};
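   // GstAppSinkCallbacks fields are {eos, new_preroll, new_sample}; only new_sample is set here.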

   gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, NULL, NULL);
   gst_object_unref(sink);   // gst_bin_get_by_name() returned an extra reference

   //Link
   gst_element_link(cameraSource, camCapsFilter);
   gst_element_link(camCapsFilter, encoder);
   gst_element_link(encoder, appsink);

   return true;
}

bool release()
{
   g_main_loop_unref(loop);
   gst_bus_remove_watch(bus);   // drop the watch added in start() before unreffing the bus
   gst_object_unref(bus);
   gst_element_set_state(pipeline, GST_STATE_NULL);
   gst_object_unref(pipeline);
   return true;
}

bool start()
{
   //Start the capture
   gst_element_set_state(pipeline, GST_STATE_READY);
   gst_element_set_state(pipeline, GST_STATE_PLAYING);
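   // PLAYING is reached via READY/PAUSED; gst_element_set_state() walks the
   // intermediate states automatically.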
   bus = gst_element_get_bus(pipeline);

   gst_bus_add_watch(bus, GstMessageParser, NULL);

   loop = g_main_loop_new(NULL, FALSE);
   g_main_loop_run(loop);

   return true;
}

bool stop()
{
   //Close the pipeline
   if (g_main_loop_is_running(loop)) {
       g_main_loop_quit(loop);
   }
   gst_element_set_state(appsink, GST_STATE_NULL);
   gst_element_set_state(encoder, GST_STATE_NULL);
   gst_element_set_state(cameraSource, GST_STATE_NULL);
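   // The pipeline itself is brought to GST_STATE_NULL and unreffed in release().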
   return true;
}

GstFlowReturn new_frame_callback(GstAppSink *appsink, gpointer data)
{
   // gst_app_sink_pull_sample() already returns a reference we own; the buffer
   // inside the sample is owned by the sample and must not be unreffed separately.
   GstSample *gstSample = gst_app_sink_pull_sample(appsink);
   if(gstSample == NULL) {
       return GST_FLOW_OK;
   }

   GstBuffer *buffer = gst_sample_get_buffer(gstSample);

   GstMapInfo map;
   gst_buffer_map(buffer, &map, GST_MAP_READ);

   if(fpVideo) {
       fwrite(map.data, 1, map.size, fpVideo);
   } else {
       printf("Received frame with size %zu\n", map.size);
   }

   gst_buffer_unmap(buffer, &map);
   gst_sample_unref(gstSample);

   return GST_FLOW_OK;
}

gboolean GstMessageParser(GstBus* bus, GstMessage* msg, gpointer udata)
{
 if (msg != NULL)
 {
   GError *err = 0;
   gchar *debug_info = 0;

   switch (GST_MESSAGE_TYPE(msg))
   {
   case GST_MESSAGE_ERROR:
     gst_message_parse_error(msg, &err, &debug_info);
     g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
     g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
     g_clear_error(&err);
     g_free(debug_info);
     break;

   case GST_MESSAGE_WARNING:
     gst_message_parse_warning(msg, &err, &debug_info);
     g_printerr("Warning received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
     g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
     g_clear_error(&err);
     g_free(debug_info);
     break;

   case GST_MESSAGE_INFO:
     gst_message_parse_info(msg, &err, &debug_info);
     g_printerr("Info received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
     g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
     g_clear_error(&err);
     g_free(debug_info);
     break;

   case GST_MESSAGE_EOS:
     {
       g_print("End-Of-Stream reached.\n");
     }
     break;

   case GST_MESSAGE_STATE_CHANGED:
     GstState old_state, new_state;
     gst_message_parse_state_changed(msg, &old_state, &new_state, 0);
     g_print("Element %s changed state from %s to %s.\n", GST_OBJECT_NAME(msg->src), gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
     break;

   case GST_MESSAGE_QOS:
     break;

   case GST_MESSAGE_STREAM_STATUS:

     GstStreamStatusType stream_status_type;
     GstElement *owner;
     const gchar *stream_status_type_string;
     gst_message_parse_stream_status(msg, &stream_status_type, &owner);

     switch (stream_status_type)
     {
     case GST_STREAM_STATUS_TYPE_CREATE:
       stream_status_type_string = "CREATE";
       break;
     case GST_STREAM_STATUS_TYPE_ENTER:
       stream_status_type_string = "ENTER";
       break;
     case GST_STREAM_STATUS_TYPE_LEAVE:
       stream_status_type_string = "LEAVE";
       break;
     case GST_STREAM_STATUS_TYPE_DESTROY:
       stream_status_type_string = "DESTROY";
       break;

     case GST_STREAM_STATUS_TYPE_START:
       stream_status_type_string = "START";
       break;
     case GST_STREAM_STATUS_TYPE_PAUSE:
       stream_status_type_string = "PAUSE";
       break;
     case GST_STREAM_STATUS_TYPE_STOP:
       stream_status_type_string = "STOP";
       break;
     }

     g_printerr("STREAM STATUS received from element %s: %s\n", GST_OBJECT_NAME(owner), stream_status_type_string);
     break;

   default:
     g_printerr("Unparsed message received of type: %s\n", gst_message_type_get_name(GST_MESSAGE_TYPE(msg)));
     break;
   }
 }
 return TRUE;
}

I have attached the CMake build script for convenience.

Attachment: CMakeLists.txt (1.49 KB)

Please suggest whether there is an issue in my code that could be causing the color problem, or whether this points to an issue in the mpph264enc or rkcamsrc GStreamer plugins.

Thanks in advance.