From 68c9a7b5ebc7610aa938d72aacf083a63f9cbc0d Mon Sep 17 00:00:00 2001
From: Breit
Date: Fri, 9 Mar 2018 13:10:50 -0500
Subject: [PATCH] new feature: use your own gst-launch-1.0 pipeline. Use -parse
 and paste the command, and the PipelineHelper will parse it and replace your
 original source with the InstantCameraAppSrc. Also added some more notes to
 the user about using pylongstreamer with Nvidia TX1/TX2.

---
 source/CPipelineHelper.cpp |  48 +++++++++++++++++
 source/CPipelineHelper.h   |   3 +
 source/pylongstreamer.cpp  | 105 +++++++++++++++++++++++---------
 3 files changed, 128 insertions(+), 28 deletions(-)

diff --git a/source/CPipelineHelper.cpp b/source/CPipelineHelper.cpp
index d9d23c9..986615f 100644
--- a/source/CPipelineHelper.cpp
+++ b/source/CPipelineHelper.cpp
@@ -124,6 +124,9 @@ bool CPipelineHelper::build_pipeline_display()
 	g_object_set(G_OBJECT(filter), "caps", filter_caps, NULL);
 	gst_caps_unref(filter_caps);
 
+	// if you are using nvidia tx1/tx2, the built-in video sink that is found by autovideosink does not advertise it needs conversion (it does not support RGB).
+	// so we must use a filter such that the converter knows to convert the image format.
+	// if you are using a video sink that supports RGB, then you do not need to convert to i420 and you can remove this filter and save some cpu load.
 	// add and link the pipeline elements
 	gst_bin_add_many(GST_BIN(m_pipeline), m_source, m_videoScaler, m_videoScalerCaps, convert, filter, sink, NULL);
 	gst_element_link_many(m_source, m_videoScaler, m_videoScalerCaps, convert, filter, sink, NULL);
@@ -356,3 +359,48 @@ bool CPipelineHelper::build_pipeline_h264file(string fileName)
 		return false;
 	}
 }
+
+// example of how to create a pipeline from a string that you would use with gst-launch-1.0
+bool CPipelineHelper::build_pipeline_parsestring(string pipelineString)
+{
+	try
+	{
+		if (m_pipelineBuilt == true)
+		{
+			cout << "Cancelling -parsestring. Another pipeline has already been built." << endl;
+			return false;
+		}
+
+		if (build_videoscaler() == false)
+			return false;
+
+		cout << "Applying this Pipeline to the CInstantCameraAppSrc: " << pipelineString << "..." << endl;
+
+		string strPipeline = "";
+		if (pipelineString.find("gst-launch") != std::string::npos)
+		{
+			std::size_t start = pipelineString.find("!");
+			strPipeline = pipelineString.substr(start + 1);
+		}
+		else
+			strPipeline = pipelineString;
+
+		GstElement *userPipeline;
+		userPipeline = gst_parse_bin_from_description(strPipeline.c_str(), true, NULL);
+
+		// add and link the pipeline elements
+		gst_bin_add_many(GST_BIN(m_pipeline), m_source, userPipeline, NULL);
+		gst_element_link_many(m_source, userPipeline, NULL);
+
+		cout << "Pipeline Made." 
<< endl; + + m_pipelineBuilt = true; + + return true; + } + catch (std::exception &e) + { + cerr << "An exception occurred in build_pipeline_parsestring(): " << endl << e.what() << endl; + return false; + } +} \ No newline at end of file diff --git a/source/CPipelineHelper.h b/source/CPipelineHelper.h index ebceebb..f0247c7 100644 --- a/source/CPipelineHelper.h +++ b/source/CPipelineHelper.h @@ -46,6 +46,9 @@ class CPipelineHelper // example of how to create a pipeline for encoding images in h264 format and streaming to local video file bool build_pipeline_h264file(string fileName); + // example of how to create a pipeline from a string that you would use with gst-launch-1.0 + bool build_pipeline_parsestring(string pipelineString); + private: bool m_pipelineBuilt; bool m_scaleVideo; diff --git a/source/pylongstreamer.cpp b/source/pylongstreamer.cpp index ba7b378..38d147c 100644 --- a/source/pylongstreamer.cpp +++ b/source/pylongstreamer.cpp @@ -22,30 +22,53 @@ IS OUTSIDE THE SCOPE OF THIS LICENSE. -Usage: -pylongstreamer -camera -width -height -framerate -ondemand -usetrigger - - -Example: -pylongstreamer -camera 12345678 -width 320 -height 240 -framerate 15 -h264file mymovie.h264 - -Quick-Start Example (use first camera found, display in window, 640x480, 30fps): -pylongstreamer -display - -Note: --camera: If not used, we will use first detected camera. --ondemand: Instead of freerunning, camera will be software triggered with each need-data signal. May lower CPU load, but may be less 'real-time'. --usetrigger: Camera will expect to be hardware triggered by user via IO ports (cannot be used with -ondemand). --framebuffer (directs raw image stream to Linux framebuffer, e.g. /dev/fb0). Useful when using additional displays - -Pipeline Examples (pick one): --h264stream (Encodes images as h264 and transmits stream to another PC running a GStreamer receiving pipeline.) --h264file (Encodes images as h264 and saves stream to local file.) 
--display (displays the raw image stream in a window on the local machine.) --framebuffer (directs raw image stream to Linux framebuffer, e.g. /dev/fb0) - -Note: -Some GStreamer elements (plugins) used in the pipeline examples may not be available on all systems. Consult GStreamer for more information: -https://gstreamer.freedesktop.org/ + PylonGStreamer: + Demo of InstantCameraAppSrc class (and PipelineHelper). + + Concept Overview: + <--------------- InstantCameraAppSrc --------------> <------------ PipelineHelper -----------> + +--------------------------------------------------+ +---------+ +---------+ +---------+ + | source | | element | | element | | sink | + | (camera + driver + GstAppSrc + rescale + rotate) | | | | | | | + | src--sink src--sink src--sink | + +--------------------------------------------------+ +---------+ +---------+ +---------+ + + Usage: + pylongstreamer -options -pipeline + + Options: + -camera (Use a specific camera. If not specified, will use first camera found.) + -aoi (Camera's Area Of Interest. If not specified, will use camera's maximum.) + -rescale (Will rescale the image for the pipeline if desired.) + -rotate (Will rotate 90, 180, 270 degrees clockwise) + -framerate (If not specified, will use camera's maximum under current settings.) + -ondemand (Will software trigger the camera when needed instead of using continuous free run. May lower CPU load.) + -usetrigger (Will configure the camera to expect a hardware trigger on IO Line 1. eg: TTL signal.) + + Pipeline Examples (pick one): + -h264stream (Encodes images as h264 and transmits stream to another PC running a GStreamer receiving pipeline.) + -h264file (Encodes images as h264 and records stream to local file.) + -window (displays the raw image stream in a window on the local machine.) + -framebuffer (directs raw image stream to Linux framebuffer. eg: /dev/fb0) + -parse (try your existing gst-launch-1.0 pipeline string. 
We will replace the original pipeline source with the Basler camera.)
+
+	Examples:
+	pylongstreamer -window
+	pylongstreamer -camera 12345678 -aoi 640 480 -framerate 15 -rescale 320 240 -h264file mymovie.h264
+	pylongstreamer -rescale 320 240 -parse "gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! autovideosink"
+
+	Quick-Start Example:
+	pylongstreamer -window
+
+	NVIDIA TX1/TX2 Note:
+	When using autovideosink for display, the system-preferred built-in videosink plugin does not advertise the formats it supports. So the image must be converted manually.
+	For an example of how to do this, see CPipelineHelper::build_pipeline_display().
+	If you are using pylongstreamer with the -parse argument in order to use your own pipeline, add a caps filter after the normal videoconvert and before autovideosink:
+	./pylongstreamer -parse "gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! video/x-raw,format=I420 ! autovideosink"
+
+	Note:
+	Some GStreamer elements (plugins) used in the pipeline examples may not be available on all systems. Consult GStreamer for more information:
+	https://gstreamer.freedesktop.org/
 
 */
 
@@ -160,12 +183,14 @@
 bool h264stream = false;
 bool h264file = false;
 bool display = false;
 bool framebuffer = false;
+bool parsestring = false;
 bool onDemand = false;
 bool useTrigger = false;
 string serialNumber = "";
 string ipaddress = "";
 string filename = "";
 string fbdev = "";
+string pipelineString = "";
 
 int ParseCommandLine(gint argc, gchar *argv[])
 {
@@ -202,13 +227,24 @@ int ParseCommandLine(gint argc, gchar *argv[])
 			cout << " -h264file (Encodes images as h264 and records stream to local file.)" << endl;
 			cout << " -window (displays the raw image stream in a window on the local machine.)" << endl;
 			cout << " -framebuffer (directs raw image stream to Linux framebuffer. eg: /dev/fb0)" << endl;
+			cout << " -parse (try your existing gst-launch-1.0 pipeline string. 
We will replace the original pipeline source with the Basler camera if needed.)" << endl;
 			cout << endl;
-			cout << "Example: " << endl;
+			cout << "Examples: " << endl;
+			cout << " pylongstreamer -framebuffer /dev/fb0" << endl;
+			cout << " pylongstreamer -rescale 640 480 -h264stream 172.17.1.199" << endl;
 			cout << " pylongstreamer -camera 12345678 -aoi 640 480 -framerate 15 -rescale 320 240 -h264file mymovie.h264" << endl;
+			cout << " pylongstreamer -rescale 320 240 -parse \"gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! autovideosink\"" << endl;
+			cout << " pylongstreamer -rescale 320 240 -parse \"videoflip method=vertical-flip ! videoconvert ! autovideosink\"" << endl;
 			cout << endl;
-			cout << "Quick-Start Example:" << endl;
+			cout << "Quick-Start Example to display stream:" << endl;
 			cout << " pylongstreamer -window" << endl;
 			cout << endl;
+			cout << "NVIDIA TX1/TX2 Note:" << endl;
+			cout << "When using autovideosink for display, the system-preferred built-in videosink plugin does not advertise the formats it supports. So the image must be converted manually." << endl;
+			cout << "For an example of how to do this, see CPipelineHelper::build_pipeline_display()." << endl;
+			cout << "If you are using pylongstreamer with the -parse argument in order to use your own pipeline, add a caps filter after the normal videoconvert and before autovideosink:" << endl;
+			cout << "./pylongstreamer -parse \"gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! video/x-raw,format=I420 ! autovideosink\"" << endl;
+			cout << endl;
 			cout << "Note:" << endl;
 			cout << " Some GStreamer elements (plugins) used in the pipeline examples may not be available on all systems. 
Consult GStreamer for more information:" << endl;
 			cout << " https://gstreamer.freedesktop.org/" << endl;
@@ -335,6 +371,17 @@ int ParseCommandLine(gint argc, gchar *argv[])
 				return -1;
 			}
 		}
+		else if (string(argv[i]) == "-parse")
+		{
+			parsestring = true;
+			if (argv[i + 1] != NULL)
+				pipelineString = string(argv[i + 1]);
+			else
+			{
+				cout << "pipeline string not specified. Use one of these formats with quotes: \"gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! autovideosink\" or \"videoflip method=vertical-flip ! videoconvert ! autovideosink\"" << endl;
+				return -1;
+			}
+		}
 		// deprecated
 		else if (string(argv[i]) == "-display")
 		{
@@ -364,7 +411,7 @@ int ParseCommandLine(gint argc, gchar *argv[])
 		}
 	}
 
-	if (display == false && framebuffer == false && h264file == false && h264stream == false)
+	if (display == false && framebuffer == false && h264file == false && h264stream == false && parsestring == false)
 	{
 		cout << "No pipeline specified." << endl;
 		return -1;
@@ -457,6 +504,8 @@ gint main(gint argc, gchar *argv[])
 		pipelineBuilt = myPipelineHelper.build_pipeline_h264file(filename.c_str());
 	else if (framebuffer == true)
 		pipelineBuilt = myPipelineHelper.build_pipeline_framebuffer(fbdev.c_str());
+	else if (parsestring == true)
+		pipelineBuilt = myPipelineHelper.build_pipeline_parsestring(pipelineString.c_str());
 
 	if (pipelineBuilt == false)
 	{
@@ -470,7 +519,7 @@ gint main(gint argc, gchar *argv[])
 			exitCode = -1;
 			throw std::runtime_error("Could not start camera!");
 		}
-
+		// Start the pipeline.
 		cout << "Starting pipeline..." << endl;
 		gst_element_set_state(pipeline, GST_STATE_PLAYING);