Skip to content

Commit

Permalink
new feature: use your own gst-launch-1.0 pipeline. Use -parse and pas…
Browse files Browse the repository at this point in the history
…te the command, and the PipelineHelper will parse it and replace your original source with the InstantCameraAppSrc. Also added some more notes to the user about using pylongstreamer with Nvidia TX1/TX2.
  • Loading branch information
Breit authored and Breit committed Mar 9, 2018
1 parent 6898823 commit 68c9a7b
Show file tree
Hide file tree
Showing 4 changed files with 140 additions and 28 deletions.
4 changes: 4 additions & 0 deletions source/CInstantCameraAppSrc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -421,7 +421,11 @@ bool CInstantCameraAppSrc::retrieve_image()
m_Image.GetImageSize(),
NULL,
NULL);
 
// Push the gst buffer wrapping the image buffer to the source pads of the AppSrc element, where it's picked up by the rest of the pipeline
GstFlowReturn ret;
g_signal_emit_by_name(m_appsrc, "push-buffer", m_gstBuffer, &ret);
Expand Down
56 changes: 56 additions & 0 deletions source/CPipelineHelper.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,17 @@ bool CPipelineHelper::build_pipeline_display()
g_object_set(G_OBJECT(filter), "caps", filter_caps, NULL);
gst_caps_unref(filter_caps);

// if you are using nvidia tx1/tx2, the built-in video sink that is found by autovideosink does not advertise it needs conversion (it does not support RGB).
// so we must use a filter such that the converter knows to convert the image format.
// if you are using a video sink that supports RGB, then you do not need to convert to i420 and you can remove this filter and save some cpu load.
GstElement *filter;
GstCaps *filter_caps;
filter = gst_element_factory_make("capsfilter", "filter");
filter_caps = gst_caps_new_simple("video/x-raw","format", G_TYPE_STRING, "I420", NULL);

g_object_set(G_OBJECT(filter), "caps", filter_caps, NULL);
gst_caps_unref(filter_caps);

// add and link the pipeline elements
gst_bin_add_many(GST_BIN(m_pipeline), m_source, m_videoScaler, m_videoScalerCaps, convert, filter, sink, NULL);
gst_element_link_many(m_source, m_videoScaler, m_videoScalerCaps, convert, filter, sink, NULL);
Expand Down Expand Up @@ -356,3 +367,48 @@ bool CPipelineHelper::build_pipeline_h264file(string fileName)
return false;
}
}

// Example of how to create a pipeline from a string that you would use with gst-launch-1.0.
// Accepts either a full gst-launch-1.0 command (the original source element is stripped and
// replaced by the camera source) or a bare element chain such as
// "videoflip method=vertical-flip ! videoconvert ! autovideosink".
// Returns true when the pipeline was built and linked, false on any failure.
bool CPipelineHelper::build_pipeline_parsestring(string pipelineString)
{
	try
	{
		if (m_pipelineBuilt == true)
		{
			cout << "Cancelling -parsestring. Another pipeline has already been built." << endl;
			return false;
		}

		if (build_videoscaler() == false)
			return false;

		cout << "Applying this Pipeline to the CInstantCameraAppSrc: " << pipelineString << "..." << endl;

		string strPipeline = pipelineString;
		if (pipelineString.find("gst-launch") != std::string::npos)
		{
			// The user pasted a full gst-launch command. Drop everything up to and
			// including the first "!" so the original source element is discarded.
			std::size_t delim = pipelineString.find("!");
			if (delim == std::string::npos)
			{
				// No "!" means there is nothing left to link after removing the source.
				// Bail out instead of letting substr(npos) throw std::out_of_range.
				cout << "No \"!\" found in the gst-launch string. There is nothing to link to the camera source." << endl;
				return false;
			}
			// Skip past the "!" itself: gst_parse_bin_from_description() does not
			// accept a description that starts with a link operator.
			strPipeline = pipelineString.substr(delim + 1);
		}

		// Parse the description into a single bin with ghost pads (second argument true),
		// so the whole user chain can be added and linked as one element.
		GstElement *userPipeline = gst_parse_bin_from_description(strPipeline.c_str(), true, NULL);
		if (userPipeline == NULL)
		{
			cerr << "Could not parse the pipeline string: " << strPipeline << endl;
			return false;
		}

		// add and link the pipeline elements
		gst_bin_add_many(GST_BIN(m_pipeline), m_source, userPipeline, NULL);
		if (!gst_element_link_many(m_source, userPipeline, NULL))
		{
			cerr << "Could not link the camera source to the parsed pipeline." << endl;
			return false;
		}

		cout << "Pipeline Made." << endl;

		m_pipelineBuilt = true;

		return true;
	}
	catch (std::exception &e)
	{
		cerr << "An exception occurred in build_pipeline_parsestring(): " << endl << e.what() << endl;
		return false;
	}
}
3 changes: 3 additions & 0 deletions source/CPipelineHelper.h
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,9 @@ class CPipelineHelper
// example of how to create a pipeline for encoding images in h264 format and streaming to local video file
bool build_pipeline_h264file(string fileName);

// example of how to create a pipeline from a string that you would use with gst-launch-1.0
bool build_pipeline_parsestring(string pipelineString);

private:
bool m_pipelineBuilt;
bool m_scaleVideo;
Expand Down
105 changes: 77 additions & 28 deletions source/pylongstreamer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,30 +22,53 @@
IS OUTSIDE THE SCOPE OF THIS LICENSE.
Usage:
pylongstreamer -camera <serialnumber> -width <columns> -height <rows> -framerate <fps> -ondemand -usetrigger -<pipeline> <options>
Example:
pylongstreamer -camera 12345678 -width 320 -height 240 -framerate 15 -h264file mymovie.h264
Quick-Start Example (use first camera found, display in window, 640x480, 30fps):
pylongstreamer -display
Note:
-camera: If not used, we will use first detected camera.
-ondemand: Instead of freerunning, camera will be software triggered with each need-data signal. May lower CPU load, but may be less 'real-time'.
-usetrigger: Camera will expect to be hardware triggered by user via IO ports (cannot be used with -ondemand).
-framebuffer <fbdevice> (directs raw image stream to Linux framebuffer, e.g. /dev/fb0). Useful when using additional displays
Pipeline Examples (pick one):
-h264stream <ipaddress> (Encodes images as h264 and transmits stream to another PC running a GStreamer receiving pipeline.)
-h264file <filename> <number of images> (Encodes images as h264 and saves stream to local file.)
-display (displays the raw image stream in a window on the local machine.)
-framebuffer <fbdevice> (directs raw image stream to Linux framebuffer, e.g. /dev/fb0)
Note:
Some GStreamer elements (plugins) used in the pipeline examples may not be available on all systems. Consult GStreamer for more information:
https://gstreamer.freedesktop.org/
PylonGStreamer:
Demo of InstantCameraAppSrc class (and PipelineHelper).
Concept Overview:
<--------------- InstantCameraAppSrc --------------> <------------ PipelineHelper ----------->
+--------------------------------------------------+ +---------+ +---------+ +---------+
| source | | element | | element | | sink |
| (camera + driver + GstAppSrc + rescale + rotate) | | | | | | |
| src--sink src--sink src--sink |
+--------------------------------------------------+ +---------+ +---------+ +---------+
Usage:
pylongstreamer -options -pipeline
Options:
-camera <serialnumber> (Use a specific camera. If not specified, will use first camera found.)
-aoi <width> <height> (Camera's Area Of Interest. If not specified, will use camera's maximum.)
-rescale <width> <height> (Will rescale the image for the pipeline if desired.)
-rotate <degrees clockwise> (Will rotate 90, 180, 270 degrees clockwise)
-framerate <fps> (If not specified, will use camera's maximum under current settings.)
-ondemand (Will software trigger the camera when needed instead of using continuous free run. May lower CPU load.)
-usetrigger (Will configure the camera to expect a hardware trigger on IO Line 1. eg: TTL signal.)
Pipeline Examples (pick one):
-h264stream <ipaddress> (Encodes images as h264 and transmits stream to another PC running a GStreamer receiving pipeline.)
-h264file <filename> <number of images> (Encodes images as h264 and records stream to local file.)
-window (displays the raw image stream in a window on the local machine.)
-framebuffer <fbdevice> (directs raw image stream to Linux framebuffer. eg: /dev/fb0)
-parse <string> (try your existing gst-launch-1.0 pipeline string. We will replace the original pipeline source with the Basler camera.)
Examples:
pylongstreamer -window
pylongstreamer -camera 12345678 -aoi 640 480 -framerate 15 -rescale 320 240 -h264file mymovie.h264
pylongstreamer -rescale 320 240 -parse "gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! autovideosink"
Quick-Start Example:
pylongstreamer -window
NVIDIA TX1/TX2 Note:
When using autovideosink for display, the system-preferred built-in videosink plugin does not advertise the formats it supports, so the image must be converted manually.
For an example of how to do this, see CPipelineHelper::build_pipeline_display().
If you are using pylongstreamer with the -parse argument in order to use your own pipeline, add a caps filter after the normal videoconvert and before autovideosink:
./pylongstreamer -parse "gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! video/x-raw,format=I420 ! autovideosink"
Note:
Some GStreamer elements (plugins) used in the pipeline examples may not be available on all systems. Consult GStreamer for more information:
https://gstreamer.freedesktop.org/
*/


Expand Down Expand Up @@ -160,12 +183,14 @@ bool h264stream = false;
bool h264file = false;
bool display = false;
bool framebuffer = false;
bool parsestring = false;
bool onDemand = false;
bool useTrigger = false;
string serialNumber = "";
string ipaddress = "";
string filename = "";
string fbdev = "";
string pipelineString = "";

int ParseCommandLine(gint argc, gchar *argv[])
{
Expand Down Expand Up @@ -202,13 +227,24 @@ int ParseCommandLine(gint argc, gchar *argv[])
cout << " -h264file <filename> <number of images> (Encodes images as h264 and records stream to local file.)" << endl;
cout << " -window (displays the raw image stream in a window on the local machine.)" << endl;
cout << " -framebuffer <fbdevice> (directs raw image stream to Linux framebuffer. eg: /dev/fb0)" << endl;
cout << " -parse <string> (try your existing gst-launch-1.0 pipeline string. We will replace the original pipeline source with the Basler camera if needed.)" << endl;
cout << endl;
cout << "Example: " << endl;
cout << "Examples: " << endl;
cout << " pylongstreamer -framebuffer /dev/fb0" << endl;
cout << " pylongstreamer -rescale 640 480 -h264stream 172.17.1.199" << endl;
cout << " pylongstreamer -camera 12345678 -aoi 640 480 -framerate 15 -rescale 320 240 -h264file mymovie.h264" << endl;
cout << " pylongstreamer -rescale 320 240 -parse \"gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! autovideosink\"" << endl;
cout << " pylongstreamer -rescale 320 240 -parse \"videoflip method=vertical-flip ! videoconvert ! autovideosink\"" << endl;
cout << endl;
cout << "Quick-Start Example:" << endl;
cout << "Quick-Start Example to display stream:" << endl;
cout << " pylongstreamer -window" << endl;
cout << endl;
cout << "NVIDIA TX1/TX2 Note:" << endl;
cout << "When using autovideosink for display, the system-preferred built-in videosink plugin does advertise the formats it supports. So the image must be converted manually." << endl;
cout << "For an example of how to do this, see CPipelineHelper::build_pipeline_display()." << endl;
cout << "If you are using pylongstreamer with the -parse argument in order to use your own pipeline, add a caps filter after the normal videoconvert and before autovideosink:" << endl;
cout << "./pylongstreamer -parse \"gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! video/x-raw,format=I420 ! autovideosink\"" << endl;
cout << endl;
cout << "Note:" << endl;
cout << " Some GStreamer elements (plugins) used in the pipeline examples may not be available on all systems. Consult GStreamer for more information:" << endl;
cout << " https://gstreamer.freedesktop.org/" << endl;
Expand Down Expand Up @@ -335,6 +371,17 @@ int ParseCommandLine(gint argc, gchar *argv[])
return -1;
}
}
else if (string(argv[i]) == "-parse")
{
parsestring = true;
if (argv[i + 1] != NULL)
pipelineString = string(argv[i + 1]);
else
{
cout << "pipeline string not specified. Use one of these format with quotes: \"gst-launch-1.0 videotestsrc ! videoflip method=vertical-flip ! videoconvert ! autovideosink\" or \"videoflip method=vertical-flip ! videoconvert ! autovideosink\"" << endl;
return -1;
}
}
// deprecated
else if (string(argv[i]) == "-display")
{
Expand Down Expand Up @@ -364,7 +411,7 @@ int ParseCommandLine(gint argc, gchar *argv[])
}
}

if (display == false && framebuffer == false && h264file == false && h264stream == false)
if (display == false && framebuffer == false && h264file == false && h264stream == false && parsestring == false)
{
cout << "No pipeline specified." << endl;
return -1;
Expand Down Expand Up @@ -457,6 +504,8 @@ gint main(gint argc, gchar *argv[])
pipelineBuilt = myPipelineHelper.build_pipeline_h264file(filename.c_str());
else if (framebuffer == true)
pipelineBuilt = myPipelineHelper.build_pipeline_framebuffer(fbdev.c_str());
else if (parsestring == true)
pipelineBuilt = myPipelineHelper.build_pipeline_parsestring(pipelineString.c_str());

if (pipelineBuilt == false)
{
Expand All @@ -470,7 +519,7 @@ gint main(gint argc, gchar *argv[])
exitCode = -1;
throw std::runtime_error("Could not start camera!");
}

// Start the pipeline.
cout << "Starting pipeline..." << endl;
gst_element_set_state(pipeline, GST_STATE_PLAYING);
Expand Down

0 comments on commit 68c9a7b

Please sign in to comment.