pypylon icon indicating copy to clipboard operation
pypylon copied to clipboard

gstreamer+pypylon+python-how to stream

Open zoldaten opened this issue 4 years ago • 1 comments

Here is a ready-to-go solution for streaming with GStreamer with minimum latency, using a USB 3.0 camera (acA4112-20um), a Raspberry Pi 4B, and Raspbian Buster.

import sys;import traceback;import argparse;import typing as typ;import random;import time;from fractions import Fraction;import numpy as np

from gstreamer import GstContext, GstPipeline, GstApp, Gst, GstVideo, GLib, GstVideoSink
import gstreamer.utils as utils
from pypylon import pylon

# Stream configuration: pixel format, frame geometry and rate.
# These must match what the camera actually delivers (GRAY8 = 8-bit mono).
VIDEO_FORMAT = "GRAY8" #BGR #GRAY8 #I420
WIDTH, HEIGHT = 800, 3000 #1440
FPS = Fraction(25)  # frames per second, kept as a Fraction for exact caps strings
GST_VIDEO_FORMAT = GstVideo.VideoFormat.from_string(VIDEO_FORMAT)

def fraction_to_str(fraction: Fraction) -> str:
    """Render *fraction* as the "numerator/denominator" form GStreamer caps expect."""
    return f"{fraction.numerator}/{fraction.denominator}"

FPS_STR = fraction_to_str(FPS)
# Raw-video caps string built from the constants above, e.g.
# "video/x-raw,format=GRAY8,width=800,height=3000,framerate=25/1".
DEFAULT_CAPS = "video/x-raw,format={VIDEO_FORMAT},width={WIDTH},height={HEIGHT},framerate={FPS_STR}".format(**locals())
#x-rtp-stream,encoding-name=JPEG

# Open the first Basler camera found and start grabbing; LatestImageOnly
# drops older frames so the stream always carries the freshest image.
camera = pylon.InstantCamera(pylon.TlFactory.GetInstance().CreateFirstDevice())
camera.StartGrabbing(pylon.GrabStrategy_LatestImageOnly) 
# NOTE(review): this converter is configured but never used below — the loop
# calls grabResult.GetArray() directly (the Convert() path is commented out).
converter = pylon.ImageFormatConverter()
converter.OutputPixelFormat = pylon.PixelType_BGR8packed
converter.OutputBitAlignment = pylon.OutputBitAlignment_MsbAligned

# GStreamer pipeline: appsrc (fed from the grab loop below) -> H.264 encode
# tuned for low latency -> RTP payload -> UDP to the receiver host/port.
DEFAULT_PIPELINE = utils.to_gst_string([
    "appsrc emit-signals=True is-live=True caps={DEFAULT_CAPS}".format(**locals()),
    "queue",
    "videoscale",
    "videoconvert",
    "x264enc tune=zerolatency bitrate=500 speed-preset=superfast",
    "rtph264pay", 
    #"autovideosink"
    "udpsink host=192.168.1.116 port=5200"    
    
    
])
command = DEFAULT_PIPELINE
NUM_BUFFERS=10000  # number of frames to push before sending end-of-stream
# NOTE(review): GST_VIDEO_FORMAT was already computed above; this repeats it.
GST_VIDEO_FORMAT = GstVideo.VideoFormat.from_string(VIDEO_FORMAT)
# CHANNELS/DTYPE are only needed by the commented-out random-frame test path.
CHANNELS = utils.get_num_channels(GST_VIDEO_FORMAT)
DTYPE = utils.get_np_dtype(GST_VIDEO_FORMAT)
CAPS = DEFAULT_CAPS

with GstContext():  # create GstContext (hides MainLoop)
    pipeline = GstPipeline(command)

    def on_pipeline_init(self):
        """Configure the AppSrc element before the pipeline starts playing."""
        appsrc = self.get_by_cls(GstApp.AppSrc)[0]  # the single AppSrc in the pipeline
        # TIME format: the pts/duration we stamp below are interpreted as stream time.
        appsrc.set_property("format", Gst.Format.TIME)
        # Block push-buffer until downstream consumes queued buffers; keeps
        # appsrc's internal queue from growing without bound.
        appsrc.set_property("block", True)
        appsrc.set_caps(Gst.Caps.from_string(CAPS))  # declare the input format

    # Override on_pipeline_init so the properties above are applied before launch.
    pipeline._on_pipeline_init = on_pipeline_init.__get__(pipeline)

    try:
        pipeline.startup()
        appsrc = pipeline.get_by_cls(GstApp.AppSrc)[0]  # GstApp.AppSrc

        pts = 0  # running presentation timestamp, nanoseconds
        duration = 10**9 / (FPS.numerator / FPS.denominator)  # per-frame duration (ns)

        for _ in range(NUM_BUFFERS):
            grabResult = camera.RetrieveResult(500, pylon.TimeoutHandling_ThrowException)  # 500 ms timeout
            try:
                # BUGFIX: the buffer conversion/push used to run unconditionally,
                # outside this check — a failed first grab raised NameError
                # ('array' undefined) and a later failed grab re-sent a stale frame.
                if grabResult.GrabSucceeded():
                    array = grabResult.GetArray()
                    gst_buffer = utils.ndarray_to_gst_buffer(array)  # np.ndarray -> Gst.Buffer
                    pts += duration  # advance timestamp so receivers can derive fps
                    gst_buffer.pts = pts
                    gst_buffer.duration = duration
                    appsrc.emit("push-buffer", gst_buffer)
            finally:
                # BUGFIX: release the grab result so its buffer returns to
                # pypylon's pool instead of leaking one buffer per frame.
                grabResult.Release()

        appsrc.emit("end-of-stream")  # signal downstream that no more frames follow

        while not pipeline.is_done:
            time.sleep(.1)
    except Exception as e:
        print("Error: ", e)
    finally:
        pipeline.shutdown()

After starting, the pipeline looks like the following (set the IP and port in the code to choose where the stream is sent):

#sender
gst-launch-1.0 appsrc emit-signals=True is-live=True caps=video/x-raw,format=GRAY8,width=800,height=1440,framerate=25/1 ! queue ! videoscale ! videoconvert ! x264enc tune=zerolatency bitrate=500 speed-preset=superfast ! rtph264pay ! udpsink host=ip_of_receiver port=receivers_port

To view the stream on another PC (receiver):

gst-launch-1.0 -v udpsrc port=5200 caps = "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay   ! decodebin  ! videoconvert  ! autovideosink

Or view it locally (just comment out the udpsink line and uncomment the "autovideosink" line in the code), which corresponds to:

local view gst-launch-1.0 appsrc emit-signals=True is-live=True caps=video/x-raw,format=GRAY8,width=800,height=1440,framerate=15/1 ! queue ! videoconvert ! autovideosink

Have a nice day!

zoldaten avatar Dec 30 '21 12:12 zoldaten

Thanks for sharing your good solution with us.

SMA2016a avatar Mar 18 '22 08:03 SMA2016a