 # SPDX-License-Identifier: Apache-2.0
 #
 import gi
-gi.require_version('Gst', '1.0')
+
+gi.require_version("Gst", "1.0")
 from gi.repository import Gst

 from src.server.webrtc.gstreamer_webrtc_stream import GStreamerWebRTCStream
 from src.server.common.utils import logging
+
+
 class GStreamerWebRTCManager:

     _source_mediamtx = "appsrc name=webrtc_source format=GST_FORMAT_TIME "
-    _WebRTCVideoPipeline = " ! videoconvert ! gvawatermark " \
-                           " ! x264enc speed-preset=ultrafast name=h264enc" \
-                           " ! video/x-h264,profile=baseline " \
-                           " ! whipclientsink signaller::whip-endpoint="
-    _WebRTCVideoPipeline_jpeg = " ! jpegdec ! videoconvert ! gvawatermark " \
-                                " ! x264enc speed-preset=ultrafast name=h264enc " \
-                                " ! video/x-h264,profile=baseline " \
-                                " ! whipclientsink signaller::whip-endpoint="
-
+    _WebRTCVideoPipeline = (
+        " ! videoconvert ! gvawatermark "
+        " ! x264enc speed-preset=ultrafast name=h264enc"
+        " ! video/x-h264,profile=baseline "
+        " ! whipclientsink signaller::whip-endpoint="
+    )
+    _WebRTCVideoPipeline_jpeg = (
+        " ! jpegdec ! videoconvert ! gvawatermark "
+        " ! x264enc speed-preset=ultrafast name=h264enc "
+        " ! video/x-h264,profile=baseline "
+        " ! whipclientsink signaller::whip-endpoint="
+    )
+
     # GPU pipeline variants for hardware-accelerated buffers
-    _WebRTCVideoPipeline_VAMemory = " ! videoconvert ! gvawatermark " \
-                                    " ! vah264enc name=h264enc " \
-                                    " ! h264parse " \
-                                    " ! whipclientsink signaller::whip-endpoint="
-
-    _WebRTCVideoPipeline_jpeg_VAMemory = " ! vajpegdec ! videoconvert ! gvawatermark " \
-                                         " ! vah264enc name=h264enc " \
-                                         " ! h264parse " \
-                                         " ! whipclientsink signaller::whip-endpoint="
+    _WebRTCVideoPipeline_VAMemory = (
+        " ! videoconvert ! gvawatermark "
+        " ! vah264enc name=h264enc "
+        " ! h264parse "
+        " ! whipclientsink signaller::whip-endpoint="
+    )
+
+    _WebRTCVideoPipeline_jpeg_VAMemory = (
+        " ! vajpegdec ! videoconvert ! gvawatermark "
+        " ! vah264enc name=h264enc "
+        " ! h264parse "
+        " ! whipclientsink signaller::whip-endpoint="
+    )

     def __init__(self, whip_endpoint):
-        self._logger = logging.get_logger('GStreamerWebRTCManager', is_static=True)
+        self._logger = logging.get_logger("GStreamerWebRTCManager", is_static=True)
         self._whip_endpoint = whip_endpoint
         self._streams = {}

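For orientation before the next hunks: the pipeline constants above are only the tail of a launch description. As `_get_launch_string()` further down in this diff shows, the manager prepends an `appsrc` with the negotiated caps and appends `<whip_endpoint>/<peer_id>/whip` to the `whipclientsink` endpoint property. A minimal sketch of the assembled string, using made-up caps and endpoint values (the MediaMTX base URL and port are assumptions, not part of the commit):

# Illustrative only -- not part of the commit; caps and endpoint values are made up.
source = (
    'appsrc name=webrtc_source format=GST_FORMAT_TIME '
    ' caps="video/x-raw,width=1280,height=720,framerate=30/1,format=BGRx"'
)
template = (
    " ! videoconvert ! gvawatermark "
    " ! x264enc speed-preset=ultrafast name=h264enc"
    " ! video/x-h264,profile=baseline "
    " ! whipclientsink signaller::whip-endpoint="
)
whip_endpoint = "http://localhost:8889"  # assumed MediaMTX WHIP base URL
peer_id = "peer1"

# Mirrors _get_launch_string(): " {src} {pipeline} " + endpoint + "/" + peer_id + "/whip"
launch = " {} {} ".format(source, template) + whip_endpoint + "/" + peer_id + "/whip"
print(launch)  # this is the launch string handed to GStreamerWebRTCStream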
@@ -47,17 +58,30 @@ def _peerid_in_use(self, peer_id):
     def add_stream(self, peer_id, frame_caps, destination_instance, overlay):
         stream_caps = self._select_caps(frame_caps.to_string())
         if not self._peerid_in_use(peer_id):
-            launch_string = self._get_launch_string(stream_caps, peer_id,overlay)
-            self._streams[peer_id] = GStreamerWebRTCStream(peer_id, stream_caps, launch_string, destination_instance,
-                                                           self._whip_endpoint)
+            launch_string = self._get_launch_string(stream_caps, peer_id, overlay)
+            self._streams[peer_id] = GStreamerWebRTCStream(
+                peer_id,
+                stream_caps,
+                launch_string,
+                destination_instance,
+                self._whip_endpoint,
+            )
+            # pylint: disable=consider-using-f-string
             self._logger.info("Starting WebRTC Stream for peer_id:{}".format(peer_id))
             self._streams[peer_id].start()

     def _select_caps(self, caps):
-        split_caps = caps.split(',')
+        split_caps = caps.split(",")
         new_caps = []
-        selected_caps = ['image/jpeg', 'video/x-raw', 'width', 'height', 'framerate',
-                         'layout', 'format']
+        selected_caps = [
+            "image/jpeg",
+            "video/x-raw",
+            "width",
+            "height",
+            "framerate",
+            "layout",
+            "format",
+        ]
         for cap in split_caps:
             for selected in selected_caps:
                 if selected in cap:
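To make the caps filtering concrete: `frame_caps.to_string()` yields a comma-separated caps string, and `_select_caps()` keeps only the fields matching the `selected_caps` whitelist. A small sketch with a made-up caps string; the lines that append to `new_caps` and return it fall outside this hunk, so the list comprehension below is an assumption about that behavior:

# Sketch of the whitelist filtering; assumes matching fields are appended to new_caps
# (the append/return lines are outside this hunk). Caps string is a made-up example.
caps = (
    "video/x-raw(memory:VAMemory), format=(string)NV12, width=(int)1920,"
    " height=(int)1080, framerate=(fraction)30/1"
)
selected = ["image/jpeg", "video/x-raw", "width", "height", "framerate", "layout", "format"]
kept = [cap for cap in caps.split(",") if any(key in cap for key in selected)]
print(kept)
# ['video/x-raw(memory:VAMemory)', ' format=(string)NV12', ' width=(int)1920',
#  ' height=(int)1080', ' framerate=(fraction)30/1']

Note that the memory feature tag survives because it is attached to the `video/x-raw` field, which is exactly what `_is_gpu_buffer()` in the next hunks relies on.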
@@ -68,7 +92,7 @@ def _is_gpu_buffer(self, caps):
         """
         Check if the caps indicate a GPU buffer type.
         """
-        caps_string = ','.join(caps)
+        caps_string = ",".join(caps)
         if "memory:VASurface" in caps_string:
             return True, "VASurface"
         elif "memory:DMABuf" in caps_string:
@@ -77,47 +101,89 @@ def _is_gpu_buffer(self, caps):
             return True, "VAMemory"
         return False, None

-    def _get_launch_string(self, stream_caps, peer_id,overlay):
-        s_src = "{} caps=\"{}\"".format(self._source_mediamtx, ','.join(stream_caps))
-
+    def _get_launch_string(self, stream_caps, peer_id, overlay):
+        # pylint: disable=consider-using-f-string, too-many-branches
+        s_src = '{} caps="{}"'.format(self._source_mediamtx, ",".join(stream_caps))
+
         is_gpu, buffer_type = self._is_gpu_buffer(stream_caps)
-
+
         # Look for vah264enc element. Reported in some Xeon platforms as missing.
-        # When incoming buffers are from GPU and vah264enc is not present,
-        # we will use the software encoder to ensure that the pipeline can still function without it.
+        # When incoming buffers are from GPU and vah264enc is not present, we look for
+        # alternate vah264lpenc.
+        # If neither is found and GPU buffers are used, we will use the software
+        # encoder to ensure that the pipeline can still function without it.
         vah264enc_present = Gst.ElementFactory.find("vah264enc")
-
+        vah264lpenc_present = Gst.ElementFactory.find("vah264lpenc")
+
         if "image/jpeg" in stream_caps:
-            if is_gpu and buffer_type == "VAMemory" and vah264enc_present:
-                video_pipeline = self._WebRTCVideoPipeline_jpeg_VAMemory
+            # GPU buffers with jpeg input
+            if is_gpu and buffer_type == "VAMemory":
+                if vah264enc_present:
+                    video_pipeline = self._WebRTCVideoPipeline_jpeg_VAMemory
+                elif vah264lpenc_present:
+                    self._logger.warning(
+                        "vah264enc not found, but vah264lpenc is present. "
+                        + "Using vah264lpenc for encoding."
+                    )
+                    video_pipeline = self._WebRTCVideoPipeline_jpeg_VAMemory.replace(
+                        "vah264enc", "vah264lpenc"
+                    )
+                else:
+                    self._logger.warning(
+                        "vah264enc and vah264lpenc not found, "
+                        + "using software encoding"
+                    )
+                    video_pipeline = self._WebRTCVideoPipeline_jpeg
+            # CPU buffers with jpeg input
             else:
-                if is_gpu and buffer_type == "VAMemory" and not vah264enc_present:  # warn if GPU buffer and no vah264enc
-                    self._logger.warning("vah264enc not found, using software encoding")
                 video_pipeline = self._WebRTCVideoPipeline_jpeg
         else:
-            if is_gpu and buffer_type == "VAMemory" and vah264enc_present:
-                video_pipeline = self._WebRTCVideoPipeline_VAMemory
+            # GPU buffers with raw video input
+            if is_gpu and buffer_type == "VAMemory":
+                if vah264enc_present:
+                    video_pipeline = self._WebRTCVideoPipeline_VAMemory
+                elif vah264lpenc_present:
+                    self._logger.warning(
+                        "vah264enc not found, but vah264lpenc is present. "
+                        + "Using vah264lpenc for encoding."
+                    )
+                    video_pipeline = self._WebRTCVideoPipeline_VAMemory.replace(
+                        "vah264enc", "vah264lpenc"
+                    )
+                else:
+                    self._logger.warning(
+                        "vah264enc and vah264lpenc not found, "
+                        + "using software encoding"
+                    )
+                    video_pipeline = self._WebRTCVideoPipeline
+            # CPU buffers with raw video input
             else:
-                if is_gpu and buffer_type == "VAMemory" and not vah264enc_present:  # warn if GPU buffer and no vah264enc
-                    self._logger.warning("vah264enc not found, using software encoding")
                 video_pipeline = self._WebRTCVideoPipeline
         if overlay is False:
             video_pipeline = video_pipeline.replace("! gvawatermark ", "")
         elif overlay is True:
             video_pipeline = video_pipeline
         pipeline_launch = " {} {} ".format(s_src, video_pipeline)
-        pipeline_launch = pipeline_launch + self._whip_endpoint + "/" + peer_id + "/whip"
+        pipeline_launch = (
+            pipeline_launch + self._whip_endpoint + "/" + peer_id + "/whip"
+        )
         self._logger.info("Final launch WebRTC Streams {}".format(pipeline_launch))
         return pipeline_launch

     def remove_stream(self, peer_id):
         if peer_id in self._streams:
-            self._logger.info("Stopping WebRTC Stream for peer_id {id}".format(id=peer_id))
+            # pylint: disable=consider-using-f-string, logging-format-interpolation
+            self._logger.info(
+                "Stopping WebRTC Stream for peer_id {id}".format(id=peer_id)
+            )
             self._streams[peer_id].stop()
             del self._streams[peer_id]
-        self._logger.debug("Remaining set of WebRTC Streams {}".format(self._streams))
+        # pylint: disable=consider-using-f-string, logging-format-interpolation
+        self._logger.debug(
+            "Remaining set of WebRTC Streams {}".format(self._streams)
+        )

     def stop(self):
         for peer_id in list(self._streams):
             self.remove_stream(peer_id)
-        self._streams = None
+        self._streams = None
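The heart of this commit is the encoder selection chain for VAMemory buffers: prefer `vah264enc`, fall back to the low-power `vah264lpenc` when only that variant is registered, and otherwise drop to software `x264enc` so the stream keeps working. A condensed, standalone sketch of that chain under those assumptions (illustrative only; the templates below are shortened stand-ins for the class constants, not the exact strings from the module):

# Condensed sketch of the encoder fallback introduced above; not part of the diff.
import gi

gi.require_version("Gst", "1.0")
from gi.repository import Gst

Gst.init(None)

# Shortened stand-ins for the VAMemory and software pipeline templates.
GPU_TEMPLATE = (
    " ! videoconvert ! gvawatermark ! vah264enc name=h264enc ! h264parse"
    " ! whipclientsink signaller::whip-endpoint="
)
SW_TEMPLATE = (
    " ! videoconvert ! gvawatermark ! x264enc speed-preset=ultrafast name=h264enc"
    " ! video/x-h264,profile=baseline ! whipclientsink signaller::whip-endpoint="
)


def pick_pipeline(is_gpu_vamemory):
    """Return the encoder branch based on which VA-API H.264 encoder is registered."""
    if is_gpu_vamemory:
        if Gst.ElementFactory.find("vah264enc"):
            return GPU_TEMPLATE
        if Gst.ElementFactory.find("vah264lpenc"):
            # Low-power encoder exposed on platforms that lack the full vah264enc.
            return GPU_TEMPLATE.replace("vah264enc", "vah264lpenc")
    # No VA-API encoder available (or CPU buffers): software encode keeps the stream alive.
    return SW_TEMPLATE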