VideoPushTool2.java
import lombok.extern.slf4j.Slf4j;
import org.bytedeco.ffmpeg.avcodec.AVCodecParameters;
import org.bytedeco.ffmpeg.avformat.AVFormatContext;
import org.bytedeco.ffmpeg.avformat.AVStream;
import org.bytedeco.ffmpeg.global.avformat;
import org.bytedeco.javacv.*;
import java.io.*;
import java.util.HashMap;
import java.util.Map;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_AAC;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_H264;
import static org.bytedeco.ffmpeg.global.avutil.AV_LOG_DEBUG;
/**
 * Video push utility class.
 * Prerequisite: a server for the target push protocol (e.g. an RTMP or RTSP server) must already be set up.
*/
@Slf4j
public class VideoPushTool {
/**
 * Default video frame rate
*/
public static final int DEFAULT_VIDEO_FRAME_RATE = 30;
public static Map<String, BufferedInputStream> inputStreamMap = new HashMap<>();
public static Map<String, BufferedOutputStream> outputStreamMap = new HashMap<>();
/**
 * Call this method repeatedly to push video byte data to the given address over the given protocol.
 * All calls must come from the same thread; otherwise the internal piped streams will be closed and errors will occur.
 *
 * @param data        video byte data
 * @param pushAddress push address (protocol://address)
 * @param protocol    protocol, e.g. rtsp
*/
public static void pushVideo(byte[] data, String pushAddress, String protocol) {
try {
synchronized (outputStreamMap) {
BufferedInputStream inputStream = inputStreamMap.get(pushAddress);
BufferedOutputStream outputStream = outputStreamMap.get(pushAddress);
if (inputStream == null || outputStream == null) {
PipedInputStream pipedInputStream = new PipedInputStream();
inputStream = new BufferedInputStream(pipedInputStream);
outputStream = new BufferedOutputStream(new PipedOutputStream(pipedInputStream));
inputStreamMap.put(pushAddress, inputStream);
outputStreamMap.put(pushAddress, outputStream);
BufferedInputStream finalInputStream = inputStream;
Thread readerThread = new Thread(() -> {
try {
                        // Start pushing the video stream
VideoPushTool.pushVideo(finalInputStream, pushAddress, protocol);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
readerThread.start();
}
outputStream.write(data);
}
        } catch (IOException e) {
            log.info("Cleaning up piped streams|{}", pushAddress);
            outputStreamMap.remove(pushAddress);
            inputStreamMap.remove(pushAddress);
            log.error("Failed to write video data to the pipe", e);
        }
}
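    /**
     * Minimal usage sketch for the chunked push above (illustrative only, not part of the original API).
     * Assumptions: {@code chunkSource} is some arbitrary source of container bytes (for example a file
     * or network stream) and "rtsp" is the target protocol; adjust both to your environment.
     * All calls for one pushAddress must stay on the same thread, as required by pushVideo(byte[], ...).
     */
    private static void pushChunksExample(InputStream chunkSource, String pushAddress) throws IOException {
        byte[] buffer = new byte[4096];
        int read;
        // Feed raw bytes chunk by chunk; the first call creates the pipe and starts the push thread.
        while ((read = chunkSource.read(buffer)) != -1) {
            pushVideo(java.util.Arrays.copyOf(buffer, read), pushAddress, "rtsp");
        }
    }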
/**
 * Push video stream data to the given address over the given protocol.
 * This method only needs to be called once.
 *
 * @param inputStream  input stream providing the video data
 * @param pushAddress  push address (protocol://address)
 * @param pushProtocol protocol, e.g. rtsp
* @throws Exception
*/
    public static synchronized void pushVideo(InputStream inputStream, String pushAddress, String pushProtocol) throws Exception {
FFmpegLogCallback.set();
FFmpegLogCallback.setLevel(AV_LOG_DEBUG);
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(inputStream, 0);
        pushVideo4(pushAddress, pushProtocol, grabber);
}
/**
 * Push a video stream.
 *
 * @param sourceAddress source video stream address (file path or network stream URL)
 * @param pushAddress   target push address (RTMP or RTSP)
 * @param pushProtocol  target push protocol (rtmp or rtsp)
* @throws Exception
*/
    public static synchronized void pushVideo(String sourceAddress, String pushAddress, String pushProtocol) throws Exception {
FFmpegLogCallback.set();
FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(sourceAddress);
        pushVideo4(pushAddress, pushProtocol, grabber);
}
    private static void pushVideo(String pushAddress, String pushProtocol, FFmpegFrameGrabber grabber) throws FrameRecorder.Exception, FrameGrabber.Exception {
long startTime = System.currentTimeMillis();
log.info("开始初始化帧抓取器");
// 增加超时参数
grabber.setOption("stimeout", "5000");
//grabber.setVideoOption("threads", "1");
// 初始化帧抓取器
grabber.start();
log.info("帧抓取器初始化完成,耗时[{}]毫秒", System.currentTimeMillis() - startTime);
// grabber.start方法中,初始化的解码器信息存在放在grabber的成员变量oc中
AVFormatContext avFormatContext = grabber.getFormatContext();
        // Number of media streams in the source (usually one video stream and one audio stream)
int streamNum = avFormatContext.nb_streams();
if (streamNum < 1) {
log.error("数据源中未找到媒体流!");
return;
}
int frameRate = (int) grabber.getVideoFrameRate();
if (0 == frameRate) {
log.info("【捕获器】视频帧率为0,采用默认帧率:{}", DEFAULT_VIDEO_FRAME_RATE);
frameRate = DEFAULT_VIDEO_FRAME_RATE;
}
log.info("【捕获器】视频帧率[{}],视频时长[{}]秒,媒体流数量[{}]",
frameRate,
avFormatContext.duration() / 1000000,
avFormatContext.nb_streams());
for (int i = 0; i < streamNum; i++) {
AVStream avStream = avFormatContext.streams(i);
AVCodecParameters avCodecParameters = avStream.codecpar();
log.info("【捕获器】流的索引[{}],编码器类型[{}],编码器ID[{}]", i, avCodecParameters.codec_type(), avCodecParameters.codec_id());
}
int frameWidth = grabber.getImageWidth();
int frameHeight = grabber.getImageHeight();
int audioChannels = grabber.getAudioChannels();
log.info("【捕获器】格式 [{}] | 视频宽度 [{}] | 视频高度 [{}] | 视频编码 [id:{}、名称:{}]| 视频码率 [{}]| 视频帧率 [{}]",
grabber.getFormat(),
frameWidth,
frameHeight,
grabber.getVideoCodec(),
grabber.getVideoCodecName(),
grabber.getVideoBitrate(),
grabber.getVideoFrameRate());
log.info("【捕获器】 音频编码 [{}] | 音频码率 [{}] | 音频通道数 [{}]",
grabber.getAudioCodecName(),
grabber.getAudioBitrate(),
audioChannels);
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(pushAddress,
frameWidth,
frameHeight,
audioChannels);
recorder.setInterleaved(true);
        switch (pushProtocol) {
case "rtsp" -> {
recorder.setFormat("rtsp");
recorder.setVideoCodec(grabber.getVideoCodec());
}
case "rtmp" -> {
recorder.setAudioCodec(AV_CODEC_ID_AAC);
recorder.setVideoCodec(AV_CODEC_ID_H264);
recorder.setFormat("flv");
}
}
recorder.setFrameRate(frameRate);
        // Reuse the source video's bitrate
        recorder.setVideoBitrate(grabber.getVideoBitrate());
        // Do not copy the grabber's audio bitrate directly: a value of 0 would cause an error
        //recorder.setAudioBitrate(grabber.getAudioBitrate());
        recorder.setGopSize(frameRate * 2);
startTime = System.currentTimeMillis();
log.info("开始初始化帧抓取器");
avFormatContext.max_interleave_delta(0);
avFormatContext.flags(avformat.AVFMT_TS_NONSTRICT);
//recorder.setTimestamp(0);
log.info("no timestamp");
log.info("【录制器】格式 [{}] | 视频宽度 [{}] | 视频高度 [{}] | 视频编码 [{}]| 视频码率 [{}]",
recorder.getFormat(),
recorder.getImageWidth(),
recorder.getImageHeight(),
recorder.getVideoCodecName(),
recorder.getVideoBitrate());
log.info("【录制器】 | 音频编码 [{}] | 音频码率 [{}] | 音频通道数 [{}]",
recorder.getAudioCodecName(),
recorder.getAudioBitrate(),
recorder.getAudioChannels());
recorder.start();
log.info("帧录制初始化完成,耗时[{}]毫秒", System.currentTimeMillis() - startTime);
log.info("开始向推流({})", pushAddress);
Frame frame;
while ((frame = grabber.grabFrame()) != null) {
            // Write the frame to the recorder
recorder.record(frame);
log.info("record");
}
log.info("推送完成,耗时[{}]秒。({})",
(System.currentTimeMillis() - startTime) / 1000, pushAddress);
        // Close the frame recorder
        // Close the frame grabber
recorder.close();
grabber.close();
log.info("清理缓存管道流|{}", pushAddress);
outputStreamMap.remove(pushAddress);
inputStreamMap.remove(pushAddress);
}
    private static void pushVideo4(String pushAddress, String pushProtocol, FFmpegFrameGrabber grabber) throws FrameRecorder.Exception, FrameGrabber.Exception {
long startTime = System.currentTimeMillis();
log.info("开始初始化帧抓取器");
// 增加超时参数
grabber.setOption("stimeout", "5000");
grabber.setVideoOption("threads", "1");
// 初始化帧抓取器
grabber.start();
log.info("帧抓取器初始化完成,耗时[{}]毫秒", System.currentTimeMillis() - startTime);
// grabber.start方法中,初始化的解码器信息存在放在grabber的成员变量oc中
AVFormatContext avFormatContext = grabber.getFormatContext();
        // Number of media streams in the source (usually one video stream and one audio stream)
int streamNum = avFormatContext.nb_streams();
if (streamNum < 1) {
log.error("数据源中未找到媒体流!");
return;
}
int frameRate = (int) grabber.getVideoFrameRate();
if (0 == frameRate) {
log.info("【捕获器】视频帧率为0,采用默认帧率:{}", DEFAULT_VIDEO_FRAME_RATE);
frameRate = DEFAULT_VIDEO_FRAME_RATE;
}
log.info("【捕获器】视频帧率[{}],视频时长[{}]秒,媒体流数量[{}]",
frameRate,
avFormatContext.duration() / 1000000,
avFormatContext.nb_streams());
for (int i = 0; i < streamNum; i++) {
AVStream avStream = avFormatContext.streams(i);
AVCodecParameters avCodecParameters = avStream.codecpar();
log.info("【捕获器】流的索引[{}],编码器类型[{}],编码器ID[{}]", i, avCodecParameters.codec_type(), avCodecParameters.codec_id());
}
int frameWidth = grabber.getImageWidth();
int frameHeight = grabber.getImageHeight();
int audioChannels = grabber.getAudioChannels();
String grabberVideoCodecName = grabber.getVideoCodecName();
log.info("【捕获器】格式 [{}] | 视频宽度 [{}] | 视频高度 [{}] | 视频编码 [id:{}、名称:{}]| 视频码率 [{}]| 视频帧率 [{}]",
grabber.getFormat(),
frameWidth,
frameHeight,
grabber.getVideoCodec(),
grabberVideoCodecName,
grabber.getVideoBitrate(),
grabber.getVideoFrameRate());
log.info("【捕获器】 音频编码 [{}] | 音频码率 [{}] | 音频通道数 [{}]",
grabber.getAudioCodecName(),
grabber.getAudioBitrate(),
audioChannels);
FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(pushAddress,
frameWidth,
frameHeight,
audioChannels);
recorder.setInterleaved(true);
recorder.setSampleRate(grabber.getSampleRate());
recorder.setVideoCodec(grabber.getVideoCodec());
        switch (pushProtocol) {
case "rtsp" -> {
recorder.setFormat("rtsp");
}
case "rtmp" -> {
                // FLV only supports H.264 video
if (!"h264".equals(grabberVideoCodecName)) {
recorder.setVideoCodec(AV_CODEC_ID_H264);
}
recorder.setAudioCodec(grabber.getAudioCodec());
recorder.setFormat("flv");
}
}
recorder.setFrameRate(frameRate);
        // Reuse the source video's bitrate
        recorder.setVideoBitrate(grabber.getVideoBitrate());
        // Do not copy the grabber's audio bitrate directly: a value of 0 would cause an error
        //recorder.setAudioBitrate(grabber.getAudioBitrate());
        // GOP (Group of Pictures) size: smaller lowers latency but may reduce quality, larger increases latency but may improve quality.
        recorder.setGopSize(3);
startTime = System.currentTimeMillis();
log.info("开始初始化帧抓取器");
avFormatContext.max_interleave_delta(0);
avFormatContext.flags(avformat.AVFMT_TS_NONSTRICT);
//recorder.setTimestamp(0);
log.info("no timestamp");
log.info("【录制器】格式 [{}] | 视频宽度 [{}] | 视频高度 [{}] | 视频编码 [{}]| 视频码率 [{}]",
recorder.getFormat(),
recorder.getImageWidth(),
recorder.getImageHeight(),
recorder.getVideoCodecName(),
recorder.getVideoBitrate());
log.info("【录制器】 | 音频编码 [{}] | 音频码率 [{}] | 音频通道数 [{}]",
recorder.getAudioCodecName(),
recorder.getAudioBitrate(),
recorder.getAudioChannels());
recorder.start();
log.info("帧录制初始化完成,耗时[{}]毫秒", System.currentTimeMillis() - startTime);
log.info("开始向推流({})", pushAddress);
Frame frame;
int countFrames = 0;
long time1 = System.currentTimeMillis();
        long elapsedSeconds = 0;
        long totalConsumedMs = 0;
while ((frame = grabber.grabFrame()) != null) {
            // Write the frame to the recorder
            recorder.record(frame);
            if (isVideoFrame(frame)) {
                countFrames++;
            }
            if (countFrames == frameRate) {
                elapsedSeconds++;
                long consumedMs = System.currentTimeMillis() - time1;
                totalConsumedMs += consumedMs;
                log.info("Recorded {} video frames in {} ms (second #{}, average {} ms per second)", countFrames, consumedMs, elapsedSeconds, totalConsumedMs / elapsedSeconds);
                time1 = System.currentTimeMillis();
                countFrames = 0;
            }
}
log.info("推送完成,耗时[{}]秒。({})",
(System.currentTimeMillis() - startTime) / 1000, pushAddress);
        // Close the frame recorder
        // Close the frame grabber
recorder.close();
grabber.close();
log.info("清理缓存管道流|{}", pushAddress);
outputStreamMap.remove(pushAddress);
inputStreamMap.remove(pushAddress);
}
    /**
     * A frame is treated as a video frame when it carries image channels but no audio sample rate.
     */
    public static boolean isVideoFrame(Frame frame) {
int channels = frame.imageChannels;
int sampleRate = frame.sampleRate;
return channels > 0 && sampleRate == 0;
}
}
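/*
 * Minimal usage sketch (illustrative only, not part of the original class).
 * Assumptions: "input.mp4" is a local sample file and "rtsp://127.0.0.1:8554/live"
 * is a placeholder for an RTSP server that is already running; replace both with
 * values from your own environment.
 */
class VideoPushToolUsageExample {
    public static void main(String[] args) throws Exception {
        // Push a local file (or any address FFmpeg can open) to an RTSP server in one call.
        VideoPushTool.pushVideo("input.mp4", "rtsp://127.0.0.1:8554/live", "rtsp");

        // Alternatively, push from an InputStream (e.g. a file opened as a stream):
        // try (InputStream in = new BufferedInputStream(new FileInputStream("input.mp4"))) {
        //     VideoPushTool.pushVideo(in, "rtsp://127.0.0.1:8554/live", "rtsp");
        // }
    }
}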