Don't wait for video frame in audio-only MCU #55

Open · wants to merge 1 commit into master
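Summary of the change, as reflected in the diff below: FilterAdaptor previously dropped every filtered audio frame until the first filtered video frame arrived, so an audio-only MCU never emitted audio. The listener bodies are extracted into the public onFilteredVideoFrame/onFilteredAudioFrame methods, and the audio path now only waits for video when the filter configuration actually has video enabled. A minimal standalone sketch of that gating rule (the AudioGate and FrameSink names are illustrative only, not plugin classes):

```java
import java.util.ArrayList;
import java.util.List;

public class AudioGate {
    interface FrameSink { void onAudioFrame(long pts); }

    private final boolean videoEnabled;
    private final FrameSink sink;
    private boolean firstVideoReceived = false;
    private long audioVideoOffset = 0;

    AudioGate(boolean videoEnabled, FrameSink sink) {
        this.videoEnabled = videoEnabled;
        this.sink = sink;
    }

    void onVideoFrame() { firstVideoReceived = true; }

    void onAudioFrame(long pts) {
        // Old behaviour: wait for the first video frame unconditionally.
        // New behaviour: only wait when the filter actually produces video.
        if (videoEnabled && !firstVideoReceived) {
            audioVideoOffset = pts;      // remember where audio started; drop the frame
        } else {
            sink.onAudioFrame(pts - audioVideoOffset); // rebase and forward
        }
    }

    public static void main(String[] args) {
        List<Long> delivered = new ArrayList<>();
        AudioGate audioOnly = new AudioGate(false, delivered::add);
        for (long pts = 0; pts < 100; pts += 20) {
            audioOnly.onAudioFrame(pts); // forwarded immediately; no video ever arrives
        }
        System.out.println(delivered); // [0, 20, 40, 60, 80]
    }
}
```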
69 changes: 41 additions & 28 deletions FilterPlugin/src/main/java/io/antmedia/filter/FilterAdaptor.java
@@ -333,18 +333,7 @@ public Result update()
 * Set the listener of the video filter graph. FilterGraph calls the listener for the filtered output frame
*/
videoFilterGraph.setListener((streamId, frame)->{
if(frame != null && currentOutStreams.containsKey(streamId)) {
IFrameListener frameListener = currentOutStreams.get(streamId);
if(frameListener != null) {
if(!firstVideoReceived) {
firstVideoReceived = true;
}
//rescale the pts if the filter timebase is different
frame.pts(av_rescale_q_rnd(frame.pts(), videoSinkFiltersMap.get(streamId).getFilterContext().inputs(0).time_base(), Utils.TIME_BASE_FOR_MS, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
//framelistener is a custombroadcast
frameListener.onVideoFrame(streamId, frame);
}
}
onFilteredVideoFrame(videoSinkFiltersMap, streamId, frame);
});

if(prevVideoFilterGraph != null) {
@@ -368,22 +357,7 @@ public Result update()
return result;
}
audioFilterGraph.setCurrentPts(currentAudioPts);
audioFilterGraph.setListener((streamId, frame)->{
if(frame != null && currentOutStreams.containsKey(streamId)) {
IFrameListener frameListener = currentOutStreams.get(streamId);
if(frameListener != null) {
if(!firstVideoReceived) {
audioVideoOffset = frame.pts();
}
else {
frame.pts(frame.pts()-audioVideoOffset);

//framelistener is a custombroadcast
frameListener.onAudioFrame(streamId, frame);
}
}
}
});
audioFilterGraph.setListener(this::onFilteredAudioFrame);

if(prevAudioFilterGraph != null) {
prevAudioFilterGraph.close();
@@ -394,6 +368,42 @@ public Result update()
return result;
}

public void onFilteredVideoFrame(Map<String, Filter> videoSinkFiltersMap, String streamId, AVFrame frame) {
if(frame != null && currentOutStreams.containsKey(streamId)) {
IFrameListener frameListener = currentOutStreams.get(streamId);
if(frameListener != null) {
if(!firstVideoReceived) {
firstVideoReceived = true;
}
//rescale the pts if the filter timebase is different
frame.pts(av_rescale_q_rnd(frame.pts(), getFilterTimebase(videoSinkFiltersMap, streamId), Utils.TIME_BASE_FOR_MS, AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
//framelistener is a custombroadcast
frameListener.onVideoFrame(streamId, frame);
}
}
}

public AVRational getFilterTimebase(Map<String, Filter> videoSinkFiltersMap, String streamId) {
return videoSinkFiltersMap.get(streamId).getFilterContext().inputs(0).time_base();
}

public void onFilteredAudioFrame(String streamId, AVFrame frame) {
if(frame != null && currentOutStreams.containsKey(streamId)) {
IFrameListener frameListener = currentOutStreams.get(streamId);
if(frameListener != null) {
if(filterConfiguration.isVideoEnabled() && !firstVideoReceived) {
audioVideoOffset = frame.pts();
}
else {
frame.pts(frame.pts()-audioVideoOffset);

//framelistener is a custombroadcast
frameListener.onAudioFrame(streamId, frame);
}
}
}
}

/*
 * This method is used both for the creation and for the update of the filter
 * For example, new inputs may be added as an update
@@ -604,4 +614,7 @@ public void setFilterConfiguration(FilterConfiguration filterConfiguration) {
this.filterConfiguration = filterConfiguration;
}

public Map<String, IFrameListener> getCurrentOutStreams() {
return currentOutStreams;
}
}
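One detail the extracted onFilteredVideoFrame keeps from the old inline listener is the pts rescale from the filter sink's timebase to Utils.TIME_BASE_FOR_MS (1/1000). A plain-arithmetic sketch of what that rescale computes, assuming a typical 1/90000 video timebase (this is not the FFmpeg av_rescale_q_rnd call itself, and it ignores the exact rounding flags):

```java
public class RescaleSketch {
    // pts measured in units of srcNum/srcDen seconds, re-expressed in units of dstNum/dstDen seconds
    static long rescale(long pts, int srcNum, int srcDen, int dstNum, int dstDen) {
        return Math.round((double) pts * srcNum * dstDen / ((double) srcDen * dstNum));
    }

    public static void main(String[] args) {
        // 90 kHz timebase: pts 90000 is exactly one second, i.e. 1000 ms
        System.out.println(rescale(90_000, 1, 90_000, 1, 1_000)); // 1000
        // one frame at 29.97 fps (3003 ticks) is about 33 ms
        System.out.println(rescale(3_003, 1, 90_000, 1, 1_000));  // 33
    }
}
```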
109 changes: 70 additions & 39 deletions FilterPlugin/src/test/java/io/antmedia/test/FilterAdaptorUnitTest.java
@@ -6,26 +6,24 @@
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import io.antmedia.filter.utils.Filter;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.bytedeco.ffmpeg.avcodec.AVCodecParameters;
import org.bytedeco.ffmpeg.avfilter.AVFilterContext;
import org.bytedeco.ffmpeg.avutil.AVChannelLayout;
import org.bytedeco.ffmpeg.avutil.AVFrame;
import org.bytedeco.ffmpeg.avutil.AVRational;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacpp.PointerPointer;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -47,7 +45,7 @@
import io.vertx.core.Vertx;

public class FilterAdaptorUnitTest {

@Rule
public TestRule watcher = new TestWatcher() {
protected void starting(Description description) {
@@ -62,7 +60,7 @@ protected void finished(Description description) {
System.out.println("Finishing test: " + description.getMethodName());
};
};

private static Vertx vertx;

@AfterClass
@@ -75,7 +73,7 @@ public static void beforeClass() {
vertx = Vertx.vertx();
}


@Test
public void testFilterGraphVideoFeed() {
FilterAdaptor filterAdaptor = spy(new FilterAdaptor(RandomStringUtils.randomAlphanumeric(12), false));
@@ -87,10 +85,10 @@ public void testFilterGraphVideoFeed() {

AntMediaApplicationAdapter app = mock(AntMediaApplicationAdapter.class);
when(app.getVertx()).thenReturn(vertx);

filterAdaptor.createOrUpdateFilter(filterConf, app);


String streamId = "stream"+RandomUtils.nextInt(0, 10000);
AVFrame frame = new AVFrame();
FilterGraph filterGraph = mock(FilterGraph.class);
@@ -114,12 +112,12 @@ public void testFilterGraphVideoFeed() {
vsi.setCodecParameters(mock(AVCodecParameters.class));
vsi.setTimeBase(Utils.TIME_BASE_FOR_MS);
filterAdaptor.setVideoStreamInfo(streamId, vsi);

filterAdaptor.onVideoFrame(streamId, frame);
verify(filterGraph, timeout(3000)).doFilter(eq(streamId), any(), Mockito.anyBoolean());
}


@Test
public void testFilterGraphAudioFeed() {
FilterAdaptor filterAdaptor = spy(new FilterAdaptor(RandomStringUtils.randomAlphanumeric(12), false));
@@ -130,10 +128,10 @@ public void testFilterGraphAudioFeed() {

AntMediaApplicationAdapter app = mock(AntMediaApplicationAdapter.class);
when(app.getVertx()).thenReturn(vertx);

filterAdaptor.createOrUpdateFilter(filterConf, app);


String streamId = "stream"+RandomUtils.nextInt(0, 10000);
AVFrame frame = new AVFrame();
FilterGraph filterGraph = mock(FilterGraph.class);
@@ -156,29 +154,29 @@
filterAdaptor.onAudioFrame(streamId, frame);
verify(filterGraph, timeout(3000)).doFilter(eq(streamId), any(), anyBoolean());
}

@Test
public void testVideoAudioFiltering() {
testFiltering(true, "[in0][in1][in2]vstack=inputs=3[out0]", true, "[in0][in1][in2]amix=inputs=3[out0]");
}

//use some of the inputs in the filter
@Test
@Test
public void testPartialVideoAudioFiltering() {
testFiltering(true, "[in0][in2]vstack=inputs=2[out0]", true, "[in0][in2]amix=inputs=2[out0]");
}

@Test
public void testVideoOnlyFiltering() {
testFiltering(true, "[in0][in1][in2]vstack=inputs=3[out0]", false, "dummy");
}

@Test
public void testAudioOnlyFiltering() {
testFiltering(false, "dummy", true, "[in0][in1][in2]amix=inputs=3[out0]");
}


public void testFiltering(boolean videoEnabled, String videoFilter, boolean audioEnabled, String audioFilter) {
FilterAdaptor filterAdaptor = spy(new FilterAdaptor(RandomStringUtils.randomAlphanumeric(12), false));
AntMediaApplicationAdapter app = mock(AntMediaApplicationAdapter.class);
@@ -187,7 +185,7 @@ public void testFiltering(boolean videoEnabled, String videoFilter, boolean audi
String stream1 = "inStream1";
String stream2 = "inStream2";
String stream3 = "inStream3";

String output1 = "outStream1";


@@ -199,39 +197,39 @@ public void testFiltering(boolean videoEnabled, String videoFilter, boolean audi
StreamParametersInfo asi2 = getStreamInfo();
StreamParametersInfo asi3 = getStreamInfo();


filterAdaptor.setVideoStreamInfo(stream1, vsi1);
filterAdaptor.setAudioStreamInfo(stream1, asi1);

filterAdaptor.setVideoStreamInfo(stream2, vsi2);
filterAdaptor.setAudioStreamInfo(stream2, asi2);

filterAdaptor.setVideoStreamInfo(stream3, vsi3);
filterAdaptor.setAudioStreamInfo(stream3, asi3);

FilterConfiguration conf = new FilterConfiguration();
conf.setVideoEnabled(videoEnabled);
conf.setAudioEnabled(audioEnabled);
conf.setVideoFilter(videoFilter);
conf.setAudioFilter(audioFilter);
conf.setInputStreams(Arrays.asList(stream1, stream2, stream3));
conf.setOutputStreams(Arrays.asList(output1));

assertTrue(filterAdaptor.createOrUpdateFilter(conf, app).isSuccess());

filterAdaptor.close(app);

//increase coverage and checking not throwing exception
filterAdaptor.close(app);

}

public StreamParametersInfo getStreamInfo() {
StreamParametersInfo si = new StreamParametersInfo();
AVCodecParameters codecParams = mock(AVCodecParameters.class);
when(codecParams.height()).thenReturn(360);
when(codecParams.width()).thenReturn(640);

AVChannelLayout channelLayout = new AVChannelLayout();
avutil.av_channel_layout_default(channelLayout, 2);
when(codecParams.ch_layout()).thenReturn(channelLayout);
@@ -242,16 +240,16 @@ public StreamParametersInfo getStreamInfo() {
when(tb.num()).thenReturn(1);
when(tb.den()).thenReturn(1000);


si.setEnabled(true);
si.setCodecParameters(codecParams);


si.setTimeBase(tb);

return si;
}

/*
* In synchronous mode output frame pts should be the same with input,
* because we apply filter on going original stream without creating a new stream.
@@ -311,4 +309,37 @@ public void testTimeBaseInSyncMode() {
assertEquals(filteredFrame.pts(), frame.pts());
}
}

@Test
public void testOnFilteredAudioFrame() {
onFilteredAudioFrameTest(true);
onFilteredAudioFrameTest(false);
}

public void onFilteredAudioFrameTest(boolean videoEnabled) {

FilterAdaptor filterAdaptor = spy(new FilterAdaptor(RandomStringUtils.randomAlphanumeric(12), false));
String streamId = "stream"+RandomUtils.nextInt(0, 10000);
IFrameListener frameListener = mock(IFrameListener.class);
filterAdaptor.getCurrentOutStreams().put(streamId, frameListener);
AVFrame frame = mock(AVFrame.class);
when(frame.pts(anyLong())).thenReturn(frame);

FilterConfiguration filterConfiguration = new FilterConfiguration();
filterConfiguration.setAudioEnabled(true);
filterConfiguration.setVideoEnabled(videoEnabled);
filterAdaptor.setFilterConfiguration(filterConfiguration);

Map<String, Filter> videoSinkFiltersMap = new HashMap<>();
Filter filter = mock(Filter.class);
doReturn(Utils.TIME_BASE_FOR_MS).when(filterAdaptor).getFilterTimebase(videoSinkFiltersMap, streamId);
videoSinkFiltersMap.put(streamId, filter);
for (int i = 0; i < 20; i++) {
if(i==10) {
filterAdaptor.onFilteredVideoFrame(videoSinkFiltersMap, streamId, frame);
}
filterAdaptor.onFilteredAudioFrame(streamId, frame);
}
verify(frameListener, times(videoEnabled?10:20)).onAudioFrame(streamId, frame);
}
}