Maximize FOV on video call #2297

Draft · wants to merge 1 commit into master
Maximize FOV on video call
1) Use 4:3 video aspect ratio to prevent cropping to 16:9
2) Disable EIS
3) Zoom out
ASerbinski committed Sep 20, 2024
commit 35a403e732b151a41b88d0d767d2f4ba2606240f
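
Editorial background, not part of the commit: all three techniques map onto per-request Camera2 controls, and the third one (zooming out below 1.0x) only works on devices that expose a wider lens through CONTROL_ZOOM_RATIO_RANGE. A minimal, hedged probe for that capability, assuming API 30+; ZoomOutProbe is an illustrative name, not code from this PR:

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.os.Build;
import android.util.Range;

// Hypothetical helper (not in this PR): true if CONTROL_ZOOM_RATIO accepts
// values below 1.0, i.e. the camera can widen its field of view past 1.0x.
final class ZoomOutProbe {
    static boolean supportsZoomOut(Context context, String cameraId)
            throws CameraAccessException {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
            return false; // CONTROL_ZOOM_RATIO_RANGE was added in API 30.
        }
        CameraManager manager =
            (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        Range<Float> range = manager.getCameraCharacteristics(cameraId)
            .get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
        return range != null && range.getLower() < 1.0f;
    }
}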
10 changes: 8 additions & 2 deletions app/src/main/java/com/nextcloud/talk/activities/CallActivity.kt
@@ -156,6 +156,7 @@ import org.webrtc.CameraVideoCapturer.CameraSwitchHandler
import org.webrtc.DefaultVideoDecoderFactory
import org.webrtc.DefaultVideoEncoderFactory
import org.webrtc.EglBase
+import org.webrtc.ExtCamera2Enumerator
import org.webrtc.Logging
import org.webrtc.MediaConstraints
import org.webrtc.MediaStream
@@ -232,6 +233,9 @@ class CallActivity : CallBaseActivity() {

private val callTimeHandler = Handler(Looper.getMainLooper())

+private var disableEIS = true
+private var zoomOut = true
+
// push to talk
private var isPushToTalkActive = false
private var pulseAnimation: PulseAnimation? = null
@@ -708,7 +712,9 @@ class CallActivity : CallBaseActivity() {
} catch (t: Throwable) {
Log.w(TAG, "Camera2Enumerator threw an error", t)
}
-cameraEnumerator = if (camera2EnumeratorIsSupported) {
+cameraEnumerator = if (camera2EnumeratorIsSupported && (disableEIS || zoomOut)) {
+    ExtCamera2Enumerator(this, disableEIS, zoomOut)
+} else if (camera2EnumeratorIsSupported) {
Camera2Enumerator(this)
} else {
Camera1Enumerator(WebRTCUtils.shouldEnableVideoHardwareAcceleration())
@@ -2071,7 +2077,7 @@ class CallActivity : CallBaseActivity() {

private fun startVideoCapture() {
if (videoCapturer != null) {
-videoCapturer!!.startCapture(1280, 720, 30)
+videoCapturer!!.startCapture(1280, 960, 30)
}
}
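
Why 1280×960 (editorial note): phone camera sensors are typically 4:3, so a 16:9 request is served by cropping rows off the sensor. At the same 1280-pixel width, 1280×720 keeps only 720/960 = 75% of the vertical field of view that 1280×960 delivers; requesting 4:3 recovers that ~25%, independently of the EIS and zoom changes.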

42 changes: 42 additions & 0 deletions app/src/main/java/org/webrtc/ExtCamera2Capturer.java
@@ -0,0 +1,42 @@
package org.webrtc;

import android.content.Context;
import android.hardware.camera2.CameraManager;

import org.jetbrains.annotations.Nullable;

public class ExtCamera2Capturer extends Camera2Capturer {

@Nullable private final CameraManager cameraManager;
private final boolean disableEIS, zoomOut;

public ExtCamera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler,
boolean disableEIS, boolean zoomOut) {
super(context, cameraName, eventsHandler);
cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
this.disableEIS = disableEIS;
this.zoomOut = zoomOut;
}

@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {

CameraSession.CreateSessionCallback myCallback = new CameraSession.CreateSessionCallback() {
@Override
public void onDone(CameraSession cameraSession) {
createSessionCallback.onDone(cameraSession);
}

@Override
public void onFailure(CameraSession.FailureType failureType, String s) {
createSessionCallback.onFailure(failureType, s);
}
};

ExtCamera2Session.create(myCallback, events, applicationContext, cameraManager,
surfaceTextureHelper, cameraName, width, height, framerate, disableEIS, zoomOut);
}
}
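
Reviewer note: the CreateSessionCallback wrapper above forwards onDone and onFailure unchanged, so passing createSessionCallback straight through would behave identically; it reads as a hook point left for future per-session handling. The override's effective change is the last statement, which routes session creation to ExtCamera2Session instead of the stock Camera2Session.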
27 changes: 27 additions & 0 deletions app/src/main/java/org/webrtc/ExtCamera2Enumerator.java
@@ -0,0 +1,27 @@
package org.webrtc;

import android.content.Context;
import android.hardware.camera2.CameraManager;

import org.jetbrains.annotations.Nullable;

public class ExtCamera2Enumerator extends Camera2Enumerator {

final Context context;
@Nullable final CameraManager cameraManager;
private final boolean disableEIS, zoomOut;

public ExtCamera2Enumerator(Context context, boolean disableEIS, boolean zoomOut) {
super(context);
this.context = context;
this.disableEIS = disableEIS;
this.zoomOut = zoomOut;
this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
}

@Override
public CameraVideoCapturer createCapturer(String deviceName,
CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return new ExtCamera2Capturer(context, deviceName, eventsHandler, disableEIS, zoomOut);
}
}
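
How the pieces connect: the enumerator is the only class the app constructs directly (see the CallActivity.kt hunk above); it hands back an ExtCamera2Capturer, which in turn creates an ExtCamera2Session. A minimal usage sketch; ExtCameraFactoryExample and the null events handler are illustrative, not part of the PR:

import android.content.Context;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.ExtCamera2Enumerator;

final class ExtCameraFactoryExample {
    // Returns a capturer configured for maximum FOV. The caller still has to
    // call initialize(...) with a SurfaceTextureHelper and then, as this PR
    // does, startCapture(1280, 960, 30).
    static CameraVideoCapturer createMaxFovCapturer(Context context) {
        CameraEnumerator enumerator =
            new ExtCamera2Enumerator(context, /* disableEIS= */ true, /* zoomOut= */ true);
        String[] names = enumerator.getDeviceNames();
        return enumerator.createCapturer(names[0], /* eventsHandler= */ null);
    }
}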
375 changes: 375 additions & 0 deletions app/src/main/java/org/webrtc/ExtCamera2Session.java
@@ -0,0 +1,375 @@
package org.webrtc;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.os.Build;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import androidx.annotation.Nullable;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

class ExtCamera2Session implements CameraSession {
private static final String TAG = "Camera2Session";
private static final Histogram camera2StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
private static final Histogram camera2StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private static enum SessionState { RUNNING, STOPPED }
private final Handler cameraThreadHandler;
private final CreateSessionCallback callback;
private final Events events;
private final Context applicationContext;
private final CameraManager cameraManager;
private final SurfaceTextureHelper surfaceTextureHelper;
private final String cameraId;
private final int width;
private final int height;
private final int framerate;
// Initialized at start
private CameraCharacteristics cameraCharacteristics;
private int cameraOrientation;
private boolean isCameraFrontFacing;
private int fpsUnitFactor;
private boolean isEISDisabled = false;
private boolean isZoomedOutMax = false;
private CaptureFormat captureFormat;
// Initialized when camera opens
@Nullable private CameraDevice cameraDevice;
@Nullable private Surface surface;
// Initialized when capture session is created
@Nullable private CameraCaptureSession captureSession;
// State
private SessionState state = SessionState.RUNNING;
private boolean firstFrameReported;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private class CameraStateCallback extends CameraDevice.StateCallback {
private String getErrorDescription(int errorCode) {
switch (errorCode) {
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
return "Camera device has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
return "Camera device could not be opened due to a device policy.";
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
return "Camera device is in use already.";
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
return "Camera service has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
return "Camera device could not be opened because"
+ " there are too many other open camera devices.";
default:
return "Unknown camera error: " + errorCode;
}
}
@Override
public void onDisconnected(CameraDevice camera) {
checkIsOnCameraThread();
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
} else {
events.onCameraDisconnected(ExtCamera2Session.this);
}
}
@Override
public void onError(CameraDevice camera, int errorCode) {
checkIsOnCameraThread();
reportError(getErrorDescription(errorCode));
}
@Override
public void onOpened(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera opened.");
cameraDevice = camera;
surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
try {
camera.createCaptureSession(
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to create capture session. " + e);
return;
}
}
@Override
public void onClosed(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera device closed.");
events.onCameraClosed(ExtCamera2Session.this);
}
}
private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(CameraCaptureSession session) {
checkIsOnCameraThread();
session.close();
reportError("Failed to configure capture session.");
}
@Override
public void onConfigured(CameraCaptureSession session) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera capture session configured.");
captureSession = session;
try {
/*
* The viable options for video capture requests are:
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
* post-processing.
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
* quality.
*/
final CaptureRequest.Builder captureRequestBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Set auto exposure fps range.
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
chooseStabilizationMode(captureRequestBuilder);
setMinZoomRatio(captureRequestBuilder);
chooseFocusMode(captureRequestBuilder);
captureRequestBuilder.addTarget(surface);
session.setRepeatingRequest(
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to start capture request. " + e);
return;
}
surfaceTextureHelper.startListening((VideoFrame frame) -> {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
return;
}
if (!firstFrameReported) {
firstFrameReported = true;
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera2StartTimeMsHistogram.addSample(startTimeMs);
}
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
// Also, undo camera orientation, we report it as rotation instead.
final VideoFrame modifiedFrame =
new VideoFrame(CameraSession.createTextureBufferWithModifiedTransformMatrix(
(TextureBufferImpl) frame.getBuffer(),
/* mirror= */ isCameraFrontFacing,
/* rotation= */ -cameraOrientation),
/* rotation= */ getFrameOrientation(), frame.getTimestampNs());
events.onFrameCaptured(ExtCamera2Session.this, modifiedFrame);
modifiedFrame.release();
});
Logging.d(TAG, "Camera device successfully started.");
callback.onDone(ExtCamera2Session.this);
}
private void setMinZoomRatio(CaptureRequest.Builder captureRequestBuilder) {
if (isZoomedOutMax && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
// Use the widest ratio the device reports; a hard-coded 0.5 would throw
// IllegalArgumentException on cameras whose minimum zoom ratio is 1.0.
final Range<Float> zoomRange =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE);
if (zoomRange != null) {
captureRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, zoomRange.getLower());
}
}
}
// Prefers optical stabilization over software stabilization if available. Only enables one of
// the stabilization modes at a time because having both enabled can cause strange results.
private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableOpticalStabilization = cameraCharacteristics.get(
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
if (availableOpticalStabilization != null) {
for (int mode : availableOpticalStabilization) {
if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using optical stabilization.");
return;
}
}
}
// If no optical mode is available, try software.
final int[] availableVideoStabilization = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
if (availableVideoStabilization != null) {
for (int mode : availableVideoStabilization) {
if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
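// Delta from the stock Camera2Session (editorial comment): honor the
// isEISDisabled flag here instead of unconditionally enabling software
// (EIS) video stabilization.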
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
isEISDisabled ?
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF :
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using video stabilization.");
return;
}
}
}
Logging.d(TAG, "Stabilization not available.");
}
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
for (int mode : availableFocusModes) {
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
captureRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
Logging.d(TAG, "Using continuous video auto-focus.");
return;
}
}
Logging.d(TAG, "Auto-focus is not available.");
}
}
private static class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
@Override
public void onCaptureFailed(
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Logging.d(TAG, "Capture failed: " + failure);
}
}
public static void create(CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
int framerate, boolean disableEIS, boolean zoomOut) {
new ExtCamera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
cameraId, width, height, framerate, disableEIS, zoomOut);
}
private ExtCamera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
int width, int height, int framerate, boolean disableEIS, boolean zoomOut) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
constructionTimeNs = System.nanoTime();
this.cameraThreadHandler = new Handler();
this.callback = callback;
this.events = events;
this.applicationContext = applicationContext;
this.cameraManager = cameraManager;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraId = cameraId;
this.width = width;
this.height = height;
this.framerate = framerate;
this.isEISDisabled = disableEIS;
this.isZoomedOutMax = zoomOut;
start();
}
private void start() {
checkIsOnCameraThread();
Logging.d(TAG, "start");
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (CameraAccessException | IllegalArgumentException e) {
reportError("getCameraCharacteristics(): " + e.getMessage());
return;
}
cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
findCaptureFormat();
if (captureFormat == null) {
// findCaptureFormat reports an error already.
return;
}
openCamera();
}
private void findCaptureFormat() {
checkIsOnCameraThread();
Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
List<CaptureFormat.FramerateRange> framerateRanges =
Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
Logging.d(TAG, "Available preview sizes: " + sizes);
Logging.d(TAG, "Available fps ranges: " + framerateRanges);
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
reportError("No supported capture formats.");
return;
}
final CaptureFormat.FramerateRange bestFpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
}
private void openCamera() {
checkIsOnCameraThread();
Logging.d(TAG, "Opening camera " + cameraId);
events.onCameraOpening();
try {
cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
} catch (CameraAccessException | IllegalArgumentException | SecurityException e) {
reportError("Failed to open camera: " + e);
return;
}
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera2StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
surfaceTextureHelper.stopListening();
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (surface != null) {
surface.release();
surface = null;
}
if (cameraDevice != null) {
cameraDevice.close();
cameraDevice = null;
}
Logging.d(TAG, "Stop done");
}
private void reportError(String error) {
checkIsOnCameraThread();
Logging.e(TAG, "Error: " + error);
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.ERROR, error);
} else {
events.onCameraError(this, error);
}
}
private int getFrameOrientation() {
int rotation = CameraSession.getDeviceOrientation(applicationContext);
if (!isCameraFrontFacing) {
rotation = 360 - rotation;
}
return (cameraOrientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}
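
Delta summary (editorial): apart from the two constructor flags, ExtCamera2Session appears to track the stock Camera2Session line for line; the behavioral differences are the setMinZoomRatio call in onConfigured and the isEISDisabled ternary in chooseStabilizationMode. Living in the org.webrtc package is presumably what lets these classes reach the package-private CameraSession plumbing.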
4 changes: 2 additions & 2 deletions app/src/main/res/values/dimens.xml
@@ -54,8 +54,8 @@
<dimen name="dialog_padding">24dp</dimen>
<dimen name="dialog_padding_top_bottom">18dp</dimen>

<dimen name="call_self_video_long_side_length">150dp</dimen>
<dimen name="call_self_video_short_side_length">80dp</dimen>
<dimen name="call_self_video_long_side_length">160dp</dimen>
<dimen name="call_self_video_short_side_length">120dp</dimen>
<dimen name="call_grid_item_min_height">180dp</dimen>
<dimen name="call_controls_height">110dp</dimen>
<dimen name="call_participant_progress_bar_size">48dp</dimen>