Diffstat (limited to 'talk/app/webrtc/java/android/org/webrtc')
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java           3
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java   75
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java           13
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/EglBase.java                   288
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/EglBase10.java                 299
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/EglBase14.java                 254
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java              146
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java   22
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/RendererCommon.java             74
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java      283
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java       281
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java                51
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java      395
-rw-r--r--  talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java           81
14 files changed, 1484 insertions, 781 deletions
diff --git a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
index 097d1cd906..3444529596 100644
--- a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
+++ b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
@@ -27,7 +27,9 @@
package org.webrtc;
+import android.annotation.TargetApi;
import android.content.Context;
+
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
@@ -45,6 +47,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
private final static String TAG = "Camera2Enumerator";
private final static double NANO_SECONDS_PER_SECOND = 1.0e9;
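
Note that @TargetApi(21) only suppresses lint; it does not prevent the class from
crashing on pre-21 devices. A minimal call-site sketch (the factory method and the
constructor arguments are assumptions for illustration only):

    import android.os.Build;

    // Hypothetical factory: use the camera2-based enumerator only when the
    // runtime API level supports it; otherwise fall back to the enumerator
    // based on the legacy android.hardware.Camera API.
    public class EnumeratorFactory {
      public static CameraEnumerationAndroid.Enumerator create(android.content.Context context) {
        if (Build.VERSION.SDK_INT >= 21) {
          return new Camera2Enumerator(context);  // constructor argument assumed
        }
        return new CameraEnumerator();
      }
    }
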
diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
index 3e37f6afdc..5f68c3759e 100644
--- a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -29,7 +29,6 @@ package org.webrtc;
import static java.lang.Math.abs;
import static java.lang.Math.ceil;
-import android.hardware.Camera;
import android.graphics.ImageFormat;
import org.json.JSONArray;
@@ -72,7 +71,7 @@ public class CameraEnumerationAndroid {
// other image formats then this needs to be updated and
// VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
// all imageFormats.
- public final int imageFormat = ImageFormat.YV12;
+ public final int imageFormat = ImageFormat.NV21;
public CaptureFormat(int width, int height, int minFramerate,
int maxFramerate) {
@@ -88,25 +87,15 @@ public class CameraEnumerationAndroid {
}
// Calculates the frame size of the specified image format. Currently only
- // supporting ImageFormat.YV12. The YV12's stride is the closest rounded up
- // multiple of 16 of the width and width and height are always even.
- // Android guarantees this:
- // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
public static int frameSize(int width, int height, int imageFormat) {
- if (imageFormat != ImageFormat.YV12) {
+ if (imageFormat != ImageFormat.NV21) {
throw new UnsupportedOperationException("Don't know how to calculate "
- + "the frame size of non-YV12 image formats.");
+ + "the frame size of non-NV21 image formats.");
}
- int yStride = roundUp(width, 16);
- int uvStride = roundUp(yStride / 2, 16);
- int ySize = yStride * height;
- int uvSize = uvStride * height / 2;
- return ySize + uvSize * 2;
- }
-
- // Rounds up |x| to the closest value that is a multiple of |alignment|.
- private static int roundUp(int x, int alignment) {
- return (int)ceil(x / (double)alignment) * alignment;
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
}
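
As a worked check of the new formula: ImageFormat.getBitsPerPixel(ImageFormat.NV21)
returns 12, so a 640x480 preview frame needs 640 * 480 * 12 / 8 = 460800 bytes,
which is exactly the buffer size to pass to Camera.addCallbackBuffer():

    // Sketch: allocate a callback buffer sized for one NV21 preview frame.
    int bufferSize = CaptureFormat.frameSize(640, 480, ImageFormat.NV21);  // 460800
    camera.addCallbackBuffer(new byte[bufferSize]);
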
@Override
@@ -114,21 +103,19 @@ public class CameraEnumerationAndroid {
return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
}
- @Override
- public boolean equals(Object that) {
- if (!(that instanceof CaptureFormat)) {
+ public boolean isSameFormat(final CaptureFormat that) {
+ if (that == null) {
return false;
}
- final CaptureFormat c = (CaptureFormat) that;
- return width == c.width && height == c.height && maxFramerate == c.maxFramerate
- && minFramerate == c.minFramerate;
+ return width == that.width && height == that.height && maxFramerate == that.maxFramerate
+ && minFramerate == that.minFramerate;
}
}
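
Replacing equals() with isSameFormat() avoids overriding equals() without a
matching hashCode(), which would make CaptureFormat unsafe in hash-based
collections. Callers now compare explicitly, e.g. (hypothetical usage):

    // Skip a capturer restart when the requested format is already active.
    if (currentFormat.isSameFormat(requestedFormat)) {
      return;
    }
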
// Returns device names that can be used to create a new VideoCapturerAndroid.
public static String[] getDeviceNames() {
- String[] names = new String[Camera.getNumberOfCameras()];
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
names[i] = getDeviceName(i);
}
return names;
@@ -136,22 +123,22 @@ public class CameraEnumerationAndroid {
// Returns number of cameras on device.
public static int getDeviceCount() {
- return Camera.getNumberOfCameras();
+ return android.hardware.Camera.getNumberOfCameras();
}
// Returns the name of the camera with camera index. Returns null if the
// camera can not be used.
public static String getDeviceName(int index) {
- Camera.CameraInfo info = new Camera.CameraInfo();
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
try {
- Camera.getCameraInfo(index, info);
+ android.hardware.Camera.getCameraInfo(index, info);
} catch (Exception e) {
Logging.e(TAG, "getCameraInfo failed on index " + index,e);
return null;
}
String facing =
- (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
return "Camera " + index + ", Facing " + facing
+ ", Orientation " + info.orientation;
}
@@ -159,13 +146,13 @@ public class CameraEnumerationAndroid {
// Returns the name of the front facing camera. Returns null if the
// camera can not be used or does not exist.
public static String getNameOfFrontFacingDevice() {
- return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_FRONT);
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
// Returns the name of the back facing camera. Returns null if the
// camera can not be used or does not exist.
public static String getNameOfBackFacingDevice() {
- return getNameOfDevice(Camera.CameraInfo.CAMERA_FACING_BACK);
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
}
public static String getSupportedFormatsAsJson(int id) throws JSONException {
@@ -194,7 +181,8 @@ public class CameraEnumerationAndroid {
}
}
- public static int[] getFramerateRange(Camera.Parameters parameters, final int framerate) {
+ public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
+ final int framerate) {
List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
if (listFpsRange.isEmpty()) {
Logging.w(TAG, "No supported preview fps range");
@@ -203,27 +191,30 @@ public class CameraEnumerationAndroid {
return Collections.min(listFpsRange,
new ClosestComparator<int[]>() {
@Override int diff(int[] range) {
- return abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX])
- + abs(framerate - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ final int maxFpsWeight = 10;
+ return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
+ + maxFpsWeight * abs(framerate
+ - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
}
});
}
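
The rewritten comparator no longer minimizes the combined min/max error: the
max-fps error is weighted ten times higher, and among ranges with the same max
fps the one with the lower minimum wins. A worked example, assuming the fps*1000
units of getSupportedPreviewFpsRange() and a requested framerate of 30000:

    // range {15000, 30000}: 15000 + 10 * |30000 - 30000| = 15000  <- chosen
    // range {30000, 30000}: 30000 + 10 * |30000 - 30000| = 30000
    // range { 7000, 25000}:  7000 + 10 * |30000 - 25000| = 57000

Preferring a low minimum fps gives the camera room to reduce the frame rate in
low light instead of underexposing.
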
- public static Camera.Size getClosestSupportedSize(
- List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
+ public static android.hardware.Camera.Size getClosestSupportedSize(
+ List<android.hardware.Camera.Size> supportedSizes, final int requestedWidth,
+ final int requestedHeight) {
return Collections.min(supportedSizes,
- new ClosestComparator<Camera.Size>() {
- @Override int diff(Camera.Size size) {
+ new ClosestComparator<android.hardware.Camera.Size>() {
+ @Override int diff(android.hardware.Camera.Size size) {
return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
}
});
}
private static String getNameOfDevice(int facing) {
- final Camera.CameraInfo info = new Camera.CameraInfo();
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
try {
- Camera.getCameraInfo(i, info);
+ android.hardware.Camera.getCameraInfo(i, info);
if (info.facing == facing) {
return getDeviceName(i);
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
index 2f35dc3493..54469cc341 100644
--- a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
+++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java
@@ -27,7 +27,6 @@
package org.webrtc;
-import android.hardware.Camera;
import android.os.SystemClock;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -60,11 +59,11 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
private List<CaptureFormat> enumerateFormats(int cameraId) {
Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
final long startTimeMs = SystemClock.elapsedRealtime();
- final Camera.Parameters parameters;
- Camera camera = null;
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
try {
Logging.d(TAG, "Opening camera with index " + cameraId);
- camera = Camera.open(cameraId);
+ camera = android.hardware.Camera.open(cameraId);
parameters = camera.getParameters();
} catch (RuntimeException e) {
Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
@@ -84,10 +83,10 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
// getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
// corresponding to the highest fps.
final int[] range = listFpsRange.get(listFpsRange.size() - 1);
- minFps = range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
- maxFps = range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
}
- for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
}
} catch (Exception e) {
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase.java b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
index 2ee36882e8..035645bdd1 100644
--- a/talk/app/webrtc/java/android/org/webrtc/EglBase.java
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
@@ -28,244 +28,108 @@
package org.webrtc;
import android.graphics.SurfaceTexture;
-import android.view.SurfaceHolder;
-
-import org.webrtc.Logging;
+import android.view.Surface;
import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLConfig;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGLDisplay;
-import javax.microedition.khronos.egl.EGLSurface;
+
/**
- * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay, and an EGLSurface.
+ * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay,
+ * and an EGLSurface; create() dispatches to an EGL 1.0 or EGL 1.4 implementation.
*/
-public final class EglBase {
- private static final String TAG = "EglBase";
+public abstract class EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public static class Context {
+ }
+
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
// This is similar to how GlSurfaceView does:
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
private static final int EGL_OPENGL_ES2_BIT = 4;
- private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
// Android-specific extension.
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
- private final EGL10 egl;
- private EGLContext eglContext;
- private ConfigType configType;
- private EGLConfig eglConfig;
- private EGLDisplay eglDisplay;
- private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
-
- // EGLConfig constructor type. Influences eglChooseConfig arguments.
- public static enum ConfigType {
- // No special parameters.
- PLAIN,
- // Configures with EGL_SURFACE_TYPE = EGL_PBUFFER_BIT.
- PIXEL_BUFFER,
- // Configures with EGL_RECORDABLE_ANDROID = 1.
- // Discourages EGL from using pixel formats that cannot efficiently be
- // converted to something usable by the video encoder.
- RECORDABLE
- }
-
- // Create root context without any EGLSurface or parent EGLContext. This can be used for branching
- // new contexts that share data.
- public EglBase() {
- this(EGL10.EGL_NO_CONTEXT, ConfigType.PLAIN);
- }
-
- // Create a new context with the specified config type, sharing data with sharedContext.
- public EglBase(EGLContext sharedContext, ConfigType configType) {
- this.egl = (EGL10) EGLContext.getEGL();
- this.configType = configType;
- eglDisplay = getEglDisplay();
- eglConfig = getEglConfig(eglDisplay, configType);
- eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
- }
-
- // Create EGLSurface from the Android SurfaceHolder.
- public void createSurface(SurfaceHolder surfaceHolder) {
- createSurfaceInternal(surfaceHolder);
- }
+ public static final int[] CONFIG_PLAIN = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RGBA = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RECORDABLE = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL10.EGL_NONE
+ };
+
+ // Create a new context with the specified config attributes, sharing data with sharedContext.
+ // |sharedContext| can be null.
+ public static EglBase create(Context sharedContext, int[] configAttributes) {
+ return (EglBase14.isEGL14Supported()
+ && (sharedContext == null || sharedContext instanceof EglBase14.Context))
+ ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
+ : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
+ }
+
+ public static EglBase create() {
+ return create(null, CONFIG_PLAIN);
+ }
+
+ public abstract void createSurface(Surface surface);
// Create EGLSurface from the Android SurfaceTexture.
- public void createSurface(SurfaceTexture surfaceTexture) {
- createSurfaceInternal(surfaceTexture);
- }
-
- // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
- private void createSurfaceInternal(Object nativeWindow) {
- if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
- throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
- }
- checkIsNotReleased();
- if (configType == ConfigType.PIXEL_BUFFER) {
- Logging.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
- }
- if (eglSurface != EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Already has an EGLSurface");
- }
- int[] surfaceAttribs = {EGL10.EGL_NONE};
- eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Failed to create window surface");
- }
- }
+ public abstract void createSurface(SurfaceTexture surfaceTexture);
// Create dummy 1x1 pixel buffer surface so the context can be made current.
- public void createDummyPbufferSurface() {
- createPbufferSurface(1, 1);
- }
-
- public void createPbufferSurface(int width, int height) {
- checkIsNotReleased();
- if (configType != ConfigType.PIXEL_BUFFER) {
- throw new RuntimeException(
- "This EGL context is not configured to use a pixel buffer: " + configType);
- }
- if (eglSurface != EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Already has an EGLSurface");
- }
- int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
- eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("Failed to create pixel buffer surface");
- }
- }
+ public abstract void createDummyPbufferSurface();
- public EGLContext getContext() {
- return eglContext;
- }
+ public abstract void createPbufferSurface(int width, int height);
- public boolean hasSurface() {
- return eglSurface != EGL10.EGL_NO_SURFACE;
- }
+ public abstract Context getEglBaseContext();
- public int surfaceWidth() {
- final int widthArray[] = new int[1];
- egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
- return widthArray[0];
- }
+ public abstract boolean hasSurface();
- public int surfaceHeight() {
- final int heightArray[] = new int[1];
- egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
- return heightArray[0];
- }
+ public abstract int surfaceWidth();
- public void releaseSurface() {
- if (eglSurface != EGL10.EGL_NO_SURFACE) {
- egl.eglDestroySurface(eglDisplay, eglSurface);
- eglSurface = EGL10.EGL_NO_SURFACE;
- }
- }
+ public abstract int surfaceHeight();
- private void checkIsNotReleased() {
- if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
- || eglConfig == null) {
- throw new RuntimeException("This object has been released");
- }
- }
+ public abstract void releaseSurface();
- public void release() {
- checkIsNotReleased();
- releaseSurface();
- detachCurrent();
- egl.eglDestroyContext(eglDisplay, eglContext);
- egl.eglTerminate(eglDisplay);
- eglContext = EGL10.EGL_NO_CONTEXT;
- eglDisplay = EGL10.EGL_NO_DISPLAY;
- eglConfig = null;
- }
+ public abstract void release();
- public void makeCurrent() {
- checkIsNotReleased();
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("No EGLSurface - can't make current");
- }
- if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
- throw new RuntimeException("eglMakeCurrent failed");
- }
- }
+ public abstract void makeCurrent();
// Detach the current EGL context, so that it can be made current on another thread.
- public void detachCurrent() {
- if (!egl.eglMakeCurrent(
- eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
- throw new RuntimeException("eglMakeCurrent failed");
- }
- }
+ public abstract void detachCurrent();
- public void swapBuffers() {
- checkIsNotReleased();
- if (eglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("No EGLSurface - can't swap buffers");
- }
- egl.eglSwapBuffers(eglDisplay, eglSurface);
- }
-
- // Return an EGLDisplay, or die trying.
- private EGLDisplay getEglDisplay() {
- EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
- if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
- throw new RuntimeException("Unable to get EGL10 display");
- }
- int[] version = new int[2];
- if (!egl.eglInitialize(eglDisplay, version)) {
- throw new RuntimeException("Unable to initialize EGL10");
- }
- return eglDisplay;
- }
-
- // Return an EGLConfig, or die trying.
- private EGLConfig getEglConfig(EGLDisplay eglDisplay, ConfigType configType) {
- // Always RGB888, GLES2.
- int[] configAttributes = {
- EGL10.EGL_RED_SIZE, 8,
- EGL10.EGL_GREEN_SIZE, 8,
- EGL10.EGL_BLUE_SIZE, 8,
- EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
- EGL10.EGL_NONE, 0, // Allocate dummy fields for specific options.
- EGL10.EGL_NONE
- };
-
- // Fill in dummy fields based on configType.
- switch (configType) {
- case PLAIN:
- break;
- case PIXEL_BUFFER:
- configAttributes[configAttributes.length - 3] = EGL10.EGL_SURFACE_TYPE;
- configAttributes[configAttributes.length - 2] = EGL10.EGL_PBUFFER_BIT;
- break;
- case RECORDABLE:
- configAttributes[configAttributes.length - 3] = EGL_RECORDABLE_ANDROID;
- configAttributes[configAttributes.length - 2] = 1;
- break;
- default:
- throw new IllegalArgumentException();
- }
-
- EGLConfig[] configs = new EGLConfig[1];
- int[] numConfigs = new int[1];
- if (!egl.eglChooseConfig(
- eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
- throw new RuntimeException("Unable to find RGB888 " + configType + " EGL config");
- }
- return configs[0];
- }
-
- // Return an EGLConfig, or die trying.
- private EGLContext createEglContext(
- EGLContext sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
- int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
- EGLContext eglContext =
- egl.eglCreateContext(eglDisplay, eglConfig, sharedContext, contextAttributes);
- if (eglContext == EGL10.EGL_NO_CONTEXT) {
- throw new RuntimeException("Failed to create EGL context");
- }
- return eglContext;
- }
+ public abstract void swapBuffers();
}
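
From the caller's side, the refactoring reduces to one factory call: create()
returns an EglBase14 when EGL14 is usable (API 18+, see EglBase14 below) and an
EglBase10 otherwise. A minimal off-screen sketch:

    // CONFIG_PIXEL_BUFFER requests a pbuffer-capable EGLConfig.
    EglBase eglBase = EglBase.create(null /* sharedContext */, EglBase.CONFIG_PIXEL_BUFFER);
    eglBase.createDummyPbufferSurface();
    eglBase.makeCurrent();
    // ... GLES20 drawing / glReadPixels here ...
    eglBase.release();  // also destroys the surface and terminates the display
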
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase10.java b/talk/app/webrtc/java/android/org/webrtc/EglBase10.java
new file mode 100644
index 0000000000..f2aa9857fa
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase10.java
@@ -0,0 +1,299 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.SurfaceTexture;
+import android.graphics.Rect;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+final class EglBase10 extends EglBase {
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ public static class Context extends EglBase.Context {
+ private final EGLContext eglContext;
+
+ public Context(EGLContext eglContext) {
+ this.eglContext = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ EglBase10(Context sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new EglBase10.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int widthArray[] = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int heightArray[] = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL10 display");
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new RuntimeException("Unable to initialize EGL10");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(
+ eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ return configs[0];
+ }
+
+ // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(
+ Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
+ EGLContext eglContext =
+ egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
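
Context sharing crosses the factory as an opaque EglBase.Context wrapper; since
create() dispatches on the wrapper's concrete type, an EglBase10.Context always
produces another EglBase10. A sketch (the classes are package-private, so from
within org.webrtc):

    EglBase rootEgl = EglBase.create();  // root context, no surface yet
    // Second context sharing texture data with the root context.
    EglBase sharedEgl = EglBase.create(rootEgl.getEglBaseContext(), EglBase.CONFIG_PLAIN);
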
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase14.java b/talk/app/webrtc/java/android/org/webrtc/EglBase14.java
new file mode 100644
index 0000000000..c6f98c3b31
--- /dev/null
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase14.java
@@ -0,0 +1,254 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ * derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@TargetApi(18)
+final class EglBase14 extends EglBase {
+ private static final String TAG = "EglBase14";
+ private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
+ private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+ // EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the
+ // presentation time stamp on a surface, is only available from API 18, so we require 18.
+ public static boolean isEGL14Supported() {
+ Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+ + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+ return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
+ }
+
+ public static class Context extends EglBase.Context {
+ private final android.opengl.EGLContext egl14Context;
+
+ Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // |sharedContext| may be null.
+ EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create pixel buffer surface");
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new EglBase14.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int widthArray[] = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int heightArray[] = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new RuntimeException("Unable to initialize EGL14");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ return configs[0];
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(
+ EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
+ EGLContext eglContext =
+ EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
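
The EGL14 path is what enables the timestamped swapBuffers(long) above, which the
EGL10 fallback cannot offer. A hypothetical use when rendering into a MediaCodec
encoder input surface (encoderInputSurface and frameTimestampNs are assumed to
come from the encoder integration, and the cast requires package access since
EglBase14 is package-private):

    EglBase eglBase = EglBase.create(null, EglBase.CONFIG_RECORDABLE);
    eglBase.createSurface(encoderInputSurface);
    eglBase.makeCurrent();
    // ... draw the frame with GLES20 ...
    if (eglBase instanceof EglBase14) {
      // Tags the frame via EGL_ANDROID_presentation_time before swapping.
      ((EglBase14) eglBase).swapBuffers(frameTimestampNs);
    } else {
      eglBase.swapBuffers();
    }
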
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
index 2cb8af754d..6d3d5d2563 100644
--- a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
@@ -40,13 +40,13 @@ import java.util.IdentityHashMap;
import java.util.Map;
/**
- * Helper class to draw a quad that covers the entire viewport. Rotation, mirror, and cropping is
- * specified using a 4x4 texture coordinate transform matrix. The frame input can either be an OES
- * texture or YUV textures in I420 format. The GL state must be preserved between draw calls, this
- * is intentional to maximize performance. The function release() must be called manually to free
- * the resources held by this object.
+ * Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
+ * cropping is specified using a 4x4 texture coordinate transform matrix. The frame input can either
+ * be an OES texture or YUV textures in I420 format. The GL state must be preserved between draw
+ * calls, this is intentional to maximize performance. The function release() must be called
+ * manually to free the resources held by this object.
*/
-public class GlRectDrawer {
+public class GlRectDrawer implements RendererCommon.GlDrawer {
// Simple vertex shader, used for both YUV and OES.
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n"
@@ -118,67 +118,31 @@ public class GlRectDrawer {
1.0f, 1.0f // Top right.
});
- // The keys are one of the fragments shaders above.
- private final Map<String, GlShader> shaders = new IdentityHashMap<String, GlShader>();
- private GlShader currentShader;
- private float[] currentTexMatrix;
- private int texMatrixLocation;
- // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
- // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader that
- // handles stride and compare performance with intermediate copy.
- private ByteBuffer copyBuffer;
+ private static class Shader {
+ public final GlShader glShader;
+ public final int texMatrixLocation;
- /**
- * Upload |planes| into |outputYuvTextures|, taking stride into consideration. |outputYuvTextures|
- * must have been generated in advance.
- */
- public void uploadYuvData(
- int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
- // Make a first pass to see if we need a temporary copy buffer.
- int copyCapacityNeeded = 0;
- for (int i = 0; i < 3; ++i) {
- final int planeWidth = (i == 0) ? width : width / 2;
- final int planeHeight = (i == 0) ? height : height / 2;
- if (strides[i] > planeWidth) {
- copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidth * planeHeight);
- }
- }
- // Allocate copy buffer if necessary.
- if (copyCapacityNeeded > 0
- && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
- copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
- }
- // Upload each plane.
- for (int i = 0; i < 3; ++i) {
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
- GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
- final int planeWidth = (i == 0) ? width : width / 2;
- final int planeHeight = (i == 0) ? height : height / 2;
- // GLES only accepts packed data, i.e. stride == planeWidth.
- final ByteBuffer packedByteBuffer;
- if (strides[i] == planeWidth) {
- // Input is packed already.
- packedByteBuffer = planes[i];
- } else {
- VideoRenderer.nativeCopyPlane(
- planes[i], planeWidth, planeHeight, strides[i], copyBuffer, planeWidth);
- packedByteBuffer = copyBuffer;
- }
- GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
- GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ public Shader(String fragmentShader) {
+ this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
}
}
+ // The keys are one of the fragments shaders above.
+ private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
+
/**
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
- public void drawOes(int oesTextureId, float[] texMatrix) {
- prepareShader(OES_FRAGMENT_SHADER_STRING);
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
// updateTexImage() may be called from another thread in another EGL context, so we need to
// bind/unbind the texture in each draw call so that GLES understands it's a new texture.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
- drawRectangle(texMatrix);
+ drawRectangle(x, y, width, height);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
@@ -186,10 +150,12 @@ public class GlRectDrawer {
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
* are allocated at the first call to this function.
*/
- public void drawRgb(int textureId, float[] texMatrix) {
- prepareShader(RGB_FRAGMENT_SHADER_STRING);
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
- drawRectangle(texMatrix);
+ drawRectangle(x, y, width, height);
// Unbind the texture as a precaution.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
}
@@ -198,14 +164,15 @@ public class GlRectDrawer {
* Draw a YUV frame with specified texture transformation matrix. Required resources are
* allocated at the first call to this function.
*/
- public void drawYuv(int[] yuvTextures, float[] texMatrix) {
- prepareShader(YUV_FRAGMENT_SHADER_STRING);
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height) {
+ prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
// Bind the textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
}
- drawRectangle(texMatrix);
+ drawRectangle(x, y, width, height);
// Unbind the textures as a precaution.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
@@ -213,60 +180,51 @@ public class GlRectDrawer {
}
}
- private void drawRectangle(float[] texMatrix) {
- // Try avoid uploading the texture if possible.
- if (!Arrays.equals(currentTexMatrix, texMatrix)) {
- currentTexMatrix = texMatrix.clone();
- // Copy the texture transformation matrix over.
- GLES20.glUniformMatrix4fv(texMatrixLocation, 1, false, texMatrix, 0);
- }
+ private void drawRectangle(int x, int y, int width, int height) {
// Draw quad.
+ GLES20.glViewport(x, y, width, height);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
- private void prepareShader(String fragmentShader) {
- // Lazy allocation.
- if (!shaders.containsKey(fragmentShader)) {
- final GlShader shader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ private void prepareShader(String fragmentShader, float[] texMatrix) {
+ final Shader shader;
+ if (shaders.containsKey(fragmentShader)) {
+ shader = shaders.get(fragmentShader);
+ } else {
+ // Lazy allocation.
+ shader = new Shader(fragmentShader);
shaders.put(fragmentShader, shader);
- shader.useProgram();
+ shader.glShader.useProgram();
// Initialize fragment shader uniform values.
if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
- GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
- GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
- GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
} else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
- GLES20.glUniform1i(shader.getUniformLocation("rgb_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
} else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
- GLES20.glUniform1i(shader.getUniformLocation("oes_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
} else {
throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
}
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
- shader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
- shader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
- }
-
- // Update GLES state if shader is not already current.
- final GlShader shader = shaders.get(fragmentShader);
- if (currentShader != shader) {
- currentShader = shader;
- shader.useProgram();
- GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
- currentTexMatrix = null;
- texMatrixLocation = shader.getUniformLocation("texMatrix");
+ shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+ shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
}
+ shader.glShader.useProgram();
+ // Copy the texture transformation matrix over.
+ GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
}
/**
* Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
*/
+ @Override
public void release() {
- for (GlShader shader : shaders.values()) {
- shader.release();
+ for (Shader shader : shaders.values()) {
+ shader.glShader.release();
}
shaders.clear();
- copyBuffer = null;
}
}
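
With viewport coordinates now passed to every draw call, a single GlRectDrawer
can composite several frames onto one surface. A sketch, assuming oesTextureId
and texMatrix come from a SurfaceTexture and the surface dimensions are known:

    RendererCommon.GlDrawer drawer = new GlRectDrawer();
    // Full-surface render.
    drawer.drawOes(oesTextureId, texMatrix, 0, 0, surfaceWidth, surfaceHeight);
    // Thumbnail in the lower-left quadrant of the same surface.
    drawer.drawOes(oesTextureId, texMatrix, 0, 0, surfaceWidth / 2, surfaceHeight / 2);
    drawer.release();  // GL resources must still be freed manually
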
diff --git a/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
index e3a7850db4..950dcdfa44 100644
--- a/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
+++ b/talk/app/webrtc/java/android/org/webrtc/NetworkMonitorAutoDetect.java
@@ -55,7 +55,7 @@ import android.util.Log;
* ACCESS_NETWORK_STATE permission.
*/
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
- static enum ConnectionType {
+ public static enum ConnectionType {
CONNECTION_UNKNOWN,
CONNECTION_ETHERNET,
CONNECTION_WIFI,
@@ -96,6 +96,10 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
/** Queries the ConnectivityManager for information about the current connection. */
static class ConnectivityManagerDelegate {
+ /**
+ * Note: In some rare Android systems connectivityManager is null. We handle that
+ * gracefully below.
+ */
private final ConnectivityManager connectivityManager;
ConnectivityManagerDelegate(Context context) {
@@ -114,6 +118,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
* default network.
*/
NetworkState getNetworkState() {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
return getNetworkState(connectivityManager.getActiveNetworkInfo());
}
@@ -123,6 +130,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
NetworkState getNetworkState(Network network) {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
return getNetworkState(connectivityManager.getNetworkInfo(network));
}
@@ -142,6 +152,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
Network[] getAllNetworks() {
+ if (connectivityManager == null) {
+ return new Network[0];
+ }
return connectivityManager.getAllNetworks();
}
@@ -152,6 +165,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
int getDefaultNetId() {
+ if (connectivityManager == null) {
+ return INVALID_NET_ID;
+ }
// Android Lollipop had no API to get the default network; only an
// API to return the NetworkInfo for the default network. To
// determine the default network one can find the network with
@@ -188,6 +204,9 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
*/
@SuppressLint("NewApi")
boolean hasInternetCapability(Network network) {
+ if (connectivityManager == null) {
+ return false;
+ }
final NetworkCapabilities capabilities =
connectivityManager.getNetworkCapabilities(network);
return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
@@ -240,7 +259,6 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
static final int INVALID_NET_ID = -1;
private static final String TAG = "NetworkMonitorAutoDetect";
- private static final int UNKNOWN_LINK_SPEED = -1;
private final IntentFilter intentFilter;
// Observer for the connection type change.
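
Every delegate method now degrades to a safe default instead of throwing when
Context.getSystemService() returned no ConnectivityManager. A caller sketch
(field semantics of NetworkState are assumed from the constructor shown above):

    // On the rare systems where connectivityManager is null, the delegate
    // reports "disconnected" rather than raising a NullPointerException.
    ConnectivityManagerDelegate delegate = new ConnectivityManagerDelegate(context);
    NetworkState state = delegate.getNetworkState();  // falls back to (false, -1, -1)
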
diff --git a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
index 94d180da5a..5ada4cc416 100644
--- a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
+++ b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
@@ -28,8 +28,11 @@
package org.webrtc;
import android.graphics.Point;
+import android.opengl.GLES20;
import android.opengl.Matrix;
+import java.nio.ByteBuffer;
+
/**
* Static helper functions for renderer implementations.
*/
@@ -47,6 +50,73 @@ public class RendererCommon {
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
}
+ /** Interface for rendering frames on an EGLSurface. */
+ public static interface GlDrawer {
+ /**
+ * Functions for drawing frames with different sources. The rendering surface target is
+ * implied by the current EGL context of the calling thread and requires no explicit argument.
+ * The coordinates specify the viewport location on the surface target.
+ */
+ void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height);
+ void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height);
+ void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height);
+
+ /**
+ * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+ */
+ void release();
+ }
+
+ /**
+ * Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
+ * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
+ */
+ public static class YuvUploader {
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+ // that handles stride and compare performance with intermediate copy.
+ private ByteBuffer copyBuffer;
+
+ /**
+ * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
+ * |outputYuvTextures| must have been generated in advance.
+ */
+ public void uploadYuvData(
+ int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+ final int[] planeWidths = new int[] {width, width / 2, width / 2};
+ final int[] planeHeights = new int[] {height, height / 2, height / 2};
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ if (strides[i] > planeWidths[i]) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidths[i]) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ VideoRenderer.nativeCopyPlane(
+ planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+ planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ }
+ }
+
// Types of video scaling:
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
// maintaining the aspect ratio (black borders may be displayed).
@@ -182,9 +252,9 @@ public class RendererCommon {
}
// Each dimension is constrained on max display size and how much we are allowed to crop.
final int width = Math.min(maxDisplayWidth,
- (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+ Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
final int height = Math.min(maxDisplayHeight,
- (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+ Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
return new Point(width, height);
}
}
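For context, a hedged sketch of how a renderer might drive the new YuvUploader; this mirrors the SurfaceViewRenderer changes later in this diff. A current EGL context is assumed, and |frame| is assumed to follow VideoRenderer.I420Frame:

// Sketch: upload an I420 frame into caller-owned textures.
private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
private int[] yuvTextures;

void uploadFrame(VideoRenderer.I420Frame frame) {
  if (yuvTextures == null) {
    yuvTextures = new int[3];
    for (int i = 0; i < 3; ++i) {
      yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
    }
  }
  // Planes with stride > width are repacked through the uploader's internal copy buffer.
  yuvUploader.uploadYuvData(yuvTextures, frame.width, frame.height,
      frame.yuvStrides, frame.yuvPlanes);
}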
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
index b9c158f848..b001d2a101 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceTextureHelper.java
@@ -35,12 +35,12 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import javax.microedition.khronos.egl.EGLContext;
-
/**
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
@@ -51,7 +51,7 @@ import javax.microedition.khronos.egl.EGLContext;
* wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
* when the webrtc::VideoFrame is no longer used.
*/
-final class SurfaceTextureHelper {
+class SurfaceTextureHelper {
private static final String TAG = "SurfaceTextureHelper";
/**
* Callback interface for being notified that a new texture frame is available. The calls will be
@@ -65,7 +65,7 @@ final class SurfaceTextureHelper {
int oesTextureId, float[] transformMatrix, long timestampNs);
}
- public static SurfaceTextureHelper create(EGLContext sharedContext) {
+ public static SurfaceTextureHelper create(EglBase.Context sharedContext) {
return create(sharedContext, null);
}
@@ -74,7 +74,8 @@ final class SurfaceTextureHelper {
* |handler| is non-null, the callback will be executed on that handler's thread. If |handler| is
* null, a dedicated private thread is created for the callbacks.
*/
- public static SurfaceTextureHelper create(final EGLContext sharedContext, final Handler handler) {
+ public static SurfaceTextureHelper create(final EglBase.Context sharedContext,
+ final Handler handler) {
final Handler finalHandler;
if (handler != null) {
finalHandler = handler;
@@ -94,25 +95,240 @@ final class SurfaceTextureHelper {
});
}
+ // State for YUV conversion, instantiated on demand.
+ private static class YuvConverter {
+ private final EglBase eglBase;
+ private final GlShader shader;
+ private boolean released = false;
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e.
+ // (-1, -1) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer DEVICE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer TEXTURE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static final String VERTEX_SHADER =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oesTex;\n"
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ + "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources. TODO(nisse): Could also
+ // try to do it as a vec3 x mat3x4, followed by an add in of a
+ // constant vector.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private int texMatrixLoc;
+ private int xUnitLoc;
+ private int coeffsLoc;
+
+ YuvConverter(EglBase.Context sharedContext) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+ shader.useProgram();
+ texMatrixLoc = shader.getUniformLocation("texMatrix");
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
+ // If the width is not a multiple of 4 pixels, the texture
+ // will be scaled up slightly and clipped at the right border.
+ shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void convert(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (released) {
+ throw new IllegalStateException(
+ "YuvConverter.convert called on released object");
+ }
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset |height| * |stride| from the Y data,
+ // and the V data starts at offset |stride/2| from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+ // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+ // unsupported by devices. So do the following hack: Allocate an
+ // RGBA buffer, of width |stride|/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+ // larger pixel, it is not sufficient that |stride| is even, it
+ // has to be a multiple of 8 pixels.
+
+ if (stride % 8 != 0) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be a multiple of 8");
+ }
+ if (stride < width) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be >= width");
+ }
+
+ int y_width = (width + 3) / 4;
+ int uv_width = (width + 7) / 8;
+ int uv_height = (height + 1) / 2;
+ int total_height = height + uv_height;
+ int size = stride * total_height;
+
+ if (buf.capacity() < size) {
+ throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
+ }
+ // Produce a frame buffer starting at top-left corner, not
+ // bottom-left.
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix,
+ RendererCommon.verticalFlipMatrix());
+
+ // Create a new pbuffer surface with the correct size if needed.
+ if (eglBase.hasSurface()) {
+ if (eglBase.surfaceWidth() != stride / 4 ||
+ eglBase.surfaceHeight() != total_height) {
+ eglBase.releaseSurface();
+ eglBase.createPbufferSurface(stride / 4, total_height);
+ }
+ } else {
+ eglBase.createPbufferSurface(stride / 4, total_height);
+ }
+
+ eglBase.makeCurrent();
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
+
+ // Draw Y
+ GLES20.glViewport(0, 0, y_width, height);
+ // Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / width,
+ transformMatrix[1] / width);
+ // Y'UV444 to RGB888, see
+ // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
+ // We use the ITU-R coefficients for U and V.
+ GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw U
+ GLES20.glViewport(0, height, uv_width, uv_height);
+ // Matrix * (1;0;0;0) / (2*width). Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / (2.0f*width),
+ transformMatrix[1] / (2.0f*width));
+ GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw V
+ GLES20.glViewport(stride / 8, height, uv_width, uv_height);
+ GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ GLES20.glReadPixels(0, 0, stride / 4, total_height, GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE, buf);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Unbind texture. Reportedly needed on some devices to get
+ // the texture updated from the camera.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void release() {
+ released = true;
+ eglBase.makeCurrent();
+ shader.release();
+ eglBase.release();
+ }
+ }
+
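To make the layout description in convert() concrete, here is the size arithmetic worked through for width = 640, height = 480, stride = 640 (a multiple of 8, as required):

// Worked example of the buffer-size math in YuvConverter.convert() above.
int uv_height = (480 + 1) / 2;        // 240 rows of interleaved U/V data.
int total_height = 480 + uv_height;   // 720 rows in the packed output.
int size = 640 * total_height;        // 460800 bytes; |buf| must be at least this large.
// The RGBA pbuffer is stride / 4 = 160 pixels wide; each RGBA pixel carries 4 Y samples.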
private final Handler handler;
- private final boolean isOwningThread;
+ private boolean isOwningThread;
private final EglBase eglBase;
private final SurfaceTexture surfaceTexture;
private final int oesTextureId;
+ private volatile YuvConverter yuvConverter; // Created lazily; volatile for safe double-checked locking.
+
private OnTextureFrameAvailableListener listener;
// The possible states of this class.
private boolean hasPendingTexture = false;
- private boolean isTextureInUse = false;
+ private volatile boolean isTextureInUse = false;
private boolean isQuitting = false;
- private SurfaceTextureHelper(EGLContext sharedContext, Handler handler, boolean isOwningThread) {
+ private SurfaceTextureHelper(EglBase.Context sharedContext,
+ Handler handler, boolean isOwningThread) {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
}
this.handler = handler;
this.isOwningThread = isOwningThread;
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
@@ -120,6 +336,18 @@ final class SurfaceTextureHelper {
surfaceTexture = new SurfaceTexture(oesTextureId);
}
+ private YuvConverter getYuvConverter() {
+ // |yuvConverter| is assigned at most once, using double-checked locking.
+ if (yuvConverter != null) {
+ return yuvConverter;
+ }
+ synchronized (this) {
+ if (yuvConverter == null) {
+ yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
+ }
+ return yuvConverter;
+ }
+ }
+
/**
* Start to stream textures to the given |listener|.
* A Listener can only be set once.
@@ -164,12 +392,19 @@ final class SurfaceTextureHelper {
});
}
+ public boolean isTextureInUse() {
+ return isTextureInUse;
+ }
+
/**
* Call disconnect() to stop receiving frames. Resources are released when the texture frame has
* been returned by a call to returnTextureFrame(). You are guaranteed to not receive any more
* onTextureFrameAvailable() after this function returns.
*/
public void disconnect() {
+ if (!isOwningThread) {
+ throw new IllegalStateException("Must call disconnect(handler).");
+ }
if (handler.getLooper().getThread() == Thread.currentThread()) {
isQuitting = true;
if (!isTextureInUse) {
@@ -190,6 +425,28 @@ final class SurfaceTextureHelper {
ThreadUtils.awaitUninterruptibly(barrier);
}
+ /**
+ * Call disconnect() to stop receiving frames and quit the looper used by |handler|.
+ * Resources are released when the texture frame has been returned by a call to
+ * returnTextureFrame(). You are guaranteed to not receive any more
+ * onTextureFrameAvailable() after this function returns.
+ */
+ public void disconnect(Handler handler) {
+ if (this.handler != handler) {
+ throw new IllegalStateException("Wrong handler.");
+ }
+ isOwningThread = true;
+ disconnect();
+ }
+
+ public void textureToYUV(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float[] transformMatrix) {
+ if (textureId != oesTextureId) {
+ throw new IllegalStateException("textureToYUV called with unexpected textureId");
+ }
+
+ getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+ }
+
private void tryDeliverTextureFrame() {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("Wrong thread.");
@@ -218,12 +475,14 @@ final class SurfaceTextureHelper {
if (isTextureInUse || !isQuitting) {
throw new IllegalStateException("Unexpected release.");
}
+ synchronized (this) {
+ if (yuvConverter != null)
+ yuvConverter.release();
+ }
eglBase.makeCurrent();
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
surfaceTexture.release();
eglBase.release();
- if (isOwningThread) {
- handler.getLooper().quit();
- }
+ handler.getLooper().quit();
}
}
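A sketch of the two tear-down paths that now exist, given the disconnect() changes above (|sharedContext| and the thread name are assumptions):

// Case 1: the helper owns its thread (null handler); disconnect() also quits that looper.
SurfaceTextureHelper owned = SurfaceTextureHelper.create(sharedContext, null);
owned.disconnect();

// Case 2: the caller supplied the handler, so it must be passed back via disconnect(Handler);
// calling the no-argument disconnect() here would throw IllegalStateException.
HandlerThread thread = new HandlerThread("CaptureThread");
thread.start();
Handler handler = new Handler(thread.getLooper());
SurfaceTextureHelper shared = SurfaceTextureHelper.create(sharedContext, handler);
shared.disconnect(handler);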
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index d7c9e2af0a..fa199b33c8 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -28,10 +28,9 @@
package org.webrtc;
import android.content.Context;
+import android.content.res.Resources.NotFoundException;
import android.graphics.Point;
-import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
-import android.opengl.Matrix;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
@@ -67,7 +66,8 @@ public class SurfaceViewRenderer extends SurfaceView
// EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
// from the render thread.
private EglBase eglBase;
- private GlRectDrawer drawer;
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private RendererCommon.GlDrawer drawer;
// Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
private int[] yuvTextures = null;
@@ -77,23 +77,22 @@ public class SurfaceViewRenderer extends SurfaceView
// These variables are synchronized on |layoutLock|.
private final Object layoutLock = new Object();
- // These three different dimension values are used to keep track of the state in these functions:
- // requestLayout() -> onMeasure() -> onLayout() -> surfaceChanged().
- // requestLayout() is triggered internally by frame size changes, but can also be triggered
- // externally by layout update requests.
- // Most recent measurement specification from onMeasure().
- private int widthSpec;
- private int heightSpec;
- // Current size on screen in pixels. Updated in onLayout(), and should be consistent with
- // |widthSpec|/|heightSpec| after that.
- private int layoutWidth;
- private int layoutHeight;
- // Current surface size of the underlying Surface. Updated in surfaceChanged(), and should be
- // consistent with |layoutWidth|/|layoutHeight| after that.
+ // These dimension values are used to keep track of the state in these functions: onMeasure(),
+ // onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
+ // internally when the incoming frame size changes. requestLayout() can also be triggered
+ // externally. The layout change is a two pass process: first onMeasure() is called in a top-down
+ // traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
+ // onLayout() pass, each parent is responsible for positioning its children using the sizes
+ // computed in the measure pass.
+ // |desiredLayoutSize| is the layout size requested in onMeasure() that we are waiting to take
+ // effect.
+ private Point desiredLayoutSize = new Point();
+ // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
+ // onLayout() and surfaceChanged() respectively.
+ private final Point layoutSize = new Point();
// TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
// layout and surface size.
- private int surfaceWidth;
- private int surfaceHeight;
+ private final Point surfaceSize = new Point();
// |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
private boolean isSurfaceCreated;
// Last rendered frame dimensions, or 0 if no frame has been rendered yet.
@@ -121,12 +120,18 @@ public class SurfaceViewRenderer extends SurfaceView
// Time in ns spent in renderFrameOnRenderThread() function.
private long renderTimeNs;
- // Runnable for posting frames to render thread..
+ // Runnable for posting frames to render thread.
private final Runnable renderFrameRunnable = new Runnable() {
@Override public void run() {
renderFrameOnRenderThread();
}
};
+ // Runnable for clearing Surface to black.
+ private final Runnable makeBlackRunnable = new Runnable() {
+ @Override public void run() {
+ makeBlack();
+ }
+ };
/**
* Standard View constructor. In order to render something, you must first call init().
@@ -149,17 +154,28 @@ public class SurfaceViewRenderer extends SurfaceView
* reinitialize the renderer after a previous init()/release() cycle.
*/
public void init(
- EGLContext sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+ }
+
+ /**
+ * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
+ int[] configAttributes, RendererCommon.GlDrawer drawer) {
synchronized (handlerLock) {
if (renderThreadHandler != null) {
- throw new IllegalStateException("Already initialized");
+ throw new IllegalStateException(getResourceName() + "Already initialized");
}
- Logging.d(TAG, "Initializing");
+ Logging.d(TAG, getResourceName() + "Initializing.");
this.rendererEvents = rendererEvents;
+ this.drawer = drawer;
renderThread = new HandlerThread(TAG);
renderThread.start();
- drawer = new GlRectDrawer();
- eglBase = new EglBase(sharedContext, EglBase.ConfigType.PLAIN);
+ eglBase = EglBase.create(sharedContext, configAttributes);
renderThreadHandler = new Handler(renderThread.getLooper());
}
tryCreateEglSurface();
@@ -174,8 +190,8 @@ public class SurfaceViewRenderer extends SurfaceView
runOnRenderThread(new Runnable() {
@Override public void run() {
synchronized (layoutLock) {
- if (isSurfaceCreated) {
- eglBase.createSurface(getHolder());
+ if (isSurfaceCreated && !eglBase.hasSurface()) {
+ eglBase.createSurface(getHolder().getSurface());
eglBase.makeCurrent();
// Necessary for YUV frames with odd width.
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
@@ -195,7 +211,7 @@ public class SurfaceViewRenderer extends SurfaceView
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
synchronized (handlerLock) {
if (renderThreadHandler == null) {
- Logging.d(TAG, "Already released");
+ Logging.d(TAG, getResourceName() + "Already released");
return;
}
// Release EGL and GL resources on render thread.
@@ -210,11 +226,8 @@ public class SurfaceViewRenderer extends SurfaceView
GLES20.glDeleteTextures(3, yuvTextures, 0);
yuvTextures = null;
}
- if (eglBase.hasSurface()) {
- // Clear last rendered image to black.
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- eglBase.swapBuffers();
- }
+ // Clear last rendered image to black.
+ makeBlack();
eglBase.release();
eglBase = null;
eglCleanupBarrier.countDown();
@@ -242,6 +255,14 @@ public class SurfaceViewRenderer extends SurfaceView
frameRotation = 0;
rendererEvents = null;
}
+ resetStatistics();
+ }
+
+ /**
+ * Reset statistics. This will reset the logged statistics in logStatistics(), and
+ * RendererEvents.onFirstFrameRendered() will be called for the next frame.
+ */
+ public void resetStatistics() {
synchronized (statisticsLock) {
framesReceived = 0;
framesDropped = 0;
@@ -277,27 +298,28 @@ public class SurfaceViewRenderer extends SurfaceView
}
synchronized (handlerLock) {
if (renderThreadHandler == null) {
- Logging.d(TAG, "Dropping frame - SurfaceViewRenderer not initialized or already released.");
- } else {
- synchronized (frameLock) {
- if (pendingFrame == null) {
- updateFrameDimensionsAndReportEvents(frame);
- pendingFrame = frame;
- renderThreadHandler.post(renderFrameRunnable);
- return;
+ Logging.d(TAG, getResourceName()
+ + "Dropping frame - Not initialized or already released.");
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ // Drop old frame.
+ synchronized (statisticsLock) {
+ ++framesDropped;
}
+ VideoRenderer.renderFrameDone(pendingFrame);
}
+ pendingFrame = frame;
+ updateFrameDimensionsAndReportEvents(frame);
+ renderThreadHandler.post(renderFrameRunnable);
}
}
- // Drop frame.
- synchronized (statisticsLock) {
- ++framesDropped;
- }
- VideoRenderer.renderFrameDone(frame);
}
// Returns desired layout size given current measure specification and video aspect ratio.
- private Point getDesiredLayoutSize() {
+ private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
@@ -317,18 +339,30 @@ public class SurfaceViewRenderer extends SurfaceView
@Override
protected void onMeasure(int widthSpec, int heightSpec) {
synchronized (layoutLock) {
- this.widthSpec = widthSpec;
- this.heightSpec = heightSpec;
- final Point size = getDesiredLayoutSize();
- setMeasuredDimension(size.x, size.y);
+ if (frameWidth == 0 || frameHeight == 0) {
+ super.onMeasure(widthSpec, heightSpec);
+ return;
+ }
+ desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
+ if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
+ // Clear the surface as soon as possible, before the layout change, to avoid stretched video
+ // and other rendering artifacts. Don't wait for it to finish, because the IO thread should
+ // never be blocked; this is a best-effort attempt.
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
+ }
+ }
+ }
+ setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
synchronized (layoutLock) {
- layoutWidth = right - left;
- layoutHeight = bottom - top;
+ layoutSize.x = right - left;
+ layoutSize.y = bottom - top;
}
// Might have a pending frame waiting for a layout of correct size.
runOnRenderThread(renderFrameRunnable);
@@ -337,7 +371,7 @@ public class SurfaceViewRenderer extends SurfaceView
// SurfaceHolder.Callback interface.
@Override
public void surfaceCreated(final SurfaceHolder holder) {
- Logging.d(TAG, "Surface created");
+ Logging.d(TAG, getResourceName() + "Surface created.");
synchronized (layoutLock) {
isSurfaceCreated = true;
}
@@ -346,11 +380,11 @@ public class SurfaceViewRenderer extends SurfaceView
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
- Logging.d(TAG, "Surface destroyed");
+ Logging.d(TAG, getResourceName() + "Surface destroyed.");
synchronized (layoutLock) {
isSurfaceCreated = false;
- surfaceWidth = 0;
- surfaceHeight = 0;
+ surfaceSize.x = 0;
+ surfaceSize.y = 0;
}
runOnRenderThread(new Runnable() {
@Override public void run() {
@@ -361,10 +395,10 @@ public class SurfaceViewRenderer extends SurfaceView
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
- Logging.d(TAG, "Surface changed: " + width + "x" + height);
+ Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
synchronized (layoutLock) {
- surfaceWidth = width;
- surfaceHeight = height;
+ surfaceSize.x = width;
+ surfaceSize.y = height;
}
// Might have a pending frame waiting for a surface of correct size.
runOnRenderThread(renderFrameRunnable);
@@ -381,26 +415,35 @@ public class SurfaceViewRenderer extends SurfaceView
}
}
+ private String getResourceName() {
+ try {
+ return getResources().getResourceEntryName(getId()) + ": ";
+ } catch (NotFoundException e) {
+ return "";
+ }
+ }
+
+ private void makeBlack() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ if (eglBase != null && eglBase.hasSurface()) {
+ GLES20.glClearColor(0, 0, 0, 0);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
/**
* Requests new layout if necessary. Returns true if layout and surface size are consistent.
*/
private boolean checkConsistentLayout() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
synchronized (layoutLock) {
- final Point desiredLayoutSize = getDesiredLayoutSize();
- if (desiredLayoutSize.x != layoutWidth || desiredLayoutSize.y != layoutHeight) {
- Logging.d(TAG, "Requesting new layout with size: "
- + desiredLayoutSize.x + "x" + desiredLayoutSize.y);
- // Request layout update on UI thread.
- post(new Runnable() {
- @Override public void run() {
- requestLayout();
- }
- });
- return false;
- }
- // Wait for requestLayout() to propagate through this sequence before returning true:
- // requestLayout() -> onMeasure() -> onLayout() -> surfaceChanged().
- return surfaceWidth == layoutWidth && surfaceHeight == layoutHeight;
+ // Return false while we are in the middle of a layout change.
+ return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
}
}
@@ -408,61 +451,51 @@ public class SurfaceViewRenderer extends SurfaceView
* Renders and releases |pendingFrame|.
*/
private void renderFrameOnRenderThread() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ // Fetch and render |pendingFrame|.
+ final VideoRenderer.I420Frame frame;
+ synchronized (frameLock) {
+ if (pendingFrame == null) {
+ return;
+ }
+ frame = pendingFrame;
+ pendingFrame = null;
+ }
if (eglBase == null || !eglBase.hasSurface()) {
- Logging.d(TAG, "No surface to draw on");
+ Logging.d(TAG, getResourceName() + "No surface to draw on");
+ VideoRenderer.renderFrameDone(frame);
return;
}
if (!checkConsistentLayout()) {
// Output intermediate black frames while the layout is updated.
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- eglBase.swapBuffers();
+ makeBlack();
+ VideoRenderer.renderFrameDone(frame);
return;
}
// After a surface size change, the EGLSurface might still have a buffer of the old size in the
// pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
// changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
synchronized (layoutLock) {
- if (eglBase.surfaceWidth() != surfaceWidth || eglBase.surfaceHeight() != surfaceHeight) {
- GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
- eglBase.swapBuffers();
+ if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
+ makeBlack();
}
}
- // Fetch and render |pendingFrame|.
- final VideoRenderer.I420Frame frame;
- synchronized (frameLock) {
- if (pendingFrame == null) {
- return;
- }
- frame = pendingFrame;
- pendingFrame = null;
- }
final long startTimeNs = System.nanoTime();
- final float[] samplingMatrix;
- if (frame.yuvFrame) {
- // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
- // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
- // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
- // matrix.
- samplingMatrix = RendererCommon.verticalFlipMatrix();
- } else {
- // TODO(magjed): Move updateTexImage() to the video source instead.
- SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
- surfaceTexture.updateTexImage();
- samplingMatrix = new float[16];
- surfaceTexture.getTransformMatrix(samplingMatrix);
- }
-
final float[] texMatrix;
synchronized (layoutLock) {
final float[] rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
+ RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
- mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
+ mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
}
- GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
+ // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
+ // a workaround for bug 5147. Performance will be slightly worse.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (frame.yuvFrame) {
// Make sure YUV textures are allocated.
if (yuvTextures == null) {
@@ -471,11 +504,11 @@ public class SurfaceViewRenderer extends SurfaceView
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
}
- drawer.uploadYuvData(
+ yuvUploader.uploadYuvData(
yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
- drawer.drawYuv(yuvTextures, texMatrix);
+ drawer.drawYuv(yuvTextures, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
} else {
- drawer.drawOes(frame.textureId, texMatrix);
+ drawer.drawOes(frame.textureId, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
}
eglBase.swapBuffers();
@@ -483,6 +516,12 @@ public class SurfaceViewRenderer extends SurfaceView
synchronized (statisticsLock) {
if (framesRendered == 0) {
firstFrameTimeNs = startTimeNs;
+ synchronized (layoutLock) {
+ Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
}
++framesRendered;
renderTimeNs += (System.nanoTime() - startTimeNs);
@@ -508,32 +547,32 @@ public class SurfaceViewRenderer extends SurfaceView
synchronized (layoutLock) {
if (frameWidth != frame.width || frameHeight != frame.height
|| frameRotation != frame.rotationDegree) {
+ Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
- final String id = getResources().getResourceEntryName(getId());
- if (frameWidth == 0 || frameHeight == 0) {
- Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
- rendererEvents.onFirstFrameRendered();
- }
- Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to "
- + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
frameWidth = frame.width;
frameHeight = frame.height;
frameRotation = frame.rotationDegree;
+ post(new Runnable() {
+ @Override public void run() {
+ requestLayout();
+ }
+ });
}
}
}
private void logStatistics() {
synchronized (statisticsLock) {
- Logging.d(TAG, "ID: " + getResources().getResourceEntryName(getId()) + ". Frames received: "
+ Logging.d(TAG, getResourceName() + "Frames received: "
+ framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
- Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
- " ms. FPS: " + (float) framesRendered * 1e9 / timeSinceFirstFrameNs);
- Logging.d(TAG, "Average render time: "
+ Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+ Logging.d(TAG, getResourceName() + "Average render time: "
+ (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
}
}
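A sketch of the renderer lifecycle with the new init() overload (|eglBase|, |rendererEvents| and the view id are assumptions, not part of this patch):

// Sketch: typical init()/release() cycle.
SurfaceViewRenderer renderer = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
// Convenience form (plain config, stock GlRectDrawer)...
renderer.init(eglBase.getEglBaseContext(), rendererEvents);
// ...or the explicit form; do not call both, a second init() throws IllegalStateException:
// renderer.init(eglBase.getEglBaseContext(), rendererEvents,
//     EglBase.CONFIG_PLAIN, new GlRectDrawer());
// When done, release; the renderer releases |drawer| itself, and init()/release()
// cycles may be repeated.
renderer.release();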
diff --git a/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java b/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
index 0d8968aba9..e60ead9f00 100644
--- a/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
+++ b/talk/app/webrtc/java/android/org/webrtc/ThreadUtils.java
@@ -28,11 +28,13 @@
package org.webrtc;
import android.os.Handler;
+import android.os.SystemClock;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
-final class ThreadUtils {
+public class ThreadUtils {
/**
* Utility class to be used for checking that a method is called on the correct thread.
*/
@@ -86,6 +88,29 @@ final class ThreadUtils {
}
}
+ public static boolean joinUninterruptibly(final Thread thread, long timeoutMs) {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ boolean wasInterrupted = false;
+ while (timeRemainingMs > 0) {
+ try {
+ thread.join(timeRemainingMs);
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ }
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ return !thread.isAlive();
+ }
+
public static void joinUninterruptibly(final Thread thread) {
executeUninterruptibly(new BlockingOperation() {
@Override
@@ -104,6 +129,30 @@ final class ThreadUtils {
});
}
+ public static boolean awaitUninterruptibly(CountDownLatch barrier, long timeoutMs) {
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ long timeRemainingMs = timeoutMs;
+ boolean wasInterrupted = false;
+ boolean result = false;
+ do {
+ try {
+ result = barrier.await(timeRemainingMs, TimeUnit.MILLISECONDS);
+ break;
+ } catch (InterruptedException e) {
+ // Someone is asking us to return early at our convenience. We can't cancel this operation,
+ // but we should preserve the information and pass it along.
+ wasInterrupted = true;
+ final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
+ timeRemainingMs = timeoutMs - elapsedTimeMs;
+ }
+ } while (timeRemainingMs > 0);
+ // Pass interruption information along.
+ if (wasInterrupted) {
+ Thread.currentThread().interrupt();
+ }
+ return result;
+ }
+
/**
* Post |callable| to |handler| and wait for the result.
*/
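A sketch of the new timed helpers in a hypothetical shutdown sequence; note that both variants restore the interrupt flag before returning:

final CountDownLatch cleanupDone = new CountDownLatch(1);
final Thread worker = new Thread(new Runnable() {
  @Override public void run() {
    // ... release resources ...
    cleanupDone.countDown();
  }
});
worker.start();
if (!ThreadUtils.awaitUninterruptibly(cleanupDone, 500 /* timeoutMs */)) {
  // Cleanup did not signal within the timeout.
}
if (!ThreadUtils.joinUninterruptibly(worker, 1000 /* timeoutMs */)) {
  // The thread is still alive after the timeout.
}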
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
index 4caefc513d..36f60edd5c 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -28,9 +28,6 @@
package org.webrtc;
import android.content.Context;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.Camera.PreviewCallback;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
@@ -53,9 +50,6 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGL10;
-
// Android specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
// VideoCapturerAndroid.create();
@@ -68,21 +62,22 @@ import javax.microedition.khronos.egl.EGL10;
// camera thread. The internal *OnCameraThread() methods must check |camera| for null to determine
// whether the camera has been stopped.
@SuppressWarnings("deprecation")
-public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallback,
+public class VideoCapturerAndroid extends VideoCapturer implements
+ android.hardware.Camera.PreviewCallback,
SurfaceTextureHelper.OnTextureFrameAvailableListener {
private final static String TAG = "VideoCapturerAndroid";
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private final static int CAMERA_FREEZE_REPORT_TIMEOUT_MS = 6000;
- private Camera camera; // Only non-null while capturing.
+ private android.hardware.Camera camera; // Only non-null while capturing.
private HandlerThread cameraThread;
private final Handler cameraThreadHandler;
private Context applicationContext;
// Synchronization lock for |id|.
private final Object cameraIdLock = new Object();
private int id;
- private Camera.CameraInfo info;
- private final FramePool videoBuffers;
- private final CameraStatistics cameraStatistics = new CameraStatistics();
+ private android.hardware.Camera.CameraInfo info;
+ private final CameraStatistics cameraStatistics;
// Remember the requested format in case we want to switch cameras.
private int requestedWidth;
private int requestedHeight;
@@ -94,17 +89,28 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private CapturerObserver frameObserver = null;
private final CameraEventsHandler eventsHandler;
private boolean firstFrameReported;
+ // Arbitrary queue depth. Higher number means more memory allocated & held,
+ // lower number means more sensitivity to processing time in the client (and
+ // potentially stalling the capturer if it runs out of buffers to write to).
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+ private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
private final boolean isCapturingToTexture;
- private final SurfaceTextureHelper surfaceHelper;
+ final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
// The camera API can output one old frame after the camera has been switched or the resolution
// has been changed. This flag is used for dropping the first frame after camera restart.
private boolean dropNextFrame = false;
+ // |openCameraOnCodecThreadRunner| is used for retrying to open the camera if it is in use by
+ // another application when startCaptureOnCameraThread is called.
+ private Runnable openCameraOnCodecThreadRunner;
+ private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+ private final static int OPEN_CAMERA_DELAY_MS = 500;
+ private int openCameraAttempts;
// Camera error callback.
- private final Camera.ErrorCallback cameraErrorCallback =
- new Camera.ErrorCallback() {
+ private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
+ new android.hardware.Camera.ErrorCallback() {
@Override
- public void onError(int error, Camera camera) {
+ public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
@@ -120,47 +126,45 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
private final Runnable cameraObserver = new Runnable() {
+ private int freezePeriodCount;
@Override
public void run() {
int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
/ CAMERA_OBSERVER_PERIOD_MS;
- Logging.d(TAG, "Camera fps: " + cameraFps +
- ". Pending buffers: " + cameraStatistics.pendingFramesTimeStamps());
+ Logging.d(TAG, "Camera fps: " + cameraFps +".");
if (cameraFramesCount == 0) {
- Logging.e(TAG, "Camera freezed.");
- if (eventsHandler != null) {
- eventsHandler.onCameraError("Camera failure.");
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMEOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera freezed.");
+ if (surfaceHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
}
} else {
- cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ freezePeriodCount = 0;
}
+ cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
}
};
private static class CameraStatistics {
private int frameCount = 0;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
- private final Set<Long> timeStampsNs = new HashSet<Long>();
CameraStatistics() {
threadChecker.detachThread();
}
- public void addPendingFrame(long timestamp) {
+ public void addFrame() {
threadChecker.checkIsOnValidThread();
++frameCount;
- timeStampsNs.add(timestamp);
- }
-
- public void frameReturned(long timestamp) {
- threadChecker.checkIsOnValidThread();
- if (!timeStampsNs.contains(timestamp)) {
- throw new IllegalStateException(
- "CameraStatistics.frameReturned called with unknown timestamp " + timestamp);
- }
- timeStampsNs.remove(timestamp);
}
public int getAndResetFrameCount() {
@@ -169,28 +173,16 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
frameCount = 0;
return count;
}
-
- // Return number of pending frames that have not been returned.
- public int pendingFramesCount() {
- threadChecker.checkIsOnValidThread();
- return timeStampsNs.size();
- }
-
- public String pendingFramesTimeStamps() {
- threadChecker.checkIsOnValidThread();
- List<Long> timeStampsMs = new ArrayList<Long>();
- for (long ts : timeStampsNs) {
- timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(ts));
- }
- return timeStampsMs.toString();
- }
}
public static interface CameraEventsHandler {
- // Camera error handler - invoked when camera stops receiving frames
+ // Camera error handler - invoked when the camera cannot be opened
// or any camera exception happens on camera thread.
void onCameraError(String errorDescription);
+ // Invoked when the camera stops receiving frames.
+ void onCameraFreezed(String errorDescription);
+
// Callback invoked when camera is opening.
void onCameraOpening(int cameraId);
@@ -216,7 +208,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
public static VideoCapturerAndroid create(String name,
- CameraEventsHandler eventsHandler, EGLContext sharedEglContext) {
+ CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
final int cameraId = lookupDeviceName(name);
if (cameraId == -1) {
return null;
@@ -224,7 +216,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
final VideoCapturerAndroid capturer = new VideoCapturerAndroid(cameraId, eventsHandler,
sharedEglContext);
- capturer.setNativeCapturer(nativeCreateVideoCapturer(capturer));
+ capturer.setNativeCapturer(
+ nativeCreateVideoCapturer(capturer, capturer.surfaceHelper));
return capturer;
}
@@ -243,7 +236,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Switch camera to the next valid camera id. This can only be called while
// the camera is running.
public void switchCamera(final CameraSwitchHandler handler) {
- if (Camera.getNumberOfCameras() < 2) {
+ if (android.hardware.Camera.getNumberOfCameras() < 2) {
if (handler != null) {
handler.onCameraSwitchError("No camera to switch to.");
}
@@ -274,7 +267,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
pendingCameraSwitch = false;
}
if (handler != null) {
- handler.onCameraSwitchDone(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
+ handler.onCameraSwitchDone(
+ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
}
}
});
@@ -282,6 +276,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Requests a new output format from the video capturer. Captured frames
// by the camera will be scaled/or dropped by the video capturer.
+ // It does not matter if width and height are flipped. For example, |width| = 640,
+ // |height| = 480 produces the same result as |width| = 480, |height| = 640.
// TODO(magjed/perkj): Document what this function does. Change name?
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
cameraThreadHandler.post(new Runnable() {
@@ -303,7 +299,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Helper function to retrieve the current camera id synchronously. Note that the camera id might
// change at any point by switchCamera() calls.
- private int getCurrentCameraId() {
+ int getCurrentCameraId() {
synchronized (cameraIdLock) {
return id;
}
@@ -329,20 +325,19 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
- EGLContext sharedContext) {
- Logging.d(TAG, "VideoCapturerAndroid");
+ EglBase.Context sharedContext) {
this.id = cameraId;
this.eventsHandler = eventsHandler;
cameraThread = new HandlerThread(TAG);
cameraThread.start();
cameraThreadHandler = new Handler(cameraThread.getLooper());
- videoBuffers = new FramePool(cameraThread);
isCapturingToTexture = (sharedContext != null);
- surfaceHelper = SurfaceTextureHelper.create(
- isCapturingToTexture ? sharedContext : EGL10.EGL_NO_CONTEXT, cameraThreadHandler);
+ cameraStatistics = new CameraStatistics();
+ surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
if (isCapturingToTexture) {
surfaceHelper.setListener(this);
}
+ Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
}
private void checkIsOnCameraThread() {
@@ -355,13 +350,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// found. If |deviceName| is empty, the first available device is used.
private static int lookupDeviceName(String deviceName) {
Logging.d(TAG, "lookupDeviceName: " + deviceName);
- if (deviceName == null || Camera.getNumberOfCameras() == 0) {
+ if (deviceName == null || android.hardware.Camera.getNumberOfCameras() == 0) {
return -1;
}
if (deviceName.isEmpty()) {
return 0;
}
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
return i;
}
@@ -382,14 +377,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
if (camera != null) {
throw new IllegalStateException("Release called while camera is running");
}
- if (cameraStatistics.pendingFramesCount() != 0) {
- throw new IllegalStateException("Release called with pending frames left");
- }
}
});
- surfaceHelper.disconnect();
- cameraThread.quit();
- ThreadUtils.joinUninterruptibly(cameraThread);
+ surfaceHelper.disconnect(cameraThreadHandler);
cameraThread = null;
}
@@ -413,6 +403,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
if (frameObserver == null) {
throw new RuntimeException("frameObserver not set.");
}
+
cameraThreadHandler.post(new Runnable() {
@Override public void run() {
startCaptureOnCameraThread(width, height, framerate, frameObserver,
@@ -422,8 +413,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
private void startCaptureOnCameraThread(
- int width, int height, int framerate, CapturerObserver frameObserver,
- Context applicationContext) {
+ final int width, final int height, final int framerate, final CapturerObserver frameObserver,
+ final Context applicationContext) {
Throwable error = null;
checkIsOnCameraThread();
if (camera != null) {
@@ -431,17 +422,36 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
this.applicationContext = applicationContext;
this.frameObserver = frameObserver;
+ this.firstFrameReported = false;
+
try {
- synchronized (cameraIdLock) {
- Logging.d(TAG, "Opening camera " + id);
- firstFrameReported = false;
- if (eventsHandler != null) {
- eventsHandler.onCameraOpening(id);
+ try {
+ synchronized (cameraIdLock) {
+ Logging.d(TAG, "Opening camera " + id);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraOpening(id);
+ }
+ camera = android.hardware.Camera.open(id);
+ info = new android.hardware.Camera.CameraInfo();
+ android.hardware.Camera.getCameraInfo(id, info);
+ }
+ } catch (RuntimeException e) {
+ openCameraAttempts++;
+ if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
+ Logging.e(TAG, "Camera.open failed, retrying", e);
+ openCameraOnCodecThreadRunner = new Runnable() {
+ @Override public void run() {
+ startCaptureOnCameraThread(width, height, framerate, frameObserver,
+ applicationContext);
+ }
+ };
+ cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
+ return;
}
- camera = Camera.open(id);
- info = new Camera.CameraInfo();
- Camera.getCameraInfo(id, info);
+ openCameraAttempts = 0;
+ throw e;
}
+
try {
camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
} catch (IOException e) {
@@ -485,17 +495,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
requestedFramerate = framerate;
// Find closest supported format for |width| x |height| @ |framerate|.
- final Camera.Parameters parameters = camera.getParameters();
+ final android.hardware.Camera.Parameters parameters = camera.getParameters();
final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
- final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
- parameters.getSupportedPreviewSizes(), width, height);
+ final android.hardware.Camera.Size previewSize =
+ CameraEnumerationAndroid.getClosestSupportedSize(
+ parameters.getSupportedPreviewSizes(), width, height);
final CaptureFormat captureFormat = new CaptureFormat(
previewSize.width, previewSize.height,
- range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
- range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
// Check if we are already using this capture format, then we don't need to do anything.
- if (captureFormat.equals(this.captureFormat)) {
+ if (captureFormat.isSameFormat(this.captureFormat)) {
return;
}
@@ -511,11 +522,15 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
}
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
- parameters.setPreviewFormat(captureFormat.imageFormat);
+
+ if (!isCapturingToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
// Picture size is for taking pictures and not for preview/video, but we need to set it anyway
// as a workaround for an aspect ratio problem on Nexus 7.
- final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
- parameters.getSupportedPictureSizes(), width, height);
+ final android.hardware.Camera.Size pictureSize =
+ CameraEnumerationAndroid.getClosestSupportedSize(
+ parameters.getSupportedPictureSizes(), width, height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
// Temporarily stop preview if it's already running.
@@ -532,13 +547,19 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
this.captureFormat = captureFormat;
List<String> focusModes = parameters.getSupportedFocusModes();
- if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
- parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
if (!isCapturingToTexture) {
- videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
+ queuedBuffers.clear();
+ final int frameSize = captureFormat.frameSize();
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ queuedBuffers.add(buffer.array());
+ camera.addCallbackBuffer(buffer.array());
+ }
camera.setPreviewCallbackWithBuffer(this);
}
camera.startPreview();
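The loop above allocates NUMBER_OF_CAPTURE_BUFFERS direct buffers of captureFormat.frameSize() bytes each. A sketch with the NV21 size spelled out (the dimensions and |camera| are assumptions; NV21 uses 12 bits per pixel):

// Sketch of the capture-buffer allocation; frameSize = width * height * 3 / 2 for NV21.
int width = 640, height = 480;
int frameSize = width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8; // 460800
for (int i = 0; i < 3 /* NUMBER_OF_CAPTURE_BUFFERS */; ++i) {
  ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
  // Remembering the backing array (as |queuedBuffers| does above) lets onPreviewFrame()
  // recognize and drop stale buffers that the camera may deliver after a restart.
  camera.addCallbackBuffer(buffer.array());
}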
@@ -561,6 +582,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private void stopCaptureOnCameraThread() {
checkIsOnCameraThread();
Logging.d(TAG, "stopCaptureOnCameraThread");
+ if (openCameraOnCodecThreadRunner != null) {
+ cameraThreadHandler.removeCallbacks(openCameraOnCodecThreadRunner);
+ }
+ openCameraAttempts = 0;
if (camera == null) {
Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
return;
@@ -571,13 +596,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
Logging.d(TAG, "Stop preview.");
camera.stopPreview();
camera.setPreviewCallbackWithBuffer(null);
- if (!isCapturingToTexture()) {
- videoBuffers.stopReturnBuffersToCamera();
- Logging.d(TAG, "stopReturnBuffersToCamera called."
- + (cameraStatistics.pendingFramesCount() == 0?
- " All buffers have been returned."
- : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + "."));
- }
+ queuedBuffers.clear();
captureFormat = null;
Logging.d(TAG, "Release camera.");
@@ -593,7 +612,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
Logging.d(TAG, "switchCameraOnCameraThread");
stopCaptureOnCameraThread();
synchronized (cameraIdLock) {
- id = (id + 1) % Camera.getNumberOfCameras();
+ id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
}
dropNextFrame = true;
startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
@@ -612,17 +631,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
frameObserver.onOutputFormatRequest(width, height, framerate);
}
- public void returnBuffer(final long timeStamp) {
- cameraThreadHandler.post(new Runnable() {
- @Override public void run() {
- cameraStatistics.frameReturned(timeStamp);
- if (isCapturingToTexture) {
- surfaceHelper.returnTextureFrame();
- } else {
- videoBuffers.returnBuffer(timeStamp);
- }
- }
- });
+ // Exposed for testing purposes only.
+ Handler getCameraThreadHandler() {
+ return cameraThreadHandler;
}
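Exposing the handler lets tests synchronize with the camera thread now that the returnBuffer() round-trip is gone. A hypothetical test helper (same package assumed, since the accessor is package-private) that blocks until previously posted camera-thread work has drained:

    import java.util.concurrent.CountDownLatch;

    static void waitForCameraThread(VideoCapturerAndroid capturer) throws InterruptedException {
      final CountDownLatch barrier = new CountDownLatch(1);
      capturer.getCameraThreadHandler().post(new Runnable() {
        @Override
        public void run() {
          barrier.countDown();
        }
      });
      barrier.await();
    }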
private int getDeviceOrientation() {
@@ -650,7 +661,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
- if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
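As a worked example of the arithmetic above (values assumed for illustration): a back-facing sensor mounted at 90 degrees on a device currently rotated to 180 degrees yields a frame orientation of 270 degrees.

    final int deviceRotation = 180;            // getDeviceOrientation()
    final int sensorOrientation = 90;          // info.orientation
    final int rotation = 360 - deviceRotation; // back camera: 180
    final int frameOrientation = (sensorOrientation + rotation) % 360;  // 270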
@@ -658,9 +669,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Called on cameraThread, so this method must not be declared "synchronized".
@Override
- public void onPreviewFrame(byte[] data, Camera callbackCamera) {
+ public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
checkIsOnCameraThread();
- if (camera == null) {
+ if (camera == null || !queuedBuffers.contains(data)) {
+      // The camera has been stopped, or |data| is a stale buffer from a previous configuration.
return;
}
if (camera != callbackCamera) {
@@ -675,16 +687,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
firstFrameReported = true;
}
- // Mark the frame owning |data| as used.
- // Note that since data is directBuffer,
- // data.length >= videoBuffers.frameSize.
- if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
- cameraStatistics.addPendingFrame(captureTimeNs);
- frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
- captureFormat.height, getFrameOrientation(), captureTimeNs);
- } else {
- Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
- }
+ cameraStatistics.addFrame();
+ frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+ getFrameOrientation(), captureTimeNs);
+ camera.addCallbackBuffer(data);
}
@Override
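The queuedBuffers.contains(data) guard above works because Java arrays do not override equals()/hashCode(): a HashSet<byte[]> matches by object identity, which is exactly what is needed to recognize the specific buffer instances handed to the camera. A self-contained illustration:

    import java.util.HashSet;
    import java.util.Set;

    public class ArrayIdentityDemo {
      public static void main(String[] args) {
        Set<byte[]> buffers = new HashSet<byte[]>();
        byte[] a = new byte[] {1, 2, 3};
        byte[] b = new byte[] {1, 2, 3};
        buffers.add(a);
        System.out.println(buffers.contains(a)); // true: same instance
        System.out.println(buffers.contains(b)); // false: equal contents, different instance
      }
    }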
@@ -696,135 +702,22 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
surfaceHelper.returnTextureFrame();
return;
}
- if (!dropNextFrame) {
+ if (dropNextFrame) {
surfaceHelper.returnTextureFrame();
- dropNextFrame = true;
+ dropNextFrame = false;
return;
}
int rotation = getFrameOrientation();
- if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
}
- transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, rotation);
-
- final int rotatedWidth = (rotation % 180 == 0) ? captureFormat.width : captureFormat.height;
- final int rotatedHeight = (rotation % 180 == 0) ? captureFormat.height : captureFormat.width;
- cameraStatistics.addPendingFrame(timestampNs);
- frameObserver.onTextureFrameCaptured(rotatedWidth, rotatedHeight, oesTextureId,
- transformMatrix, timestampNs);
- }
-
- // Class used for allocating and bookkeeping video frames. All buffers are
- // direct allocated so that they can be directly used from native code. This class is
- // not thread-safe, and enforces single thread use.
- private static class FramePool {
- // Thread that all calls should be made on.
- private final Thread thread;
- // Arbitrary queue depth. Higher number means more memory allocated & held,
- // lower number means more sensitivity to processing time in the client (and
- // potentially stalling the capturer if it runs out of buffers to write to).
- private static final int numCaptureBuffers = 3;
- // This container tracks the buffers added as camera callback buffers. It is needed for finding
- // the corresponding ByteBuffer given a byte[].
- private final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>();
- // This container tracks the frames that have been sent but not returned. It is needed for
- // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp.
- private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
- private int frameSize = 0;
- private Camera camera;
-
- public FramePool(Thread thread) {
- this.thread = thread;
- }
-
- private void checkIsOnValidThread() {
- if (Thread.currentThread() != thread) {
- throw new IllegalStateException("Wrong thread");
- }
- }
-
- // Discards previous queued buffers and adds new callback buffers to camera.
- public void queueCameraBuffers(int frameSize, Camera camera) {
- checkIsOnValidThread();
- this.camera = camera;
- this.frameSize = frameSize;
-
- queuedBuffers.clear();
- for (int i = 0; i < numCaptureBuffers; ++i) {
- final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
- camera.addCallbackBuffer(buffer.array());
- queuedBuffers.put(buffer.array(), buffer);
- }
- Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers
- + " buffers of size " + frameSize + ".");
- }
-
- public void stopReturnBuffersToCamera() {
- checkIsOnValidThread();
- this.camera = null;
- queuedBuffers.clear();
- // Frames in |pendingBuffers| need to be kept alive until they are returned.
- }
-
- public boolean reserveByteBuffer(byte[] data, long timeStamp) {
- checkIsOnValidThread();
- final ByteBuffer buffer = queuedBuffers.remove(data);
- if (buffer == null) {
- // Frames might be posted to |onPreviewFrame| with the previous format while changing
- // capture format in |startPreviewOnCameraThread|. Drop these old frames.
- Logging.w(TAG, "Received callback buffer from previous configuration with length: "
- + (data == null ? "null" : data.length));
- return false;
- }
- if (buffer.capacity() != frameSize) {
- throw new IllegalStateException("Callback buffer has unexpected frame size");
- }
- if (pendingBuffers.containsKey(timeStamp)) {
- Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
- return false;
- }
- pendingBuffers.put(timeStamp, buffer);
- if (queuedBuffers.isEmpty()) {
- Logging.d(TAG, "Camera is running out of capture buffers.");
- }
- return true;
- }
-
- public void returnBuffer(long timeStamp) {
- checkIsOnValidThread();
- final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
- if (returnedFrame == null) {
- throw new RuntimeException("unknown data buffer with time stamp "
- + timeStamp + "returned?!?");
- }
-
- if (camera != null && returnedFrame.capacity() == frameSize) {
- camera.addCallbackBuffer(returnedFrame.array());
- if (queuedBuffers.isEmpty()) {
- Logging.d(TAG, "Frame returned when camera is running out of capture"
- + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
- }
- queuedBuffers.put(returnedFrame.array(), returnedFrame);
- return;
- }
-
- if (returnedFrame.capacity() != frameSize) {
- Logging.d(TAG, "returnBuffer with time stamp "
- + TimeUnit.NANOSECONDS.toMillis(timeStamp)
- + " called with old frame size, " + returnedFrame.capacity() + ".");
- // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
- // created in queueCameraBuffers so this must be an old buffer.
- return;
- }
-
- Logging.d(TAG, "returnBuffer with time stamp "
- + TimeUnit.NANOSECONDS.toMillis(timeStamp)
- + " called after camera has been stopped.");
- }
+ cameraStatistics.addFrame();
+ frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+ transformMatrix, rotation, timestampNs);
}
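The front-camera branch above folds the mirror compensation into the sampling matrix, while the rotation now travels alongside the frame as a plain integer. RendererCommon.horizontalFlipMatrix() itself is not part of this diff; one way to build such a matrix for texture coordinates in [0, 1] is to map x to 1 - x (the actual RendererCommon implementation may differ):

    import android.opengl.Matrix;

    // Sketch only: translate by 1 in x, then scale x by -1, so a sampled
    // coordinate x becomes 1 - x. With android.opengl.Matrix the calls
    // post-multiply, so the scale is applied to the vector first.
    public static float[] horizontalFlip() {
      final float[] m = new float[16];
      Matrix.setIdentityM(m, 0);
      Matrix.translateM(m, 0, 1.0f, 0.0f, 0.0f);
      Matrix.scaleM(m, 0, -1.0f, 1.0f, 1.0f);
      return m;
    }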
// Interface used for providing callbacks to an observer.
@@ -835,13 +728,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
// Delivers a captured frame. Called on a Java thread owned by
// VideoCapturerAndroid.
- abstract void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
- int rotation, long timeStamp);
+ abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+ long timeStamp);
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturerAndroid.
abstract void onTextureFrameCaptured(
- int width, int height, int oesTextureId, float[] transformMatrix, long timestamp);
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp);
// Requests an output format from the video capturer. Captured frames
// by the camera will be scaled and/or dropped by the video capturer.
@@ -864,17 +758,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
}
@Override
- public void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
+ public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
- nativeOnByteBufferFrameCaptured(nativeCapturer, data, length, width, height, rotation,
+ nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
timeStamp);
}
@Override
public void onTextureFrameCaptured(
- int width, int height, int oesTextureId, float[] transformMatrix, long timestamp) {
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp) {
nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
- timestamp);
+ rotation, timestamp);
}
@Override
@@ -887,10 +782,12 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
byte[] data, int length, int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
- int oesTextureId, float[] transformMatrix, long timestamp);
+ int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnOutputFormatRequest(long nativeCapturer,
int width, int height, int framerate);
}
- private static native long nativeCreateVideoCapturer(VideoCapturerAndroid videoCapturer);
+ private static native long nativeCreateVideoCapturer(
+ VideoCapturerAndroid videoCapturer,
+ SurfaceTextureHelper surfaceHelper);
}
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index bacd0cf11f..bb6f01cea2 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -38,7 +38,7 @@ import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
-import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
@@ -59,7 +59,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private static Runnable eglContextReady = null;
private static final String TAG = "VideoRendererGui";
private GLSurfaceView surface;
- private static EGLContext eglContext = null;
+ private static EglBase.Context eglContext = null;
// Indicates if GLSurfaceView.Renderer.onSurfaceCreated was called.
// If true then for every newly created yuv image renderer createTexture()
// should be called. The variable is accessed on multiple threads and
@@ -69,8 +69,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private int screenHeight;
// List of yuv renderers.
private final ArrayList<YuvImageRenderer> yuvImageRenderers;
- // |drawer| is synchronized on |yuvImageRenderers|.
- private GlRectDrawer drawer;
// Render and draw threads.
private static Thread renderFrameThread;
private static Thread drawThread;
@@ -99,6 +97,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// currently leaking resources to avoid a rare crash in release() where the EGLContext has
// become invalid beforehand.
private int[] yuvTextures = { 0, 0, 0 };
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private final RendererCommon.GlDrawer drawer;
// Resources for making a deep copy of an incoming OES texture frame.
private GlTextureFrameBuffer textureCopy;
@@ -157,12 +157,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private YuvImageRenderer(
GLSurfaceView surface, int id,
int x, int y, int width, int height,
- RendererCommon.ScalingType scalingType, boolean mirror) {
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface;
this.id = id;
this.scalingType = scalingType;
this.mirror = mirror;
+ this.drawer = drawer;
layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
updateLayoutProperties = false;
rotationDegree = 0;
@@ -174,6 +175,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private synchronized void release() {
surface = null;
+ drawer.release();
synchronized (pendingFrameLock) {
if (pendingFrame != null) {
VideoRenderer.renderFrameDone(pendingFrame);
@@ -226,7 +228,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
- private void draw(GlRectDrawer drawer) {
+ private void draw() {
if (!seenFrame) {
// No frame received yet - nothing to render.
return;
@@ -241,29 +243,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
if (isNewFrame) {
+ rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+ pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
if (pendingFrame.yuvFrame) {
rendererType = RendererType.RENDERER_YUV;
- drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
+ yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
- // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
- // top-left corner of the image, but in glTexImage2D() the first element corresponds to
- // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
- // sampling matrix.
- final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
- rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
} else {
rendererType = RendererType.RENDERER_TEXTURE;
- // External texture rendering. Update texture image to latest and make a deep copy of
- // the external texture.
- // TODO(magjed): Move updateTexImage() to the video source instead.
- final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
- surfaceTexture.updateTexImage();
- final float[] samplingMatrix = new float[16];
- surfaceTexture.getTransformMatrix(samplingMatrix);
- rotatedSamplingMatrix =
- RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
-
+ // External texture rendering. Make a deep copy of the external texture.
// Reallocate offscreen texture if necessary.
textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
@@ -272,12 +260,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GlUtil.checkNoGLES2Error("glBindFramebuffer");
// Copy the OES texture content. This will also normalize the sampling matrix.
- GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
- drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
+ drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
+ 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
rotatedSamplingMatrix = RendererCommon.identityMatrix();
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glFinish();
}
copyTimeNs += (System.nanoTime() - now);
VideoRenderer.renderFrameDone(pendingFrame);
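The copy pass above is a standard render-to-texture: bind the framebuffer backing textureCopy, draw the external (OES) texture into it with the sampling matrix applied, then unbind. Baking the matrix into the copied pixels is what allows rotatedSamplingMatrix to be reset to identity, and the added glFinish() presumably ensures the GL commands complete before renderFrameDone() lets the producer reuse the source texture. Condensed, with names following the surrounding code (getFrameBufferId() is assumed from GlTextureFrameBuffer, not shown in this diff):

    // Condensed sketch of the copy pass (error checks omitted).
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
    drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
        0, 0, textureCopy.getWidth(), textureCopy.getHeight());
    rotatedSamplingMatrix = RendererCommon.identityMatrix();
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GLES20.glFinish(); // complete the copy before the frame is returned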
@@ -285,17 +274,17 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
}
- // OpenGL defaults to lower left origin - flip vertically.
- GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
- displayLayout.width(), displayLayout.height());
-
updateLayoutMatrix();
final float[] texMatrix =
RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+ // OpenGL defaults to lower left origin - flip viewport position vertically.
+ final int viewportY = screenHeight - displayLayout.bottom;
if (rendererType == RendererType.RENDERER_YUV) {
- drawer.drawYuv(yuvTextures, texMatrix);
+ drawer.drawYuv(yuvTextures, texMatrix,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
} else {
- drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
+ drawer.drawRgb(textureCopy.getTextureId(), texMatrix,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
}
if (isNewFrame) {
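Worked example of the viewport flip (values assumed): GL window coordinates grow upward from the bottom-left, while display layout rectangles grow downward from the top-left.

    final int screenHeight = 1280;  // assumed surface height
    final int layoutBottom = 640;   // displayLayout.bottom: layout fills the top half
    final int viewportY = screenHeight - layoutBottom;  // 640: upper half in GL coords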
@@ -314,7 +303,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) {
Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
- " ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
Logging.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us");
@@ -429,7 +418,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
eglContextReady = eglContextReadyCallback;
}
- public static synchronized EGLContext getEGLContext() {
+ public static synchronized EglBase.Context getEglBaseContext() {
return eglContext;
}
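A hedged usage sketch: the shared context is typically fetched after the eglContextReady callback fires and passed to components that must share textures with this renderer. Whether SurfaceTextureHelper.create() accepts an EglBase.Context at this revision is an assumption based on the diffstat, not shown in this diff:

    // Assumption: SurfaceTextureHelper.create(EglBase.Context) exists after
    // this change. Fetch the shared context once rendering is initialized.
    final EglBase.Context shared = VideoRendererGui.getEglBaseContext();
    final SurfaceTextureHelper helper = SurfaceTextureHelper.create(shared);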
@@ -477,6 +466,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
*/
public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
RendererCommon.ScalingType scalingType, boolean mirror) {
+ return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
+ }
+
+ /**
+ * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
+ * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
+   * drawing frames on the EGLSurface. VideoRendererGui takes ownership of |drawer| and is responsible for calling release() on it.
+ */
+ public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
// Check display region parameters.
if (x < 0 || x > 100 || y < 0 || y > 100 ||
width < 0 || width > 100 || height < 0 || height > 100 ||
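Usage example for the new overload, equivalent to the shorter overload above (which supplies a GlRectDrawer as the default):

    VideoRendererGui.YuvImageRenderer renderer = VideoRendererGui.create(
        0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT,
        false /* mirror */, new GlRectDrawer());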
@@ -490,7 +489,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
- x, y, width, height, scalingType, mirror);
+ x, y, width, height, scalingType, mirror, drawer);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
@@ -498,6 +497,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// rendering list.
final CountDownLatch countDownLatch = new CountDownLatch(1);
instance.surface.queueEvent(new Runnable() {
+ @Override
public void run() {
yuvImageRenderer.createTextures();
yuvImageRenderer.setScreenSize(
@@ -608,13 +608,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
// Store render EGL context.
synchronized (VideoRendererGui.class) {
- eglContext = ((EGL10) EGLContext.getEGL()).eglGetCurrentContext();
+ if (EglBase14.isEGL14Supported()) {
+ eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
+ } else {
+ eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
+ }
+
Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
}
synchronized (yuvImageRenderers) {
- // Create drawer for YUV/OES frames.
- drawer = new GlRectDrawer();
// Create textures for all images.
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.createTextures();
@@ -655,7 +658,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
- yuvImageRenderer.draw(drawer);
+ yuvImageRenderer.draw();
}
}
}