Diffstat (limited to 'app/webrtc'):
 app/webrtc/java/android/org/webrtc/VideoRendererGui.java | 34
 app/webrtc/java/jni/peerconnection_jni.cc                | 11
 2 files changed, 28 insertions(+), 17 deletions(-)
diff --git a/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index af625c0..48fbfce 100644
--- a/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -78,7 +78,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// clipped.
// SCALE_FILL - video frame is scaled to to fill the size of the view. Video
// aspect ratio is changed if necessary.
- private static enum ScalingType
+ public static enum ScalingType
{ SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_FILL };
private final String VERTEX_SHADER_STRING =
@@ -244,9 +244,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private float texTop;
private float texBottom;
private FloatBuffer textureVertices;
- // Texture UV coordinates offsets.
- private float texOffsetU;
- private float texOffsetV;
+ // Texture UV coordinates.
private FloatBuffer textureCoords;
// Flag if texture vertices or coordinates update is needed.
private boolean updateTextureProperties;
@@ -279,13 +277,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
};
textureVertices = directNativeFloatBuffer(textureVeticesFloat);
// Create texture UV coordinates.
- texOffsetU = 0;
- texOffsetV = 0;
float textureCoordinatesFloat[] = new float[] {
- texOffsetU, texOffsetV, // left top
- texOffsetU, 1.0f - texOffsetV, // left bottom
- 1.0f - texOffsetU, texOffsetV, // right top
- 1.0f - texOffsetU, 1.0f - texOffsetV // right bottom
+ 0, 0, 0, 1, 1, 0, 1, 1
};
textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
updateTextureProperties = false;
@@ -328,6 +321,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
float texBottom = this.texBottom;
float displayWidth = (texRight - texLeft) * screenWidth / 2;
float displayHeight = (texTop - texBottom) * screenHeight / 2;
+ Log.d(TAG, "ID: " + id + ". Display: " + displayWidth +
+ " x " + displayHeight + ". Video: " + videoWidth +
+ " x " + videoHeight);
if (displayWidth > 1 && displayHeight > 1 &&
videoWidth > 1 && videoHeight > 1) {
float displayAspectRatio = displayWidth / displayHeight;
@@ -345,6 +341,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
texTop -= deltaY;
texBottom += deltaY;
}
+ Log.d(TAG, " Texture vertices: (" + texLeft + "," + texBottom +
+ ") - (" + texRight + "," + texTop + ")");
// Re-allocate vertices buffer to adjust to video aspect ratio.
float textureVeticesFloat[] = new float[] {
texLeft, texTop,
@@ -355,12 +353,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
textureVertices = directNativeFloatBuffer(textureVeticesFloat);
}
if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
+ float texOffsetU = 0;
+ float texOffsetV = 0;
// Need to re-adjust UV coordinates to match display AR.
if (displayAspectRatio > videoAspectRatio) {
texOffsetV = (1.0f - videoAspectRatio / displayAspectRatio) / 2.0f;
} else {
texOffsetU = (1.0f - displayAspectRatio / videoAspectRatio) / 2.0f;
}
+ Log.d(TAG, " Texture UV offsets: " + texOffsetU + ", " + texOffsetV);
// Re-allocate coordinates buffer to adjust to display aspect ratio.
float textureCoordinatesFloat[] = new float[] {
texOffsetU, texOffsetV, // left top
@@ -575,14 +576,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
* (width, height). All parameters are in percentage of screen resolution.
*/
public static VideoRenderer createGui(
- int x, int y, int width, int height) throws Exception {
- YuvImageRenderer javaGuiRenderer = create(x, y, width, height);
+ int x, int y, int width, int height, ScalingType scalingType)
+ throws Exception {
+ YuvImageRenderer javaGuiRenderer = create(x, y, width, height, scalingType);
return new VideoRenderer(javaGuiRenderer);
}
public static VideoRenderer.Callbacks createGuiRenderer(
- int x, int y, int width, int height) {
- return create(x, y, width, height);
+ int x, int y, int width, int height, ScalingType scalingType) {
+ return create(x, y, width, height, scalingType);
}
/**
@@ -591,7 +593,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
* screen resolution.
*/
public static YuvImageRenderer create(
- int x, int y, int width, int height) {
+ int x, int y, int width, int height, ScalingType scalingType) {
// Check display region parameters.
if (x < 0 || x > 100 || y < 0 || y > 100 ||
width < 0 || width > 100 || height < 0 || height > 100 ||
@@ -605,7 +607,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, instance.yuvImageRenderers.size(),
- x, y, width, height, ScalingType.SCALE_ASPECT_FIT);
+ x, y, width, height, scalingType);
synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui -
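With ScalingType now public and threaded through createGui(), createGuiRenderer() and create(), call sites choose the scaling mode per renderer instead of always getting SCALE_ASPECT_FIT. A minimal call-site sketch, assuming the renderer's GLSurfaceView has already been handed to VideoRendererGui elsewhere (the class name and region coordinates below are hypothetical):

import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;
import org.webrtc.VideoRendererGui.ScalingType;

// Hypothetical call site illustrating the new signatures.
class RendererSetupSketch {
  VideoRenderer.Callbacks localRender;
  VideoRenderer remoteRender;

  void createRenderers() throws Exception {
    // Picture-in-picture local preview, letterboxed to keep its aspect ratio.
    localRender = VideoRendererGui.createGuiRenderer(
        70, 5, 25, 25, ScalingType.SCALE_ASPECT_FIT);
    // Full-screen remote video, cropped as needed to fill the region.
    remoteRender = VideoRendererGui.createGui(
        0, 0, 100, 100, ScalingType.SCALE_ASPECT_FILL);
  }
}

With SCALE_ASPECT_FILL, the UV offsets computed in updateTextureProperties crop the frame symmetrically: for example, a 16:9 video drawn into a 4:3 region gives texOffsetU = (1 - (4/3) / (16/9)) / 2 = 0.125, i.e. 12.5% of the frame width is dropped on each side.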
diff --git a/app/webrtc/java/jni/peerconnection_jni.cc b/app/webrtc/java/jni/peerconnection_jni.cc
index 27f69e4..92acd7b 100644
--- a/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/app/webrtc/java/jni/peerconnection_jni.cc
@@ -2055,8 +2055,17 @@ int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
render_egl_context_ = NULL;
} else {
render_egl_context_ = jni->NewGlobalRef(render_egl_context);
+ CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
+ jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
+ if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
+ ALOGE("Wrong EGL Context.");
+ jni->DeleteGlobalRef(render_egl_context_);
+ render_egl_context_ = NULL;
+ }
+ }
+ if (render_egl_context_ == NULL) {
+ ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
}
- ALOGD("VideoDecoder EGL context set.");
return 0;
}
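The JNI change verifies that the object passed as the decoder's render EGL context really is an android.opengl.EGLContext before keeping a global reference to it; anything else is released again and hardware surface decoding is simply disabled with a log message rather than failing later. On the Java side, a context of the expected type can be taken from the current EGL14 state; a sketch under that assumption (the plumbing that hands the context to the decoder is not shown, and the helper name is made up):

import android.opengl.EGL14;
import android.opengl.EGLContext;

class EglContextSketch {
  // Must run on a thread that currently has an EGL context bound (e.g. the
  // GLSurfaceView render thread); otherwise EGL_NO_CONTEXT is returned and
  // the native side keeps HW surface decoding disabled, as logged above.
  static EGLContext currentRenderEglContextOrNull() {
    EGLContext context = EGL14.eglGetCurrentContext();
    return EGL14.EGL_NO_CONTEXT.equals(context) ? null : context;
  }
}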