// Decompiled Java source (~204 lines, 8.1 KiB); extraction metadata header removed.
package org.webrtc;
|
|
|
|
import android.graphics.Matrix;
|
|
import android.graphics.Point;
|
|
import android.opengl.GLES20;
|
|
import androidx.annotation.Nullable;
|
|
import c.d.b.a.a;
|
|
import java.nio.ByteBuffer;
|
|
import org.webrtc.RendererCommon;
|
|
import org.webrtc.VideoFrame;
|
|
/**
 * Draws {@link VideoFrame}s onto the current GL surface via a {@link RendererCommon.GlDrawer},
 * handling both texture-backed and I420 (CPU memory) frame buffers.
 */
public class VideoFrameDrawer {

    public static final String TAG = "VideoFrameDrawer";

    // Unit-square reference points (origin, +x corner, +y corner) in texture coordinates.
    // calculateTransformedRenderSize() maps these through the render matrix to measure
    // the transformed frame's extents.
    public static final float[] srcPoints = {0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f};

    // Scratch output for Matrix.mapPoints(); holds the mapped srcPoints (3 x/y pairs).
    private final float[] dstPoints = new float[6];

    // Last I420 frame whose planes were uploaded to GL textures; used to skip
    // re-uploading when the same frame is drawn repeatedly.
    @Nullable
    private VideoFrame lastI420Frame;

    // Render target height in pixels after applying the caller's transform.
    private int renderHeight;

    // Reused transform composed for each drawFrame() call (avoids per-frame allocation).
    private final Matrix renderMatrix = new Matrix();

    // Allocated but not read in the visible code — presumably a leftover scratch field;
    // NOTE(review): confirm against the rest of the project before removing.
    private final Point renderSize = new Point();

    // Render target width in pixels after applying the caller's transform.
    private int renderWidth;

    // Uploads I420 planes into three luminance GL textures for drawYuv().
    private final YuvUploader yuvUploader = new YuvUploader(null);
|
|
|
|
/* renamed from: org.webrtc.VideoFrameDrawer$1 reason: invalid class name */
/**
 * Decompiler-generated switch-map holder for {@code VideoFrame.TextureBuffer.Type}.
 * The javac-style table maps enum ordinals to dense switch indices; the try/catch
 * blocks guard against enum constants removed in a different library version.
 * NOTE(review): nothing in this file reads this table — drawTexture() compares raw
 * ordinals directly — so it may only be kept for the synthetic YuvUploader constructor's
 * parameter type; confirm before deleting.
 */
public static /* synthetic */ class AnonymousClass1 {
    public static final /* synthetic */ int[] $SwitchMap$org$webrtc$VideoFrame$TextureBuffer$Type;

    static {
        // Forces enum class initialization before the table is populated.
        VideoFrame.TextureBuffer.Type.values();
        int[] iArr = new int[2];
        $SwitchMap$org$webrtc$VideoFrame$TextureBuffer$Type = iArr;
        try {
            iArr[VideoFrame.TextureBuffer.Type.OES.ordinal()] = 1;
        } catch (NoSuchFieldError unused) {
            // Constant missing at runtime; leave its slot as 0 (falls to switch default).
        }
        try {
            $SwitchMap$org$webrtc$VideoFrame$TextureBuffer$Type[VideoFrame.TextureBuffer.Type.RGB.ordinal()] = 2;
        } catch (NoSuchFieldError unused2) {
            // Same defensive handling as above.
        }
    }
}
|
|
|
|
public static class YuvUploader {
|
|
@Nullable
|
|
private ByteBuffer copyBuffer;
|
|
@Nullable
|
|
private int[] yuvTextures;
|
|
|
|
private YuvUploader() {
|
|
}
|
|
|
|
public /* synthetic */ YuvUploader(AnonymousClass1 r1) {
|
|
this();
|
|
}
|
|
|
|
@Nullable
|
|
public int[] getYuvTextures() {
|
|
return this.yuvTextures;
|
|
}
|
|
|
|
public void release() {
|
|
this.copyBuffer = null;
|
|
int[] iArr = this.yuvTextures;
|
|
if (iArr != null) {
|
|
GLES20.glDeleteTextures(3, iArr, 0);
|
|
this.yuvTextures = null;
|
|
}
|
|
}
|
|
|
|
@Nullable
|
|
public int[] uploadFromBuffer(VideoFrame.I420Buffer i420Buffer) {
|
|
return uploadYuvData(i420Buffer.getWidth(), i420Buffer.getHeight(), new int[]{i420Buffer.getStrideY(), i420Buffer.getStrideU(), i420Buffer.getStrideV()}, new ByteBuffer[]{i420Buffer.getDataY(), i420Buffer.getDataU(), i420Buffer.getDataV()});
|
|
}
|
|
|
|
@Nullable
|
|
public int[] uploadYuvData(int i, int i2, int[] iArr, ByteBuffer[] byteBufferArr) {
|
|
ByteBuffer byteBuffer;
|
|
ByteBuffer byteBuffer2;
|
|
int i3 = i / 2;
|
|
int[] iArr2 = {i, i3, i3};
|
|
int i4 = i2 / 2;
|
|
int[] iArr3 = {i2, i4, i4};
|
|
int i5 = 0;
|
|
for (int i6 = 0; i6 < 3; i6++) {
|
|
if (iArr[i6] > iArr2[i6]) {
|
|
i5 = Math.max(i5, iArr2[i6] * iArr3[i6]);
|
|
}
|
|
}
|
|
if (i5 > 0 && ((byteBuffer2 = this.copyBuffer) == null || byteBuffer2.capacity() < i5)) {
|
|
this.copyBuffer = ByteBuffer.allocateDirect(i5);
|
|
}
|
|
if (this.yuvTextures == null) {
|
|
this.yuvTextures = new int[3];
|
|
for (int i7 = 0; i7 < 3; i7++) {
|
|
this.yuvTextures[i7] = GlUtil.generateTexture(3553);
|
|
}
|
|
}
|
|
for (int i8 = 0; i8 < 3; i8++) {
|
|
GLES20.glActiveTexture(33984 + i8);
|
|
GLES20.glBindTexture(3553, this.yuvTextures[i8]);
|
|
if (iArr[i8] == iArr2[i8]) {
|
|
byteBuffer = byteBufferArr[i8];
|
|
} else {
|
|
YuvHelper.copyPlane(byteBufferArr[i8], iArr[i8], this.copyBuffer, iArr2[i8], iArr2[i8], iArr3[i8]);
|
|
byteBuffer = this.copyBuffer;
|
|
}
|
|
GLES20.glTexImage2D(3553, 0, 6409, iArr2[i8], iArr3[i8], 0, 6409, 5121, byteBuffer);
|
|
}
|
|
return this.yuvTextures;
|
|
}
|
|
}
|
|
|
|
private void calculateTransformedRenderSize(int i, int i2, @Nullable Matrix matrix) {
|
|
if (matrix == null) {
|
|
this.renderWidth = i;
|
|
this.renderHeight = i2;
|
|
return;
|
|
}
|
|
matrix.mapPoints(this.dstPoints, srcPoints);
|
|
for (int i3 = 0; i3 < 3; i3++) {
|
|
float[] fArr = this.dstPoints;
|
|
int i4 = i3 * 2;
|
|
int i5 = i4 + 0;
|
|
fArr[i5] = fArr[i5] * ((float) i);
|
|
int i6 = i4 + 1;
|
|
fArr[i6] = fArr[i6] * ((float) i2);
|
|
}
|
|
float[] fArr2 = this.dstPoints;
|
|
this.renderWidth = distance(fArr2[0], fArr2[1], fArr2[2], fArr2[3]);
|
|
float[] fArr3 = this.dstPoints;
|
|
this.renderHeight = distance(fArr3[0], fArr3[1], fArr3[4], fArr3[5]);
|
|
}
|
|
|
|
private static int distance(float f, float f2, float f3, float f4) {
|
|
return (int) Math.round(Math.hypot((double) (f3 - f), (double) (f4 - f2)));
|
|
}
|
|
|
|
public static void drawTexture(RendererCommon.GlDrawer glDrawer, VideoFrame.TextureBuffer textureBuffer, Matrix matrix, int i, int i2, int i3, int i4, int i5, int i6) {
|
|
Matrix matrix2 = new Matrix(textureBuffer.getTransformMatrix());
|
|
matrix2.preConcat(matrix);
|
|
float[] convertMatrixFromAndroidGraphicsMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix2);
|
|
int ordinal = textureBuffer.getType().ordinal();
|
|
if (ordinal == 0) {
|
|
glDrawer.drawOes(textureBuffer.getTextureId(), convertMatrixFromAndroidGraphicsMatrix, i, i2, i3, i4, i5, i6);
|
|
} else if (ordinal == 1) {
|
|
glDrawer.drawRgb(textureBuffer.getTextureId(), convertMatrixFromAndroidGraphicsMatrix, i, i2, i3, i4, i5, i6);
|
|
} else {
|
|
throw new RuntimeException("Unknown texture type.");
|
|
}
|
|
}
|
|
|
|
public void drawFrame(VideoFrame videoFrame, RendererCommon.GlDrawer glDrawer) {
|
|
drawFrame(videoFrame, glDrawer, null);
|
|
}
|
|
|
|
public void drawFrame(VideoFrame videoFrame, RendererCommon.GlDrawer glDrawer, Matrix matrix) {
|
|
drawFrame(videoFrame, glDrawer, matrix, 0, 0, videoFrame.getRotatedWidth(), videoFrame.getRotatedHeight());
|
|
}
|
|
|
|
public void drawFrame(VideoFrame videoFrame, RendererCommon.GlDrawer glDrawer, @Nullable Matrix matrix, int i, int i2, int i3, int i4) {
|
|
calculateTransformedRenderSize(videoFrame.getRotatedWidth(), videoFrame.getRotatedHeight(), matrix);
|
|
if (this.renderWidth <= 0 || this.renderHeight <= 0) {
|
|
StringBuilder K = a.K("Illegal frame size: ");
|
|
K.append(this.renderWidth);
|
|
K.append("x");
|
|
K.append(this.renderHeight);
|
|
Logging.w("VideoFrameDrawer", K.toString());
|
|
return;
|
|
}
|
|
boolean z2 = videoFrame.getBuffer() instanceof VideoFrame.TextureBuffer;
|
|
this.renderMatrix.reset();
|
|
this.renderMatrix.preTranslate(0.5f, 0.5f);
|
|
if (!z2) {
|
|
this.renderMatrix.preScale(1.0f, -1.0f);
|
|
}
|
|
this.renderMatrix.preRotate((float) videoFrame.getRotation());
|
|
this.renderMatrix.preTranslate(-0.5f, -0.5f);
|
|
if (matrix != null) {
|
|
this.renderMatrix.preConcat(matrix);
|
|
}
|
|
if (z2) {
|
|
this.lastI420Frame = null;
|
|
drawTexture(glDrawer, (VideoFrame.TextureBuffer) videoFrame.getBuffer(), this.renderMatrix, this.renderWidth, this.renderHeight, i, i2, i3, i4);
|
|
return;
|
|
}
|
|
if (videoFrame != this.lastI420Frame) {
|
|
this.lastI420Frame = videoFrame;
|
|
VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
|
|
this.yuvUploader.uploadFromBuffer(i420);
|
|
i420.release();
|
|
}
|
|
glDrawer.drawYuv(this.yuvUploader.getYuvTextures(), RendererCommon.convertMatrixFromAndroidGraphicsMatrix(this.renderMatrix), this.renderWidth, this.renderHeight, i, i2, i3, i4);
|
|
}
|
|
|
|
/**
 * Returns {@code buffer} with an extra reference for the caller to release.
 * The viewport size parameters are ignored in this implementation — no
 * downscaling/cropping is performed here.
 *
 * @param buffer frame buffer to prepare; its refcount is incremented
 * @param i      target viewport width (unused)
 * @param i2     target viewport height (unused)
 * @return the same buffer, retained
 */
public VideoFrame.Buffer prepareBufferForViewportSize(VideoFrame.Buffer buffer, int i, int i2) {
    buffer.retain();
    return buffer;
}
|
|
|
|
public void release() {
|
|
this.yuvUploader.release();
|
|
this.lastI420Frame = null;
|
|
}
|
|
}
|