// Source: discord-jadx/app/src/main/java/org/webrtc/AndroidVideoDecoder.java
// (decompiled with jadx; original file: 582 lines, 25 KiB, Java)
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.SystemClock;
import android.view.Surface;
import androidx.annotation.Nullable;
import c.d.b.a.a;
import h0.c.p0;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.EglBase;
import org.webrtc.EncodedImage;
import org.webrtc.ThreadUtils;
import org.webrtc.VideoDecoder;
import org.webrtc.VideoFrame;
public class AndroidVideoDecoder implements VideoDecoder, VideoSink {
private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
private static final String TAG = "AndroidVideoDecoder";
@Nullable
private VideoDecoder.Callback callback;
@Nullable
private MediaCodecWrapper codec;
private final String codecName;
private final VideoCodecMimeType codecType;
private int colorFormat;
private ThreadUtils.ThreadChecker decoderThreadChecker;
private final Object dimensionLock = new Object();
private final BlockingDeque<FrameInfo> frameInfos;
private boolean hasDecodedFirstFrame;
private int height;
private boolean keyFrameRequired;
private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
@Nullable
private Thread outputThread;
private ThreadUtils.ThreadChecker outputThreadChecker;
@Nullable
private DecodedTextureMetadata renderedTextureMetadata;
private final Object renderedTextureMetadataLock = new Object();
private volatile boolean running;
@Nullable
private final EglBase.Context sharedContext;
@Nullable
private volatile Exception shutdownException;
private int sliceHeight;
private int stride;
@Nullable
private Surface surface;
@Nullable
private SurfaceTextureHelper surfaceTextureHelper;
private int width;
/* renamed from: org.webrtc.AndroidVideoDecoder$1 reason: invalid class name */
public class AnonymousClass1 extends Thread {
public AnonymousClass1(String str) {
super(str);
}
@Override // java.lang.Thread, java.lang.Runnable
public void run() {
AndroidVideoDecoder.access$002(AndroidVideoDecoder.this, new ThreadUtils.ThreadChecker());
while (AndroidVideoDecoder.access$100(AndroidVideoDecoder.this)) {
AndroidVideoDecoder.this.deliverDecodedFrame();
}
AndroidVideoDecoder.access$200(AndroidVideoDecoder.this);
}
}
public static class DecodedTextureMetadata {
public final Integer decodeTimeMs;
public final long presentationTimestampUs;
public DecodedTextureMetadata(long j, Integer num) {
this.presentationTimestampUs = j;
this.decodeTimeMs = num;
}
}
public static class FrameInfo {
public final long decodeStartTimeMs;
public final int rotation;
public FrameInfo(long j, int i) {
this.decodeStartTimeMs = j;
this.rotation = i;
}
}
public AndroidVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String str, VideoCodecMimeType videoCodecMimeType, int i, @Nullable EglBase.Context context) {
if (isSupportedColorFormat(i)) {
Logging.d("AndroidVideoDecoder", "ctor name: " + str + " type: " + videoCodecMimeType + " color format: " + i + " context: " + context);
this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
this.codecName = str;
this.codecType = videoCodecMimeType;
this.colorFormat = i;
this.sharedContext = context;
this.frameInfos = new LinkedBlockingDeque();
return;
}
throw new IllegalArgumentException(a.l("Unsupported color format: ", i));
}
public static /* synthetic */ ThreadUtils.ThreadChecker access$002(AndroidVideoDecoder androidVideoDecoder, ThreadUtils.ThreadChecker threadChecker) {
androidVideoDecoder.outputThreadChecker = threadChecker;
return threadChecker;
}
public static /* synthetic */ boolean access$100(AndroidVideoDecoder androidVideoDecoder) {
return androidVideoDecoder.running;
}
public static /* synthetic */ void access$200(AndroidVideoDecoder androidVideoDecoder) {
androidVideoDecoder.releaseCodecOnOutputThread();
}
private VideoFrame.Buffer copyI420Buffer(ByteBuffer byteBuffer, int i, int i2, int i3, int i4) {
if (i % 2 == 0) {
int i5 = (i3 + 1) / 2;
int i6 = i2 % 2;
int i7 = i6 == 0 ? (i4 + 1) / 2 : i4 / 2;
int i8 = i / 2;
int i9 = (i * i2) + 0;
int i10 = i8 * i7;
int i11 = ((i8 * i2) / 2) + i9;
int i12 = i11 + i10;
VideoFrame.I420Buffer allocateI420Buffer = allocateI420Buffer(i3, i4);
byteBuffer.limit((i * i4) + 0);
byteBuffer.position(0);
copyPlane(byteBuffer.slice(), i, allocateI420Buffer.getDataY(), allocateI420Buffer.getStrideY(), i3, i4);
byteBuffer.limit(i9 + i10);
byteBuffer.position(i9);
copyPlane(byteBuffer.slice(), i8, allocateI420Buffer.getDataU(), allocateI420Buffer.getStrideU(), i5, i7);
if (i6 == 1) {
byteBuffer.position(((i7 - 1) * i8) + i9);
ByteBuffer dataU = allocateI420Buffer.getDataU();
dataU.position(allocateI420Buffer.getStrideU() * i7);
dataU.put(byteBuffer);
}
byteBuffer.limit(i12);
byteBuffer.position(i11);
copyPlane(byteBuffer.slice(), i8, allocateI420Buffer.getDataV(), allocateI420Buffer.getStrideV(), i5, i7);
if (i6 == 1) {
byteBuffer.position(((i7 - 1) * i8) + i11);
ByteBuffer dataV = allocateI420Buffer.getDataV();
dataV.position(allocateI420Buffer.getStrideV() * i7);
dataV.put(byteBuffer);
}
return allocateI420Buffer;
}
throw new AssertionError(a.l("Stride is not divisible by two: ", i));
}
private VideoFrame.Buffer copyNV12ToI420Buffer(ByteBuffer byteBuffer, int i, int i2, int i3, int i4) {
return new NV12Buffer(i3, i4, i, i2, byteBuffer, null).toI420();
}
private Thread createOutputThread() {
return new AnonymousClass1("AndroidVideoDecoder.outputThread");
}
private void deliverByteFrame(int i, MediaCodec.BufferInfo bufferInfo, int i2, Integer num) {
int i3;
int i4;
int i5;
int i6;
synchronized (this.dimensionLock) {
i3 = this.width;
i4 = this.height;
i5 = this.stride;
i6 = this.sliceHeight;
}
int i7 = bufferInfo.size;
if (i7 < ((i3 * i4) * 3) / 2) {
StringBuilder L = a.L("Insufficient output buffer size: ");
L.append(bufferInfo.size);
Logging.e("AndroidVideoDecoder", L.toString());
return;
}
int i8 = (i7 >= ((i5 * i4) * 3) / 2 || i6 != i4 || i5 <= i3) ? i5 : (i7 * 2) / (i4 * 3);
ByteBuffer byteBuffer = this.codec.getOutputBuffers()[i];
byteBuffer.position(bufferInfo.offset);
byteBuffer.limit(bufferInfo.offset + bufferInfo.size);
ByteBuffer slice = byteBuffer.slice();
VideoFrame.Buffer copyI420Buffer = this.colorFormat == 19 ? copyI420Buffer(slice, i8, i6, i3, i4) : copyNV12ToI420Buffer(slice, i8, i6, i3, i4);
this.codec.releaseOutputBuffer(i, false);
VideoFrame videoFrame = new VideoFrame(copyI420Buffer, i2, bufferInfo.presentationTimeUs * 1000);
this.callback.onDecodedFrame(videoFrame, num, null);
videoFrame.release();
}
private void deliverTextureFrame(int i, MediaCodec.BufferInfo bufferInfo, int i2, Integer num) {
int i3;
int i4;
synchronized (this.dimensionLock) {
i3 = this.width;
i4 = this.height;
}
synchronized (this.renderedTextureMetadataLock) {
if (this.renderedTextureMetadata != null) {
this.codec.releaseOutputBuffer(i, false);
return;
}
if (i3 > 0) {
if (i4 > 0) {
this.surfaceTextureHelper.setTextureSize(i3, i4);
this.surfaceTextureHelper.setFrameRotation(i2);
this.renderedTextureMetadata = new DecodedTextureMetadata(bufferInfo.presentationTimeUs, num);
this.codec.releaseOutputBuffer(i, true);
return;
}
}
this.codec.releaseOutputBuffer(i, false);
}
}
private VideoCodecStatus initDecodeInternal(int i, int i2) {
this.decoderThreadChecker.checkIsOnValidThread();
Logging.d("AndroidVideoDecoder", "initDecodeInternal name: " + this.codecName + " type: " + this.codecType + " width: " + i + " height: " + i2);
if (this.outputThread != null) {
Logging.e("AndroidVideoDecoder", "initDecodeInternal called while the codec is already running");
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
this.width = i;
this.height = i2;
this.stride = i;
this.sliceHeight = i2;
this.hasDecodedFirstFrame = false;
this.keyFrameRequired = true;
try {
this.codec = this.mediaCodecWrapperFactory.createByCodecName(this.codecName);
try {
MediaFormat createVideoFormat = MediaFormat.createVideoFormat(this.codecType.mimeType(), i, i2);
if (this.sharedContext == null) {
createVideoFormat.setInteger("color-format", this.colorFormat);
}
this.codec.configure(createVideoFormat, this.surface, null, 0);
this.codec.start();
this.running = true;
Thread createOutputThread = createOutputThread();
this.outputThread = createOutputThread;
createOutputThread.start();
Logging.d("AndroidVideoDecoder", "initDecodeInternal done");
return VideoCodecStatus.OK;
} catch (IllegalArgumentException | IllegalStateException e) {
Logging.e("AndroidVideoDecoder", "initDecode failed", e);
release();
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
} catch (IOException | IllegalArgumentException | IllegalStateException unused) {
StringBuilder L = a.L("Cannot create media decoder ");
L.append(this.codecName);
Logging.e("AndroidVideoDecoder", L.toString());
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
}
private boolean isSupportedColorFormat(int i) {
for (int i2 : MediaCodecUtils.DECODER_COLOR_FORMATS) {
if (i2 == i) {
return true;
}
}
return false;
}
private void reformat(MediaFormat mediaFormat) {
int i;
int i2;
this.outputThreadChecker.checkIsOnValidThread();
Logging.d("AndroidVideoDecoder", "Decoder format changed: " + mediaFormat.toString());
if (!mediaFormat.containsKey("crop-left") || !mediaFormat.containsKey("crop-right") || !mediaFormat.containsKey("crop-bottom") || !mediaFormat.containsKey("crop-top")) {
i2 = mediaFormat.getInteger("width");
i = mediaFormat.getInteger("height");
} else {
i2 = (mediaFormat.getInteger("crop-right") + 1) - mediaFormat.getInteger("crop-left");
i = (mediaFormat.getInteger("crop-bottom") + 1) - mediaFormat.getInteger("crop-top");
}
synchronized (this.dimensionLock) {
if (!this.hasDecodedFirstFrame || (this.width == i2 && this.height == i)) {
this.width = i2;
this.height = i;
} else {
stopOnOutputThread(new RuntimeException("Unexpected size change. Configured " + this.width + "*" + this.height + ". New " + i2 + "*" + i));
return;
}
}
if (this.surfaceTextureHelper == null && mediaFormat.containsKey("color-format")) {
this.colorFormat = mediaFormat.getInteger("color-format");
StringBuilder L = a.L("Color: 0x");
L.append(Integer.toHexString(this.colorFormat));
Logging.d("AndroidVideoDecoder", L.toString());
if (!isSupportedColorFormat(this.colorFormat)) {
StringBuilder L2 = a.L("Unsupported color format: ");
L2.append(this.colorFormat);
stopOnOutputThread(new IllegalStateException(L2.toString()));
return;
}
}
synchronized (this.dimensionLock) {
if (mediaFormat.containsKey("stride")) {
this.stride = mediaFormat.getInteger("stride");
}
if (mediaFormat.containsKey("slice-height")) {
this.sliceHeight = mediaFormat.getInteger("slice-height");
}
Logging.d("AndroidVideoDecoder", "Frame stride and slice height: " + this.stride + " x " + this.sliceHeight);
this.stride = Math.max(this.width, this.stride);
this.sliceHeight = Math.max(this.height, this.sliceHeight);
}
}
private VideoCodecStatus reinitDecode(int i, int i2) {
this.decoderThreadChecker.checkIsOnValidThread();
VideoCodecStatus releaseInternal = releaseInternal();
return releaseInternal != VideoCodecStatus.OK ? releaseInternal : initDecodeInternal(i, i2);
}
private void releaseCodecOnOutputThread() {
this.outputThreadChecker.checkIsOnValidThread();
Logging.d("AndroidVideoDecoder", "Releasing MediaCodec on output thread");
try {
this.codec.stop();
} catch (Exception e) {
Logging.e("AndroidVideoDecoder", "Media decoder stop failed", e);
}
try {
this.codec.release();
} catch (Exception e2) {
Logging.e("AndroidVideoDecoder", "Media decoder release failed", e2);
this.shutdownException = e2;
}
Logging.d("AndroidVideoDecoder", "Release on output thread done");
}
/* JADX WARNING: Unknown variable types count: 1 */
private VideoCodecStatus releaseInternal() {
if (!this.running) {
Logging.d("AndroidVideoDecoder", "release: Decoder is not running.");
return VideoCodecStatus.OK;
}
?? r2 = 0;
try {
this.running = false;
if (!ThreadUtils.joinUninterruptibly(this.outputThread, 5000)) {
Logging.e("AndroidVideoDecoder", "Media decoder release timeout", new RuntimeException());
return VideoCodecStatus.TIMEOUT;
} else if (this.shutdownException != null) {
Logging.e("AndroidVideoDecoder", "Media decoder release error", new RuntimeException(this.shutdownException));
this.shutdownException = null;
VideoCodecStatus videoCodecStatus = VideoCodecStatus.ERROR;
this.codec = null;
this.outputThread = null;
return videoCodecStatus;
} else {
this.codec = null;
this.outputThread = null;
return VideoCodecStatus.OK;
}
} finally {
this.codec = r2;
this.outputThread = r2;
}
}
private void stopOnOutputThread(Exception exc) {
this.outputThreadChecker.checkIsOnValidThread();
this.running = false;
this.shutdownException = exc;
}
public VideoFrame.I420Buffer allocateI420Buffer(int i, int i2) {
return JavaI420Buffer.allocate(i, i2);
}
public void copyPlane(ByteBuffer byteBuffer, int i, ByteBuffer byteBuffer2, int i2, int i3, int i4) {
YuvHelper.copyPlane(byteBuffer, i, byteBuffer2, i2, i3, i4);
}
@Override // org.webrtc.VideoDecoder
public /* synthetic */ long createNativeVideoDecoder() {
return p0.a(this);
}
public SurfaceTextureHelper createSurfaceTextureHelper() {
return SurfaceTextureHelper.create("decoder-texture-thread", this.sharedContext);
}
@Override // org.webrtc.VideoDecoder
public VideoCodecStatus decode(EncodedImage encodedImage, VideoDecoder.DecodeInfo decodeInfo) {
int i;
int i2;
VideoCodecStatus reinitDecode;
this.decoderThreadChecker.checkIsOnValidThread();
boolean z2 = false;
if (this.codec == null || this.callback == null) {
StringBuilder L = a.L("decode uninitalized, codec: ");
if (this.codec != null) {
z2 = true;
}
L.append(z2);
L.append(", callback: ");
L.append(this.callback);
Logging.d("AndroidVideoDecoder", L.toString());
return VideoCodecStatus.UNINITIALIZED;
}
ByteBuffer byteBuffer = encodedImage.buffer;
if (byteBuffer == null) {
Logging.e("AndroidVideoDecoder", "decode() - no input data");
return VideoCodecStatus.ERR_PARAMETER;
}
int remaining = byteBuffer.remaining();
if (remaining == 0) {
Logging.e("AndroidVideoDecoder", "decode() - input buffer empty");
return VideoCodecStatus.ERR_PARAMETER;
}
synchronized (this.dimensionLock) {
i = this.width;
i2 = this.height;
}
int i3 = encodedImage.encodedWidth;
int i4 = encodedImage.encodedHeight;
if (i3 * i4 > 0 && ((i3 != i || i4 != i2) && (reinitDecode = reinitDecode(i3, i4)) != VideoCodecStatus.OK)) {
return reinitDecode;
}
if (!this.keyFrameRequired || encodedImage.frameType == EncodedImage.FrameType.VideoFrameKey) {
try {
int dequeueInputBuffer = this.codec.dequeueInputBuffer(500000);
if (dequeueInputBuffer < 0) {
Logging.e("AndroidVideoDecoder", "decode() - no HW buffers available; decoder falling behind");
return VideoCodecStatus.ERROR;
}
try {
ByteBuffer byteBuffer2 = this.codec.getInputBuffers()[dequeueInputBuffer];
if (byteBuffer2.capacity() < remaining) {
Logging.e("AndroidVideoDecoder", "decode() - HW buffer too small");
return VideoCodecStatus.ERROR;
}
byteBuffer2.put(encodedImage.buffer);
this.frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), encodedImage.rotation));
try {
this.codec.queueInputBuffer(dequeueInputBuffer, 0, remaining, TimeUnit.NANOSECONDS.toMicros(encodedImage.captureTimeNs), 0);
if (this.keyFrameRequired) {
this.keyFrameRequired = false;
}
return VideoCodecStatus.OK;
} catch (IllegalStateException e) {
Logging.e("AndroidVideoDecoder", "queueInputBuffer failed", e);
this.frameInfos.pollLast();
return VideoCodecStatus.ERROR;
}
} catch (IllegalStateException e2) {
Logging.e("AndroidVideoDecoder", "getInputBuffers failed", e2);
return VideoCodecStatus.ERROR;
}
} catch (IllegalStateException e3) {
Logging.e("AndroidVideoDecoder", "dequeueInputBuffer failed", e3);
return VideoCodecStatus.ERROR;
}
} else {
Logging.e("AndroidVideoDecoder", "decode() - key frame required first");
return VideoCodecStatus.NO_OUTPUT;
}
}
public void deliverDecodedFrame() {
this.outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int dequeueOutputBuffer = this.codec.dequeueOutputBuffer(bufferInfo, 100000);
if (dequeueOutputBuffer == -2) {
reformat(this.codec.getOutputFormat());
} else if (dequeueOutputBuffer < 0) {
Logging.v("AndroidVideoDecoder", "dequeueOutputBuffer returned " + dequeueOutputBuffer);
} else {
FrameInfo poll = this.frameInfos.poll();
Integer num = null;
int i = 0;
if (poll != null) {
num = Integer.valueOf((int) (SystemClock.elapsedRealtime() - poll.decodeStartTimeMs));
i = poll.rotation;
}
this.hasDecodedFirstFrame = true;
if (this.surfaceTextureHelper != null) {
deliverTextureFrame(dequeueOutputBuffer, bufferInfo, i, num);
} else {
deliverByteFrame(dequeueOutputBuffer, bufferInfo, i, num);
}
}
} catch (IllegalStateException e) {
Logging.e("AndroidVideoDecoder", "deliverDecodedFrame failed", e);
}
}
@Override // org.webrtc.VideoDecoder
public String getImplementationName() {
return this.codecName;
}
@Override // org.webrtc.VideoDecoder
public /* synthetic */ boolean getPrefersLateDecoding() {
return p0.b(this);
}
@Override // org.webrtc.VideoDecoder
public VideoCodecStatus initDecode(VideoDecoder.Settings settings, VideoDecoder.Callback callback) {
this.decoderThreadChecker = new ThreadUtils.ThreadChecker();
this.callback = callback;
if (this.sharedContext != null) {
try {
this.surfaceTextureHelper = createSurfaceTextureHelper();
this.surface = new Surface(this.surfaceTextureHelper.getSurfaceTexture());
this.surfaceTextureHelper.startListening(this);
} catch (Throwable th) {
Logging.e("AndroidVideoDecoder", "Error creating SurfaceTextureHelper", th);
Surface surface = this.surface;
if (surface != null) {
surface.release();
this.surface = null;
}
SurfaceTextureHelper surfaceTextureHelper = this.surfaceTextureHelper;
if (surfaceTextureHelper != null) {
surfaceTextureHelper.dispose();
this.surfaceTextureHelper = null;
}
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
}
return initDecodeInternal(settings.width, settings.height);
}
@Override // org.webrtc.VideoSink
public void onFrame(VideoFrame videoFrame) {
long j;
Integer num;
synchronized (this.renderedTextureMetadataLock) {
DecodedTextureMetadata decodedTextureMetadata = this.renderedTextureMetadata;
if (decodedTextureMetadata != null) {
j = decodedTextureMetadata.presentationTimestampUs * 1000;
num = decodedTextureMetadata.decodeTimeMs;
this.renderedTextureMetadata = null;
} else {
throw new IllegalStateException("Rendered texture metadata was null in onTextureFrameAvailable.");
}
}
this.callback.onDecodedFrame(new VideoFrame(videoFrame.getBuffer(), videoFrame.getRotation(), j), num, null);
}
@Override // org.webrtc.VideoDecoder
public VideoCodecStatus release() {
Logging.d("AndroidVideoDecoder", "release");
VideoCodecStatus releaseInternal = releaseInternal();
if (this.surface != null) {
releaseSurface();
this.surface = null;
this.surfaceTextureHelper.stopListening();
this.surfaceTextureHelper.dispose();
this.surfaceTextureHelper = null;
}
synchronized (this.renderedTextureMetadataLock) {
this.renderedTextureMetadata = null;
}
this.callback = null;
this.frameInfos.clear();
return releaseInternal;
}
public void releaseSurface() {
this.surface.release();
}
}