discord-jadx/app/src/main/java/b/a/q/m0/b.java

package b.a.q.m0;
import android.content.Context;
import android.content.Intent;
import android.graphics.Point;
import android.graphics.Rect;
import android.media.AudioFormat;
import android.media.AudioPlaybackCaptureConfiguration;
import android.media.AudioRecord;
import android.media.projection.MediaProjection;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.RequiresApi;
import androidx.recyclerview.widget.RecyclerView;
import co.discord.media_engine.NativeCapturerObserver;
import co.discord.media_engine.SoundshareAudioSource;
import com.discord.rtcconnection.mediaengine.ThumbnailEmitter;
import com.discord.utilities.display.DisplayUtils;
import com.discord.utilities.logging.Logger;
import d0.z.d.m;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import kotlin.jvm.internal.DefaultConstructorMarker;
import org.webrtc.CapturerObserver;
import org.webrtc.JniCommon;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.TimestampAligner;
import org.webrtc.VideoFrame;
import rx.Observable;
import rx.Subscription;
import rx.functions.Action1;
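/*
 * Decompiled (JADX) screen-share capturer. Judging from the calls visible in
 * this file, the class below extends WebRTC's ScreenCapturerAndroid and adds:
 * device-audio capture via AudioPlaybackCapture on API 29+, repetition of the
 * last frame while the screen content is static, rescaling when the screen
 * size changes, and optional thumbnail emission. Field and method names
 * (j, k, l, a(), b(), ...) are obfuscated.
 */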
/* compiled from: ScreenCapturer.kt */
/* loaded from: classes.dex */
public final class b extends ScreenCapturerAndroid {
    public static final a j = new a(null);
    public NativeCapturerObserver l;
    public SurfaceTextureHelper m;
    public Context n;
    public int p;
    public int q;
    public Long r;
    public int v;
    public Subscription w;

    /* renamed from: x reason: collision with root package name */
    public final Logger f265x;

    /* renamed from: y reason: collision with root package name */
    public final ThumbnailEmitter f266y;

    public final RunnableC0041b k = new RunnableC0041b();
    public final SoundshareAudioSource o = new SoundshareAudioSource();

    /* renamed from: s reason: collision with root package name */
    public final Rect f264s = new Rect();
    public final Rect t = new Rect();
    public final Point u = new Point();
    /* compiled from: ScreenCapturer.kt */
    /* loaded from: classes.dex */
    public static final class a {
        public a(DefaultConstructorMarker defaultConstructorMarker) {
        }
    }
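    /*
     * Frame-repeat task. While capture is active (p, the target frame rate,
     * is > 0) it checks how long ago the last frame arrived (r, in ns); if
     * that exceeds the minimum frame interval (q, ns) it asks the native
     * observer to re-deliver the previous frame, presumably so downstream
     * consumers keep receiving frames while the screen is static. It then
     * re-arms the timer via c().
     */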
    /* compiled from: ScreenCapturer.kt */
    /* renamed from: b.a.q.m0.b$b reason: collision with other inner class name */
    /* loaded from: classes.dex */
    public final class RunnableC0041b implements Runnable {
        public RunnableC0041b() {
        }

        @Override // java.lang.Runnable
        public void run() {
            b bVar = b.this;
            if (bVar.p > 0) {
                Long l = bVar.r;
                if (l != null) {
                    long rtcTimeNanos = TimestampAligner.getRtcTimeNanos() - l.longValue();
                    b bVar2 = b.this;
                    if (rtcTimeNanos > bVar2.q) {
                        NativeCapturerObserver nativeCapturerObserver = bVar2.l;
                        if (nativeCapturerObserver == null) {
                            m.throwUninitializedPropertyAccessException("nativeObserver");
                        }
                        nativeCapturerObserver.repeatLastFrame();
                    }
                }
                b.this.c();
            }
        }
    }
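    /*
     * Callbacks for the rx timer created in c(): 'c' runs the frame-repeat
     * task on each tick and 'd' silently swallows timer errors.
     */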
    /* compiled from: ScreenCapturer.kt */
    /* loaded from: classes.dex */
    public final class c<T> implements Action1<Long> {
        public c() {
        }

        @Override // rx.functions.Action1
        public void call(Long l) {
            b.this.k.run();
        }
    }
    /* compiled from: ScreenCapturer.kt */
    /* loaded from: classes.dex */
    public static final class d<T> implements Action1<Throwable> {
        public static final d j = new d();

        @Override // rx.functions.Action1
        public void call(Throwable th) {
        }
    }
    /* JADX WARN: 'super' call moved to the top of the method (can break code semantics) */
    public b(Intent intent, MediaProjection.Callback callback, Logger logger, ThumbnailEmitter thumbnailEmitter) {
        super(intent, callback);
        m.checkNotNullParameter(intent, "mediaProjectionPermissionResultData");
        m.checkNotNullParameter(callback, "mediaProjectionCallback");
        m.checkNotNullParameter(logger, "logger");
        this.f265x = logger;
        this.f266y = thumbnailEmitter;
    }
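    /*
     * Recomputes the capture resolution from a measured screen rect: scales
     * the rect so its larger dimension matches the requested maximum (i),
     * then rounds each dimension to the nearest multiple of 16, which video
     * encoders commonly prefer. For example, a 1080x2280 screen with i = 1280
     * scales to 606x1280 and is rounded to 608x1280 before being passed to
     * changeCaptureFormat() together with the current frame rate (p).
     */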
    public final synchronized void a(Rect rect, int i) {
        m.checkNotNullParameter(rect, "measuredSize");
        int max = Math.max(rect.width(), rect.height());
        if (max > 0) {
            float f = ((float) i) / ((float) max);
            this.u.set((int) (rect.width() * f), (int) (rect.height() * f));
            Point point = this.u;
            int i2 = point.x;
            int i3 = i2 % 16;
            if (i3 >= 8) {
                i2 += 16;
            }
            int i4 = i2 - i3;
            int i5 = point.y;
            int i6 = i5 % 16;
            if (i6 >= 8) {
                i5 += 16;
            }
            point.set(i4, i5 - i6);
            Point point2 = this.u;
            super.changeCaptureFormat(point2.x, point2.y, this.p);
        }
    }
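    /*
     * Builds an AudioRecord that captures device audio through
     * AudioPlaybackCapture (API 29+). The matched usages 1, 14 and 0 are
     * AudioAttributes.USAGE_MEDIA, USAGE_GAME and USAGE_UNKNOWN; the format
     * (encoding 2, 44100 Hz, channel mask 16) is 16-bit PCM, 44.1 kHz mono.
     * Returns null when there is no MediaProjection or the record cannot be
     * created (e.g. the RECORD_AUDIO permission is missing).
     */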
    @RequiresApi(29)
    public final AudioRecord b() {
        MediaProjection mediaProjection = this.mediaProjection;
        if (mediaProjection == null) {
            return null;
        }
        AudioPlaybackCaptureConfiguration build = new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                .addMatchingUsage(1)
                .addMatchingUsage(14)
                .addMatchingUsage(0)
                .build();
        m.checkNotNullExpressionValue(build, "AudioPlaybackCaptureConf…KNOWN)\n .build()");
        try {
            return new AudioRecord.Builder()
                    .setAudioFormat(new AudioFormat.Builder().setEncoding(2).setSampleRate(44100).setChannelMask(16).build())
                    .setAudioPlaybackCaptureConfig(build)
                    .build();
        } catch (SecurityException unused) {
            Log.w(b.class.getName(), "Failed to record audio");
            return null;
        }
    }
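    /*
     * (Re)arms the timer behind the frame-repeat task. The obfuscated
     * Observable.d0(...) is given the minimum frame interval converted from
     * ns to ms, and the obfuscated j0.j.b.* types appear to be rx-android's
     * Looper-based scheduler, so the callback should run on the
     * SurfaceTextureHelper's handler thread. The task itself calls c() again,
     * keeping the cycle going while capture is active.
     */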
    public final void c() {
        if (this.p > 0) {
            Subscription subscription = this.w;
            if (subscription != null) {
                subscription.unsubscribe();
            }
            Observable<Long> d02 = Observable.d0(this.q / 1000000, TimeUnit.MILLISECONDS);
            SurfaceTextureHelper surfaceTextureHelper = this.m;
            if (surfaceTextureHelper == null) {
                m.throwUninitializedPropertyAccessException("surfaceTextureHelper");
            }
            Handler handler = surfaceTextureHelper.getHandler();
            m.checkNotNullExpressionValue(handler, "surfaceTextureHelper.handler");
            Looper looper = handler.getLooper();
            AtomicReference<j0.j.b.a> atomicReference = j0.j.b.a.a;
            Objects.requireNonNull(looper, "looper == null");
            this.w = d02.X(new j0.j.b.b(looper)).W(new c(), d.j);
        }
    }
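    /*
     * Recomputes the capture size from the last measured screen rect via a(),
     * using max(i, i2) as the target resolution, and refreshes the
     * frame-rate/repeat-interval fields via d().
     */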
    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void changeCaptureFormat(int i, int i2, int i3) {
        a(this.f264s, Math.max(i, i2));
        d(i3);
    }
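    /*
     * Wraps the base implementation so that any failure while creating the
     * virtual display is logged rather than propagated.
     */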
    @Override // org.webrtc.ScreenCapturerAndroid
    public void createVirtualDisplay() {
        try {
            super.createVirtualDisplay();
        } catch (Throwable th) {
            Logger.e$default(this.f265x, "ScreenCapturer", "error in createVirtualDisplay", th, null, 8, null);
        }
    }
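    /*
     * Stores the requested frame rate (p) and derives the interval (q, ns)
     * after which a static frame is repeated: 1e9 / max(10, fps / 2).
     * At 30 fps that is about 66.7 ms; the max(10, ...) term caps the repeat
     * interval at 100 ms for low frame rates. A non-positive rate disables
     * frame repetition.
     */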
    public final void d(int i) {
        if (i <= 0) {
            this.p = 0;
            this.q = 0;
            return;
        }
        this.p = i;
        this.q = 1000000000 / Math.max(10, i / 2);
    }
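    /*
     * Stops capture and releases everything: the thumbnail emitter's
     * resources (two releasable fields and a native byte buffer;
     * RecyclerView.FOREVER_NS is the decompiler's substitution for
     * Long.MAX_VALUE), the soundshare audio source, and the base capturer.
     */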
    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public void dispose() {
        stopCapture();
        ThumbnailEmitter thumbnailEmitter = this.f266y;
        if (thumbnailEmitter != null) {
            synchronized (thumbnailEmitter) {
                thumbnailEmitter.d.release();
                thumbnailEmitter.c.release();
                JniCommon.nativeFreeByteBuffer(thumbnailEmitter.f2773b);
                thumbnailEmitter.a = RecyclerView.FOREVER_NS;
            }
        }
        this.o.release();
        super.dispose();
    }
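    /*
     * Caches the surface texture helper, application context and capturer
     * observer (the cast assumes the media engine always supplies a
     * NativeCapturerObserver) before delegating to the base class.
     */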
    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context, CapturerObserver capturerObserver) {
        m.checkNotNullParameter(surfaceTextureHelper, "surfaceTextureHelper");
        m.checkNotNullParameter(context, "applicationContext");
        m.checkNotNullParameter(capturerObserver, "capturerObserver");
        this.m = surfaceTextureHelper;
        this.n = context;
        this.l = (NativeCapturerObserver) capturerObserver;
        super.initialize(surfaceTextureHelper, context, capturerObserver);
    }
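    /*
     * Per-frame hook: re-reads the screen size and rescales the capture if it
     * changed (e.g. on rotation), remembers the frame timestamp for the
     * frame-repeat task, and hands the frame to the thumbnail emitter once
     * both its wall-clock delay (j, apparently ms) and frame-timestamp gap
     * (i, scaled to ns) have elapsed, before passing the frame on.
     */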
    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoSink
    public void onFrame(VideoFrame videoFrame) {
        Rect rect = this.t;
        Context context = this.n;
        if (context == null) {
            m.throwUninitializedPropertyAccessException("context");
        }
        rect.set(DisplayUtils.getScreenSize(context));
        if (!m.areEqual(this.t, this.f264s)) {
            this.f264s.set(this.t);
            a(this.t, this.v);
        }
        this.r = Long.valueOf(videoFrame.getTimestampNs());
        ThumbnailEmitter thumbnailEmitter = this.f266y;
        if (thumbnailEmitter != null) {
            synchronized (thumbnailEmitter) {
                m.checkNotNullParameter(videoFrame, "frame");
                long currentTimeMillis = thumbnailEmitter.k.currentTimeMillis() - thumbnailEmitter.e;
                long timestampNs = videoFrame.getTimestampNs() - thumbnailEmitter.a;
                if (currentTimeMillis > thumbnailEmitter.j) {
                    long j2 = 1000;
                    if (timestampNs > thumbnailEmitter.i * j2 * j2) {
                        thumbnailEmitter.a = videoFrame.getTimestampNs();
                        thumbnailEmitter.l.invoke(thumbnailEmitter.a(videoFrame));
                    }
                }
            }
        }
        super.onFrame(videoFrame);
    }
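    /*
     * Starts video capture via the base class, records the target resolution
     * and frame rate, starts capturing device audio through the soundshare
     * source on API 29+, and arms the frame-repeat timer.
     */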
    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void startCapture(int i, int i2, int i3) {
        AudioRecord b2;
        super.startCapture(i, i2, i3);
        this.v = Math.max(i, i2);
        d(i3);
        if (Build.VERSION.SDK_INT >= 29 && (b2 = b()) != null) {
            this.o.startRecording(b2);
        }
        c();
    }
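    /*
     * Stops the base capturer and the audio source, then clears the
     * frame-rate and last-timestamp state so the frame-repeat task becomes a
     * no-op.
     */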
    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void stopCapture() {
        super.stopCapture();
        this.o.stopRecording();
        this.p = 0;
        this.r = null;
    }
}