package b.a.q.m0;

import android.content.Context;
import android.content.Intent;
import android.graphics.Point;
import android.graphics.Rect;
import android.media.AudioFormat;
import android.media.AudioPlaybackCaptureConfiguration;
import android.media.AudioRecord;
import android.media.projection.MediaProjection;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import androidx.annotation.RequiresApi;
import androidx.recyclerview.widget.RecyclerView;
import co.discord.media_engine.NativeCapturerObserver;
import co.discord.media_engine.SoundshareAudioSource;
import com.discord.rtcconnection.mediaengine.ThumbnailEmitter;
import com.discord.utilities.display.DisplayUtils;
import com.discord.utilities.logging.Logger;
import d0.z.d.m;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import kotlin.jvm.internal.DefaultConstructorMarker;
import org.webrtc.CapturerObserver;
import org.webrtc.JniCommon;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.TimestampAligner;
import org.webrtc.VideoFrame;
import rx.Observable;
import rx.Subscription;
import rx.functions.Action1;

/* compiled from: ScreenCapturer.kt */
/*
 * Decompiled screen capturer. Extends WebRTC's ScreenCapturerAndroid with:
 *  - audio playback capture of the projected content (API 29+),
 *  - a capture size that tracks the screen size, scaled and aligned to multiples of 16,
 *  - periodic re-delivery of the last frame when the screen content is static,
 *  - optional thumbnail emission via ThumbnailEmitter.
 */
public final class b extends ScreenCapturerAndroid {
    public static final a j = new a(null);                           // companion object
    public final RunnableC0034b k = new RunnableC0034b();            // frame-repeat task
    public NativeCapturerObserver l;                                  // lateinit "nativeObserver"
    public SurfaceTextureHelper m;                                    // lateinit "surfaceTextureHelper"
    public Context n;                                                 // lateinit "context"
    public final SoundshareAudioSource o = new SoundshareAudioSource();
    public int p;                                                     // requested frame rate (fps)
    public int q;                                                     // frame-repeat interval (ns)
    public Long r;                                                    // timestamp of the last captured frame (ns)
    /* renamed from: s reason: collision with root package name */
    public final Rect f262s = new Rect();                             // last known screen size
    public final Rect t = new Rect();                                 // scratch rect for the current screen size
    public final Point u = new Point();                               // computed capture size (16-aligned)
    public int v;                                                     // requested max capture dimension
    public Subscription w;                                            // frame-repeat timer subscription
    /* renamed from: x reason: collision with root package name */
    public final Logger f263x;
    /* renamed from: y reason: collision with root package name */
    public final ThumbnailEmitter f264y;

    /* compiled from: ScreenCapturer.kt */
    public static final class a {
        public a(DefaultConstructorMarker defaultConstructorMarker) {
        }
    }

    /* compiled from: ScreenCapturer.kt */
    /* renamed from: b.a.q.m0.b$b reason: collision with other inner class name */
    public final class RunnableC0034b implements Runnable {
        public RunnableC0034b() {
        }

        // Re-delivers the last frame if no new frame arrived within the repeat interval,
        // then reschedules itself via c().
        @Override // java.lang.Runnable
        public void run() {
            b bVar = b.this;
            if (bVar.p > 0) {
                Long l = bVar.r;
                if (l != null) {
                    long rtcTimeNanos = TimestampAligner.getRtcTimeNanos() - l.longValue();
                    b bVar2 = b.this;
                    if (rtcTimeNanos > ((long) bVar2.q)) {
                        NativeCapturerObserver nativeCapturerObserver = bVar2.l;
                        if (nativeCapturerObserver == null) {
                            m.throwUninitializedPropertyAccessException("nativeObserver");
                        }
                        nativeCapturerObserver.repeatLastFrame();
                    }
                }
                b.this.c();
            }
        }
    }

    /* compiled from: ScreenCapturer.kt */
    public static final class c implements Action1 {
        public final /* synthetic */ b j;

        public c(b bVar) {
            this.j = bVar;
        }

        /* JADX DEBUG: Method arguments types fixed to match base method, original types: [java.lang.Object] */
        @Override // rx.functions.Action1
        public void call(Long l) {
            this.j.k.run();
        }
    }

    /* compiled from: ScreenCapturer.kt */
    public static final class d implements Action1 {
        public static final d j = new d();

        /* JADX DEBUG: Method arguments types fixed to match base method, original types: [java.lang.Object] */
        @Override // rx.functions.Action1
        public void call(Throwable th) {
        }
    }

    /* JADX INFO: super call moved to the top of the method (can break code semantics) */
    public b(Intent intent, MediaProjection.Callback callback, Logger logger, ThumbnailEmitter thumbnailEmitter) {
        super(intent, callback);
        m.checkNotNullParameter(intent, "mediaProjectionPermissionResultData");
        m.checkNotNullParameter(callback, "mediaProjectionCallback");
        m.checkNotNullParameter(logger, "logger");
        this.f263x = logger;
        this.f264y = thumbnailEmitter;
    }

    // Scales the measured screen size so its longer edge matches i, rounds both edges to the
    // nearest multiple of 16, and applies the result as the new capture format.
    public final synchronized void a(Rect rect, int i) {
        m.checkNotNullParameter(rect, "measuredSize");
        int max = Math.max(rect.width(), rect.height());
        if (max > 0) {
            float f = ((float) i) / ((float) max);
            this.u.set((int) (((float) rect.width()) * f), (int) (((float) rect.height()) * f));
            Point point = this.u;
            int i2 = point.x;
            int i3 = i2 % 16;
            if (i3 >= 8) {
                i2 += 16;
            }
            int i4 = i2 - i3;
            int i5 = point.y;
            int i6 = i5 % 16;
            if (i6 >= 8) {
                i5 += 16;
            }
            point.set(i4, i5 - i6);
            Point point2 = this.u;
            super.changeCaptureFormat(point2.x, point2.y, this.p);
        }
    }

    // Builds an AudioRecord that captures the projected playback (media, game and unknown usages)
    // as 16-bit mono PCM at 44.1 kHz. Returns null when there is no projection or recording fails.
    @RequiresApi(29)
    public final AudioRecord b() {
        MediaProjection mediaProjection = this.mediaProjection;
        if (mediaProjection == null) {
            return null;
        }
        AudioPlaybackCaptureConfiguration build = new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                .addMatchingUsage(1)   // AudioAttributes.USAGE_MEDIA
                .addMatchingUsage(14)  // AudioAttributes.USAGE_GAME
                .addMatchingUsage(0)   // AudioAttributes.USAGE_UNKNOWN
                .build();
        m.checkNotNullExpressionValue(build, "AudioPlaybackCaptureConf…KNOWN)\n .build()");
        try {
            return new AudioRecord.Builder()
                    .setAudioFormat(new AudioFormat.Builder()
                            .setEncoding(2)      // AudioFormat.ENCODING_PCM_16BIT
                            .setSampleRate(44100)
                            .setChannelMask(16)  // AudioFormat.CHANNEL_IN_MONO
                            .build())
                    .setAudioPlaybackCaptureConfig(build)
                    .build();
        } catch (SecurityException unused) {
            Log.w(b.class.getName(), "Failed to record audio");
            return null;
        }
    }

    // Schedules the frame-repeat task on the SurfaceTextureHelper thread after one repeat interval.
    public final void c() {
        if (this.p > 0) {
            Subscription subscription = this.w;
            if (subscription != null) {
                subscription.unsubscribe();
            }
            // d0 is an obfuscated rx.Observable timer factory; q is in nanoseconds.
            Observable d02 = Observable.d0(((long) this.q) / 1000000, TimeUnit.MILLISECONDS);
            SurfaceTextureHelper surfaceTextureHelper = this.m;
            if (surfaceTextureHelper == null) {
                m.throwUninitializedPropertyAccessException("surfaceTextureHelper");
            }
            Handler handler = surfaceTextureHelper.getHandler();
            m.checkNotNullExpressionValue(handler, "surfaceTextureHelper.handler");
            Looper looper = handler.getLooper();
            AtomicReference atomicReference = j0.j.b.a.a;
            Objects.requireNonNull(looper, "looper == null");
            // X/W are obfuscated observeOn/subscribe; j0.j.b.b wraps the Looper in a scheduler,
            // so the callback (c) runs on the SurfaceTextureHelper thread and errors are swallowed by d.
            this.w = d02.X(new j0.j.b.b(looper)).W(new c(this), d.j);
        }
    }

    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void changeCaptureFormat(int i, int i2, int i3) {
        a(this.f262s, Math.max(i, i2));
        d(i3);
    }

    @Override // org.webrtc.ScreenCapturerAndroid
    public void createVirtualDisplay() {
        try {
            super.createVirtualDisplay();
        } catch (Throwable th) {
            Logger.e$default(this.f263x, "ScreenCapturer", "error in createVirtualDisplay", th, null, 8, null);
        }
    }

    // Stores the requested frame rate and derives the frame-repeat interval (ns);
    // the repeat rate is half the requested rate, but never below 10 fps.
    public final void d(int i) {
        if (i <= 0) {
            this.p = 0;
            this.q = 0;
            return;
        }
        this.p = i;
        this.q = 1000000000 / Math.max(10, i / 2);
    }

    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public void dispose() {
        stopCapture();
        ThumbnailEmitter thumbnailEmitter = this.f264y;
        if (thumbnailEmitter != null) {
            synchronized (thumbnailEmitter) {
                thumbnailEmitter.d.release();
                thumbnailEmitter.c.release();
                JniCommon.nativeFreeByteBuffer(thumbnailEmitter.f2770b);
                thumbnailEmitter.a = RecyclerView.FOREVER_NS; // Long.MAX_VALUE, inlined by the compiler
            }
        }
        this.o.release();
        super.dispose();
    }

    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context, CapturerObserver capturerObserver) {
        m.checkNotNullParameter(surfaceTextureHelper, "surfaceTextureHelper");
        m.checkNotNullParameter(context, "applicationContext");
        m.checkNotNullParameter(capturerObserver, "capturerObserver");
        this.m = surfaceTextureHelper;
        this.n = context;
        this.l = (NativeCapturerObserver) capturerObserver;
        super.initialize(surfaceTextureHelper, context, capturerObserver);
    }

    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoSink
    public void onFrame(VideoFrame videoFrame) {
        Rect rect = this.t;
        Context context = this.n;
        if (context == null) {
            m.throwUninitializedPropertyAccessException("context");
        }
        rect.set(DisplayUtils.getScreenSize(context));
        if (!m.areEqual(this.t, this.f262s)) {
            // Screen size changed (e.g. rotation): remember it and recompute the capture format.
            this.f262s.set(this.t);
            a(this.t, this.v);
        }
        this.r = Long.valueOf(videoFrame.getTimestampNs());
        ThumbnailEmitter thumbnailEmitter = this.f264y;
        if (thumbnailEmitter != null) {
            synchronized (thumbnailEmitter) {
                m.checkNotNullParameter(videoFrame, "frame");
                // Emit a thumbnail only after both the wall-clock and frame-timestamp delays have elapsed.
                long currentTimeMillis = thumbnailEmitter.k.currentTimeMillis() - thumbnailEmitter.e;
                long timestampNs = videoFrame.getTimestampNs() - thumbnailEmitter.a;
                if (currentTimeMillis > thumbnailEmitter.j) {
                    long j2 = (long) 1000;
                    if (timestampNs > thumbnailEmitter.i * j2 * j2) {
                        thumbnailEmitter.a = videoFrame.getTimestampNs();
                        thumbnailEmitter.l.invoke(thumbnailEmitter.a(videoFrame));
                    }
                }
            }
        }
        super.onFrame(videoFrame);
    }

    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void startCapture(int i, int i2, int i3) {
        AudioRecord b2;
        super.startCapture(i, i2, i3);
        this.v = Math.max(i, i2);
        d(i3);
        if (Build.VERSION.SDK_INT >= 29 && (b2 = b()) != null) {
            this.o.startRecording(b2);
        }
        c();
    }

    @Override // org.webrtc.ScreenCapturerAndroid, org.webrtc.VideoCapturer
    public synchronized void stopCapture() {
        super.stopCapture();
        this.o.stopRecording();
        this.p = 0;
        this.r = null;
    }
}
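
/*
 * Hypothetical wiring sketch (added for illustration; not part of the decompiled APK).
 * It only exercises the public surface shown above: construct the capturer with the
 * MediaProjection permission result, initialize it, then start capturing. The Logger,
 * SurfaceTextureHelper and CapturerObserver are assumed to be supplied by the media
 * engine / RTC connection in the real app, and the 1280x720@30 numbers are placeholders.
 */
final class ScreenCapturerUsageSketch {
    static void startScreenShare(Context appContext, Intent permissionResult, Logger logger,
            SurfaceTextureHelper textureHelper, CapturerObserver observer) {
        b capturer = new b(permissionResult, new MediaProjection.Callback() {
            @Override
            public void onStop() {
                // Projection revoked by the user or system; tear down the capturer here.
            }
        }, logger, null /* thumbnailEmitter is optional */);
        // observer must be the NativeCapturerObserver from the media engine: initialize() casts it.
        capturer.initialize(textureHelper, appContext, observer);
        // startCapture() also derives the frame-repeat interval and, on API 29+, starts audio capture.
        capturer.startCapture(1280, 720, 30);
    }
}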