// Viewing: BaseFacePPRecognitionActivity.java, Java source from the Credmex v3.12.2 app.
// Decompiled Java source shown with syntax highlighting. For security research and technical analysis
// only; any illegal use is strictly prohibited. Please comply with applicable laws and regulations.
package com.credmex.authentication.activity;
import android.annotation.SuppressLint;
import android.app.Dialog;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.TextureView;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.credmex.authentication.faceDetector.FaceMaskView;
import com.credmex.authentication.faceDetector.IDetection;
import com.credmex.authentication.faceDetector.IMediaPlayer;
import com.credmex.authentication.faceDetector.SensorUtil;
import com.credmex.authentication.models.FaceIdCard;
import com.credmex.commom.CommonActivity;
import com.credmex.models.response.DetectionTypeResponse;
import com.credmex.net.CredMexObserver;
import com.credmex.net.IApiRoutes;
import com.credmex.track.ThirdPartTrackEvents;
import com.credmex.track.ThirdPartTrackLib;
import com.credmex.track.TrackEventParamsConfigurator;
import com.credmex.utils.AppUtil;
import com.credmex.utils.FaceCheckNetWarrantyUtil;
import com.credmex.utils.YqdUtils;
import com.credmex.utils.facepp.FacePPUploadFailedLivingInfoHelper;
import com.credmex.widght.dialog.CommonDialog;
import com.credmex.widght.dialog.DialogButtonListener;
import com.megvii.livenessdetection.DetectionConfig;
import com.megvii.livenessdetection.DetectionFrame;
import com.megvii.livenessdetection.Detector;
import com.megvii.livenessdetection.FaceQualityManager;
import com.megvii.livenessdetection.bean.FaceIDDataStruct;
import com.megvii.livenessdetection.bean.FaceInfo;
import com.veda.supertoolkit.appstatus.AppStatusListener;
import com.veda.supertoolkit.customtools.CollectionUtils;
import com.veda.supertoolkit.customtools.CrashReporter;
import com.veda.supertoolkit.customtools.ICamera;
import com.veda.supertoolkit.customtools.Screen;
import com.veda.supertoolkit.lifecycle.LifecycleEvent;
import com.veda.supertoolkit.rxjavatools.RxUtil;
import com.veda.supertoolkit.utils.FormatUtil;
import com.veda.supertoolkit.widgets.StatusBarCompat;
import io.reactivex.Flowable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.functions.Predicate;
import io.reactivex.schedulers.Schedulers;
import io.sentry.ISpan;
import io.sentry.ITransaction;
import io.sentry.Sentry;
import io.sentry.SpanStatus;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;
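// Base activity for the Face++ (Megvii) liveness-detection flow: it renders the camera preview on a
// TextureView, feeds preview frames to the Megvii Detector, walks the user through the
// server-configured detection actions, and records each step as third-party tracking events and on a
// Sentry "FacePPDetector" transaction. Identifiers are the obfuscated names produced by the decompiler.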
public abstract class BaseFacePPRecognitionActivity extends CommonActivity implements Camera.PreviewCallback, Detector.DetectionListener, TextureView.SurfaceTextureListener {
protected static String q0 = "mobilePhone";
private static int r0 = 10;
private static int s0 = 10;
protected static int t0 = 80;
private TextureView D;
private View E;
private View F;
private View G;
private TextView H;
private View I;
private Detector J;
private ICamera K;
private Handler R;
private Handler T;
private IMediaPlayer U;
protected IDetection V;
private TextView W;
private TextView X;
private boolean Y;
private FaceQualityManager Z;
protected SensorUtil a0;
protected String d0;
protected FacePPUploadFailedLivingInfoHelper f0;
private FaceMaskView g0;
private View h0;
private Disposable i0;
@Nullable
private ISpan l0;
private HandlerThread S = new HandlerThread("videoEncoder");
private int b0 = 0;
protected FaceIdCard c0 = new FaceIdCard();
long e0 = 0;
private final ITransaction j0 = Sentry.D("FacePPDetector", "facePPDetector");
private Runnable k0 = new Runnable() {
@Override
public void run() {
BaseFacePPRecognitionActivity.this.e0 = System.currentTimeMillis();
BaseFacePPRecognitionActivity.this.I0();
BaseFacePPRecognitionActivity baseFacePPRecognitionActivity = BaseFacePPRecognitionActivity.this;
ArrayList<Detector.DetectionType> arrayList = baseFacePPRecognitionActivity.V.g;
if (arrayList != null) {
baseFacePPRecognitionActivity.changeType(arrayList.get(0), BaseFacePPRecognitionActivity.r0);
}
}
};
private boolean m0 = false;
private boolean n0 = false;
private int o0 = 0;
private boolean p0 = false;
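// Decompiler-generated switch map over Detector.DetectionFailedType ordinals, used by the switch in
// onDetectionFailed().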
static class AnonymousClass6 {
static final int[] a;
static {
int[] iArr = new int[Detector.DetectionFailedType.values().length];
a = iArr;
try {
iArr[Detector.DetectionFailedType.a.ordinal()] = 1;
} catch (NoSuchFieldError unused) {
}
try {
a[Detector.DetectionFailedType.c.ordinal()] = 2;
} catch (NoSuchFieldError unused2) {
}
try {
a[Detector.DetectionFailedType.b.ordinal()] = 3;
} catch (NoSuchFieldError unused3) {
}
}
}
private interface LiveSentryTrace {
}
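// Finishes the Sentry "FacePPDetector" transaction (if not already finished), tagging the step at
// which the flow ended ("userCloseAction") and the final status.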
private void A0(SpanStatus spanStatus, String str) {
if (this.j0.isFinished()) {
return;
}
this.j0.a("userCloseAction", str);
this.j0.n(spanStatus);
}
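// Starts the camera preview on the TextureView's SurfaceTexture once the surface has become available.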
private boolean B0() {
if (this.p0) {
return this.K.g(this.D.getSurfaceTexture());
}
return false;
}
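// Per-frame pre-check: when certain FaceInfo pose/quality values exceed 0.5, a hint is shown
// (throttled to every s0-th frame); otherwise the frame is scored by FaceQualityManager and the
// resulting error list is passed to faceInfoChecker().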
private void C0(DetectionFrame detectionFrame) {
FaceInfo a;
this.o0++;
if (detectionFrame != null && (a = detectionFrame.a()) != null) {
if (a.x > 0.5d || a.y > 0.5d) {
if (this.o0 > s0) {
this.o0 = 0;
this.W.setText(getString(2131820879));
return;
}
return;
}
if (a.z > 0.5d) {
if (this.o0 > s0) {
this.o0 = 0;
this.W.setText(getString(2131820880));
return;
}
return;
}
this.V.c(a.B);
}
faceInfoChecker(this.Z.a(detectionFrame));
}
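// Ends the liveness flow: on success, finishes the Sentry transaction, shows a toast and opens the
// result screen (M0); on failure, uploads the failed liveness info and shows the retry dialog (j1).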
private void G0(int i2, boolean z) {
if (!z) {
uploadFailedLiveFace();
j1(i2);
} else {
A0(SpanStatus.OK, "faceDetecting");
showNormalToast(i2);
M0();
}
}
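// Runs once when a good-quality face is first seen: plays the intro animations and schedules the
// first detection action via the k0 runnable.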
private void H0() {
if (this.Y) {
return;
}
this.Y = true;
Animation loadAnimation = AnimationUtils.loadAnimation(this, 2130772015);
Animation loadAnimation2 = AnimationUtils.loadAnimation(this, 2130772014);
this.F.startAnimation(loadAnimation2);
this.V.c[0].setVisibility(0);
this.V.c[0].startAnimation(loadAnimation);
loadAnimation2.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
BaseFacePPRecognitionActivity.this.I.setVisibility(0);
}
@Override
public void onAnimationRepeat(Animation animation) {
}
@Override
public void onAnimationStart(Animation animation) {
}
});
this.R.post(this.k0);
}
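// Resets the Detector and kicks off the first configured detection action; skipped while ICamera has
// nothing open (K.a == null).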
public void I0() {
if (this.K.a == null) {
return;
}
this.E.setVisibility(4);
this.b0 = 0;
this.J.J();
this.J.p(this.V.g.get(0));
}
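// Creates the Megvii Detector, loads the bundled "MegLive_model" resource and registers this activity
// as its DetectionListener; on init failure the Sentry span is marked as failed, an error toast is
// shown and the activity finishes.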
public void J0() {
this.J = new Detector(getApplicationContext(), new DetectionConfig.Builder().c());
ISpan x = this.j0.x("faceDetectorInit", "faceDetectorInit");
if (this.J.B(getApplicationContext(), YqdUtils.q(getApplicationContext(), "MegLive_model"), "")) {
ITransaction iTransaction = this.j0;
SpanStatus spanStatus = SpanStatus.OK;
iTransaction.a("faceDetectorInit", spanStatus.name());
x.n(spanStatus);
this.J.K(this);
reportFullyDisplayed();
new Thread(new Runnable() {
@Override
public final void run() {
BaseFacePPRecognitionActivity.this.T0();
}
}).start();
return;
}
showErrorToast(2131820866);
ITransaction iTransaction2 = this.j0;
SpanStatus spanStatus2 = SpanStatus.UNKNOWN_ERROR;
iTransaction2.a("faceDetectorInit", spanStatus2.name());
this.j0.a("errorStep", "faceDetectorInit");
x.n(spanStatus2);
A0(spanStatus2, "faceDetectorInit");
finish();
}
private void K0() {
this.f0 = new FacePPUploadFailedLivingInfoHelper(null, this.d0, this.c0, null);
}
public Camera N0(Integer num) throws Exception {
return this.K.f(this);
}
public void O0(ISpan iSpan, Camera camera) throws Exception {
this.D.setKeepScreenOn(true);
if (camera == null) {
showErrorToast(2131820882);
ThirdPartTrackLib.d(this, ThirdPartTrackEvents.UserAuth.MEX_LIVE_RECOGNITION_FAIL_OPEN_CAMERA);
ITransaction iTransaction = this.j0;
SpanStatus spanStatus = SpanStatus.UNKNOWN_ERROR;
iTransaction.a("openCamera", spanStatus.name());
this.j0.a("errorStep", "openCamera");
iSpan.n(spanStatus);
return;
}
Camera.getCameraInfo(1, new Camera.CameraInfo());
this.D.setLayoutParams(this.K.e());
this.Z = new FaceQualityManager(0.5f, 0.5f);
this.V.f = -1;
boolean B0 = B0();
this.K.a(this);
k1();
if (B0) {
ITransaction iTransaction2 = this.j0;
SpanStatus spanStatus2 = SpanStatus.OK;
iTransaction2.a("openCamera", spanStatus2.name());
iSpan.n(spanStatus2);
return;
}
ITransaction iTransaction3 = this.j0;
SpanStatus spanStatus3 = SpanStatus.UNKNOWN_ERROR;
iTransaction3.a("openCamera", spanStatus3.name());
this.j0.a("errorStep", "preview");
iSpan.n(spanStatus3);
}
public void P0(ISpan iSpan, Throwable th) throws Exception {
ITransaction iTransaction = this.j0;
SpanStatus spanStatus = SpanStatus.UNKNOWN_ERROR;
iTransaction.a("openCamera", spanStatus.name());
this.j0.a("errorStep", "openCamera");
iSpan.n(spanStatus);
showErrorToast(2131820882);
ThirdPartTrackLib.d(this, ThirdPartTrackEvents.UserAuth.MEX_LIVE_RECOGNITION_FAIL_OPEN_CAMERA);
}
public void Q0(long j2) {
this.H.setText(FormatUtil.s(Locale.CHINA, "%d", new Object[]{Long.valueOf(j2 / 1000)}));
}
public void S0(Boolean bool) throws Exception {
this.j0.a("goBackground", "true");
}
public void T0() {
if (AppUtil.c(this)) {
this.V.a();
}
}
public void U0(Detector.DetectionFailedType detectionFailedType, Map map) {
map.put(ThirdPartTrackEvents.UserAuth.Keys.MEX_LIVE_CHECK_TIME, Long.valueOf(System.currentTimeMillis() - this.e0));
map.put(ThirdPartTrackEvents.UserAuth.Keys.MEX_LIVE_CHECK_DETECTION_TYPE, this.V.g.get(this.b0));
map.put(ThirdPartTrackEvents.UserAuth.Keys.MEX_LIVE_CHECK_FAILED_REASON, detectionFailedType.name());
}
public void V0() {
G0(2131820893, true);
}
public void W0(Map map) {
map.put(ThirdPartTrackEvents.UserAuth.Keys.MEX_LIVE_CHECK_TIME, Long.valueOf(System.currentTimeMillis() - this.e0));
map.put(ThirdPartTrackEvents.UserAuth.Keys.MEX_LIVE_CHECK_DETECTION_TYPE, this.V.g.get(this.b0));
}
public void X0(Dialog dialog) {
dialog.dismiss();
i1();
}
public void Y0(Dialog dialog) {
dialog.dismiss();
A0(SpanStatus.UNKNOWN_ERROR, "faceDetecting");
finish();
}
public void Z0(long j2, Long l2) throws Exception {
this.h0.setRotation((((System.currentTimeMillis() - j2) % 8000) * (-360.0f)) / 8000.0f);
}
public void a1(Long l2) throws Exception {
ThirdPartTrackLib.d(this, ThirdPartTrackEvents.UserAuth.MEX_FACEPP_PAGE_TIMEOUT);
this.j0.a("pageTimeout", "pageTimeout");
f1();
}
public void e1(Map map) {
map.put(ThirdPartTrackEvents.UserAuth.Keys.RESULT, this.a0.b() ? "vertical" : "not_vertical");
}
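// Copies the Detector's FaceIDDataStruct (delta and live image data) into the FaceIdCard payload and
// records whether the detection failed.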
private void h1(boolean z) {
FaceIdCard faceIdCard = this.c0;
if (faceIdCard == null) {
return;
}
faceIdCard.isDetectionFail = !z;
FaceIDDataStruct v2 = this.J.v();
FaceIdCard faceIdCard2 = this.c0;
faceIdCard2.delta = v2.a;
faceIdCard2.liveImageDatum = v2.b;
}
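// Resets the hint UI, the action views and the Detector so the liveness flow can be restarted.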
private void i1() {
this.W.setVisibility(0);
this.X.setVisibility(4);
this.F.clearAnimation();
this.V.m();
this.I.setVisibility(4);
this.Y = false;
this.J.J();
}
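// Shows the "liveness check failed" dialog: retry restarts the flow (i1), cancel finishes the activity.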
private void j1(int i2) {
CommonDialog.CommonDialogBuilder.c(this).e().i(i2).m(2131820886, new DialogButtonListener() {
@Override
public final void a(Dialog dialog) {
BaseFacePPRecognitionActivity.this.X0(dialog);
}
}).k(2131820885, new DialogButtonListener() {
@Override
public final void a(Dialog dialog) {
BaseFacePPRecognitionActivity.this.Y0(dialog);
}
}).q("dialog_live_recognition_failed").a().show();
}
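// Spins the circular progress view: a 16 ms ticker rotates h0 through a full turn every 8 seconds
// while the activity is active.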
private void k1() {
final long currentTimeMillis = System.currentTimeMillis();
RxUtil.b(this.i0);
this.i0 = duringActive(Flowable.F(16L, TimeUnit.MILLISECONDS)).W(new Consumer() {
public final void accept(Object obj) {
BaseFacePPRecognitionActivity.this.Z0(currentTimeMillis, (Long) obj);
}
}, new com.credmex.activity.x2());
}
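// Page timeout: after F0() seconds a timeout tracking event is sent, the Sentry transaction is tagged
// and the subclass-defined f1() timeout handler is invoked.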
private void l1() {
duringActive(Flowable.k0(F0(), TimeUnit.SECONDS, AndroidSchedulers.a())).J(AndroidSchedulers.a()).W(new Consumer() {
public final void accept(Object obj) {
BaseFacePPRecognitionActivity.this.a1((Long) obj);
}
}, new Consumer() {
public final void accept(Object obj) {
CrashReporter.a((Throwable) obj);
}
});
}
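// Reports the orientation-sensor state once per state: "sensor_error" when no sensor reading is
// available, otherwise "sensor_normal" plus whether the device is held vertically.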
private void m1() {
if (this.a0.c == 0.0f) {
if (this.m0) {
return;
}
this.j0.a("sensorStatus", "sensor_error");
ThirdPartTrackLib.e(this, ThirdPartTrackEvents.UserAuth.MEX_FACEPP_V2_SENSOR_LOST_STATUS, new TrackEventParamsConfigurator() {
@Override
public final void a(Map map) {
map.put(ThirdPartTrackEvents.UserAuth.Keys.RESULT, "sensor_error");
}
});
this.m0 = true;
return;
}
if (this.n0) {
return;
}
this.j0.a("sensorStatus", "sensor_normal");
ThirdPartTrackLib.e(this, ThirdPartTrackEvents.UserAuth.MEX_FACEPP_V2_SENSOR_LOST_STATUS, new TrackEventParamsConfigurator() {
@Override
public final void a(Map map) {
map.put(ThirdPartTrackEvents.UserAuth.Keys.RESULT, "sensor_normal");
}
});
ThirdPartTrackLib.e(this, ThirdPartTrackEvents.UserAuth.MEX_FACEPP_V2_SENSOR_VERTICAL_STATUS, new TrackEventParamsConfigurator() {
@Override
public final void a(Map map) {
BaseFacePPRecognitionActivity.this.e1(map);
}
});
this.n0 = true;
}
@Override
protected int A() {
return 0;
}
@Override
protected void D() {
K0();
D0();
}
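// Fetches the server-configured list of detection actions; on error falls back to IDetection's
// default action set. Either way the flow continues with the face authorization step (E0).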
protected void D0() {
showLoadingDialog();
final ISpan x = this.j0.x("detectionTypeFetch", "detectionTypeFetch");
((IApiRoutes) this.apiHelper.a()).a0().a(new CredMexObserver<DetectionTypeResponse>(this) {
@Override
public void onError(Throwable th, DetectionTypeResponse detectionTypeResponse) {
BaseFacePPRecognitionActivity.this.dismissLoadingDialog();
ISpan iSpan = x;
SpanStatus spanStatus = SpanStatus.UNKNOWN_ERROR;
iSpan.n(spanStatus);
BaseFacePPRecognitionActivity.this.j0.a("detectionTypeFetch", spanStatus.name());
BaseFacePPRecognitionActivity.this.V.n();
BaseFacePPRecognitionActivity.this.E0();
}
public void onSuccess(DetectionTypeResponse detectionTypeResponse) {
BaseFacePPRecognitionActivity.this.dismissLoadingDialog();
ISpan iSpan = x;
SpanStatus spanStatus = SpanStatus.OK;
iSpan.n(spanStatus);
BaseFacePPRecognitionActivity.this.j0.a("detectionTypeFetch", spanStatus.name());
BaseFacePPRecognitionActivity.this.g1(detectionTypeResponse);
BaseFacePPRecognitionActivity.this.E0();
}
});
}
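// Runs the Face++ network warranty/authorization check; detector initialization (J0) proceeds on both
// success and failure, with the outcome recorded on the Sentry span.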
protected void E0() {
final ISpan x = this.j0.x("faceAuthorization", "faceAuthorization");
FaceCheckNetWarrantyUtil.a(this, new FaceCheckNetWarrantyUtil.DefaultCallBack(this) {
@Override
public void onError() {
super.onError();
ISpan iSpan = x;
SpanStatus spanStatus = SpanStatus.UNKNOWN_ERROR;
iSpan.a("faceAuthorization", spanStatus.name());
BaseFacePPRecognitionActivity.this.j0.a("errorStep", "faceAuthorization");
x.n(spanStatus);
BaseFacePPRecognitionActivity.this.J0();
}
@Override
public void onSuccess() {
super.onSuccess();
ISpan iSpan = x;
SpanStatus spanStatus = SpanStatus.OK;
iSpan.a("faceAuthorization", spanStatus.name());
x.n(spanStatus);
BaseFacePPRecognitionActivity.this.J0();
}
});
}
protected abstract int F0();
@Override
protected void G(@NonNull Bundle bundle) {
super.G(bundle);
this.d0 = bundle.getString(q0);
}
@Override
protected void K() {
super.K();
setToolbarTransparentTitleWhite();
}
@Override
protected boolean L() {
return true;
}
protected boolean L0() {
return this.a0.c == 0.0f;
}
protected void M0() {
AuthFacePPResultActivity.startRecognitionResultActivity(this, this.c0, this.d0);
}
@Override
protected void X(@NonNull Bundle bundle) {
super.X(bundle);
bundle.putString(q0, this.d0);
}
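// Switches the UI and the Detector to the given detection action and plays its audio prompt via
// IMediaPlayer.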
public void changeType(Detector.DetectionType detectionType, long j2) {
if (isFinishing() || isDestroyed()) {
return;
}
this.W.setVisibility(4);
this.X.setVisibility(0);
this.V.g.indexOf(detectionType);
this.V.b(detectionType, j2);
IMediaPlayer iMediaPlayer = this.U;
iMediaPlayer.b(iMediaPlayer.c(detectionType));
}
protected abstract void f1();
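// Maps the first FaceQualityManager error to a user-facing hint (throttled to every s0-th frame), or
// starts the liveness action sequence (H0) once no quality errors remain.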
public void faceInfoChecker(List<FaceQualityManager.FaceQualityErrorType> list) {
if (list == null || list.size() == 0) {
H0();
return;
}
FaceQualityManager.FaceQualityErrorType faceQualityErrorType = list.get(0);
String string = faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.c ? getString(2131820876)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.d ? getString(2131820876)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.e ? getString(2131820876)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.f ? getString(2131820874)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.g ? getString(2131820875)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.h ? getString(2131820872)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.i ? getString(2131820873)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.j ? getString(2131820862)
: faceQualityErrorType == FaceQualityManager.FaceQualityErrorType.k ? getString(2131820871)
: "";
if (this.o0 > s0) {
this.o0 = 0;
this.W.setText(string);
}
}
@Override
protected void g() {
StatusBarCompat.d(this, A(), false);
}
protected void g1(DetectionTypeResponse detectionTypeResponse) {
DetectionTypeResponse.DetectionTypeCode detectionTypeCode;
if (detectionTypeResponse == null || (detectionTypeCode = detectionTypeResponse.body) == null || CollectionUtils.a(detectionTypeCode.detectionTypeCodes)) {
this.V.n();
} else {
this.V.d(detectionTypeResponse.body.detectionTypeCodes);
}
}
@Override
public int getLayoutID() {
return 2131558714;
}
public void handleNotPass(final long j2) {
if (j2 > 0) {
this.R.post(new Runnable() {
@Override
public final void run() {
BaseFacePPRecognitionActivity.this.Q0(j2);
}
});
}
}
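// Binds the views, prepares the camera/media helpers, starts the video-encoder handler thread, and
// subscribes to app-background events and the page timeout.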
@Override
@SuppressLint({"CheckResult"})
protected void init() {
this.a0 = new SensorUtil(this);
Screen.a(this);
this.R = new Handler();
this.S.start();
this.T = new Handler(this.S.getLooper());
this.U = new IMediaPlayer(this);
this.G = findViewById(2131362731);
this.K = new ICamera();
this.W = (TextView) findViewById(2131363164);
TextView textView = (TextView) findViewById(2131363209);
this.X = textView;
this.V = new IDetection(this.G, textView);
this.h0 = findViewById(2131362364);
FaceMaskView faceMaskView = (FaceMaskView) findViewById(2131362188);
this.g0 = faceMaskView;
faceMaskView.setDetectView(findViewById(2131362569));
TextureView textureView = (TextureView) findViewById(2131363247);
this.D = textureView;
textureView.setSurfaceTextureListener(this);
View findViewById = findViewById(2131362662);
this.E = findViewById;
findViewById.setVisibility(4);
View findViewById2 = findViewById(2131362455);
this.F = findViewById2;
findViewById2.setVisibility(0);
this.I = findViewById(2131362726);
this.H = (TextView) findViewById(2131363066);
this.V.o();
duringActive(AppStatusListener.h().k(), false).x(new Predicate() {
public final boolean test(Object obj) {
boolean booleanValue;
booleanValue = ((Boolean) obj).booleanValue();
return booleanValue;
}
}).e0(1L).W(new Consumer() {
public final void accept(Object obj) {
BaseFacePPRecognitionActivity.this.S0((Boolean) obj);
}
}, new com.credmex.activity.y0());
l1();
}
@Override
public void onBackPressed() {
A0(SpanStatus.CANCELLED, "backPressed");
super.onBackPressed();
}
@Override
protected void onDestroy() {
super.onDestroy();
if (AppStatusListener.h().j()) {
A0(SpanStatus.CANCELLED, "goBackground");
} else {
SpanStatus spanStatus = SpanStatus.UNKNOWN_ERROR;
A0(spanStatus, spanStatus.name());
}
Detector detector = this.J;
if (detector != null) {
detector.I();
}
this.V.l();
this.a0.c();
}
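// Megvii callback: a liveness action failed. Tracks the failure, tags the Sentry transaction, maps
// the failure type to a message, uploads the failed liveness info and shows the retry dialog; for one
// failure type the timeout handler f1() is invoked first.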
public void onDetectionFailed(final Detector.DetectionFailedType detectionFailedType) {
int i2;
try {
ThirdPartTrackLib.e(this, ThirdPartTrackEvents.UserAuth.MEX_LIVE_CHECK_FAILED, new TrackEventParamsConfigurator() {
@Override
public final void a(Map map) {
BaseFacePPRecognitionActivity.this.U0(detectionFailedType, map);
}
});
this.j0.a("faceDetecting", detectionFailedType.name());
this.j0.a("errorStep", "faceDetecting");
this.j0.a("faceDetectingErrorType", detectionFailedType.name());
} catch (Exception e) {
e.printStackTrace();
CrashReporter.a(e);
}
int i3 = AnonymousClass6.a[detectionFailedType.ordinal()];
if (i3 == 1) {
i2 = 2131820895;
} else if (i3 != 2) {
i2 = 2131820865;
} else {
f1();
i2 = 2131820896;
}
ISpan iSpan = this.l0;
if (iSpan != null) {
iSpan.n(SpanStatus.UNKNOWN_ERROR);
}
h1(false);
G0(i2, false);
}
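// Megvii callback: one liveness action passed. Advances to the next configured action or, once all
// actions are done, marks the Sentry span OK, stores the liveness data (h1) and posts V0() to open
// the result screen.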
public Detector.DetectionType onDetectionSuccess(DetectionFrame detectionFrame) {
try {
ThirdPartTrackLib.e(this, ThirdPartTrackEvents.UserAuth.MEX_LIVE_CHECK_SUCCESSFULLY, new TrackEventParamsConfigurator() {
@Override
public final void a(Map map) {
BaseFacePPRecognitionActivity.this.W0(map);
}
});
} catch (Exception e) {
e.printStackTrace();
CrashReporter.a(e);
}
this.U.d();
int i2 = this.b0 + 1;
this.b0 = i2;
if (i2 >= this.V.g.size()) {
this.E.setVisibility(0);
ITransaction iTransaction = this.j0;
SpanStatus spanStatus = SpanStatus.OK;
iTransaction.a("faceDetecting", spanStatus.name());
ISpan iSpan = this.l0;
if (iSpan != null) {
iSpan.n(spanStatus);
}
if (this.c0 != null) {
h1(true);
this.R.post(new Runnable() {
@Override
public final void run() {
BaseFacePPRecognitionActivity.this.V0();
}
});
}
} else {
changeType(this.V.g.get(this.b0), r0);
}
return this.b0 >= this.V.g.size() ? Detector.DetectionType.h : this.V.g.get(this.b0);
}
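// Megvii per-frame callback: reports the sensor state, opens the "faceDetecting" span on the first
// frame, and runs the face checks only while the device is held vertically (or the sensor is
// unavailable); otherwise the user is prompted to hold the phone upright.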
public void onFrameDetected(long j2, DetectionFrame detectionFrame) {
m1();
if (this.l0 == null) {
this.l0 = this.j0.x("faceDetecting", "faceDetecting");
}
if (this.a0.b() || L0()) {
C0(detectionFrame);
handleNotPass(j2);
} else if (this.a0.c == 0.0f) {
this.W.setText("");
} else {
this.W.setText(getString(2131820868));
}
}
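// Camera preview callback: forwards each preview frame to the Detector together with the rotation
// derived from the current camera orientation.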
@Override
public void onPreviewFrame(byte[] bArr, Camera camera) {
if (this.J == null || camera == null) {
return;
}
try {
Camera.Size previewSize = camera.getParameters().getPreviewSize();
this.J.r(bArr, previewSize.width, previewSize.height, 360 - this.K.d(this));
} catch (Exception e) {
e.printStackTrace();
CrashReporter.a(e);
}
}
@Override
protected void onStop() {
super.onStop();
this.R.removeCallbacksAndMessages(null);
try {
this.K.c();
} catch (Exception e) {
e.printStackTrace();
CrashReporter.a(e);
}
this.U.a();
this.D.setKeepScreenOn(false);
finish();
}
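// Surface ready: request the camera permission, then either open the camera (z0) or abort (y0).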
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i2, int i3) {
this.p0 = true;
final ISpan x = this.j0.x("cameraPermissionRequest", "cameraPermissionRequest");
requestCameraPermission(new CommonActivity.CameraPermissionCallback() {
@Override
public void a() {
x.n(SpanStatus.UNKNOWN_ERROR);
BaseFacePPRecognitionActivity.this.y0();
}
@Override
public void b() {
x.n(SpanStatus.UNKNOWN_ERROR);
BaseFacePPRecognitionActivity.this.y0();
}
@Override
public void c() {
x.n(SpanStatus.OK);
BaseFacePPRecognitionActivity.this.z0();
}
});
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
this.p0 = false;
return false;
}
@Override
@SuppressLint({"CheckResult"})
public Disposable runOnCreated(@NonNull Runnable runnable) {
return com.veda.supertoolkit.lifecycle.i.a(this, runnable);
}
@Override
@SuppressLint({"CheckResult"})
public Disposable runOnDestroy(@NonNull Runnable runnable) {
return com.veda.supertoolkit.lifecycle.i.b(this, runnable);
}
@Override
@SuppressLint({"CheckResult"})
public Disposable runOnEvent(@NonNull LifecycleEvent lifecycleEvent, @Nullable Runnable runnable) {
return com.veda.supertoolkit.lifecycle.i.c(this, lifecycleEvent, runnable);
}
protected abstract void uploadFailedLiveFace();
protected void y0() {
showErrorToast(2131821099);
A0(SpanStatus.CANCELLED, "cameraPermissionRequest");
finish();
}
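// Opens the camera via ICamera on a background scheduler; O0 handles the successful open (preview
// layout, preview start, Sentry tagging) and P0 the failure path.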
@SuppressLint({"CheckResult"})
protected void z0() {
this.Y = false;
final ISpan x = this.j0.x("openCamera", "openCamera");
duringActive(Flowable.G(1)).b0(Schedulers.c()).H(new Function() {
public final Object apply(Object obj) {
Camera N0;
N0 = BaseFacePPRecognitionActivity.this.N0((Integer) obj);
return N0;
}
}).J(AndroidSchedulers.a()).e0(1L).W(new Consumer() {
public final void accept(Object obj) {
BaseFacePPRecognitionActivity.this.O0(x, (Camera) obj);
}
}, new Consumer() {
public final void accept(Object obj) {
BaseFacePPRecognitionActivity.this.P0(x, (Throwable) obj);
}
});
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i2, int i3) {
}
}