Viewing: LiveRecognitionActivity.java, decompiled Java source from the Easycash v3.75.13 app
This page shows the decompiled Java source file with syntax highlighting. For security research and technical analysis only; any unlawful use is strictly prohibited. Please comply with applicable laws and regulations.
package com.fintopia.livenessdetection.facev2.activity;
import android.app.Activity;
import android.content.Intent;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.TextureView;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.fintopia.livenessdetection.R;
import com.fintopia.livenessdetection.facev2.utils.FaceCheckNetWarrantyUtil;
import com.fintopia.livenessdetection.facev2.utils.IDetection;
import com.fintopia.livenessdetection.facev2.utils.IMediaPlayer;
import com.google.android.material.timepicker.TimeModel;
import com.lingyue.idnbaselib.GeneralConstants;
import com.lingyue.idnbaselib.livecheck.FaceDetectErrorStep;
import com.lingyue.idnbaselib.livecheck.LiveErrorCode;
import com.lingyue.idnbaselib.model.live.FaceIdCard;
import com.lingyue.idnbaselib.utils.EcFormatUtil;
import com.lingyue.idnbaselib.utils.GeneralUtil;
import com.lingyue.idnbaselib.utils.JsonParamsBuilder;
import com.lingyue.idnbaselib.utils.SensorUtil;
import com.lingyue.idnbaselib.utils.ThirdPartEventUtils;
import com.lingyue.supertoolkit.customtools.ICamera;
import com.lingyue.supertoolkit.customtools.Screen;
import com.lingyue.supertoolkit.resourcetools.SharedPreferenceUtils;
import com.lingyue.supertoolkit.widgets.BaseUtils;
import com.megvii.livenessdetection.DetectionConfig;
import com.megvii.livenessdetection.DetectionFrame;
import com.megvii.livenessdetection.Detector;
import com.megvii.livenessdetection.FaceQualityManager;
import com.megvii.livenessdetection.bean.FaceIDDataStruct;
import com.megvii.livenessdetection.bean.FaceInfo;
import com.veda.android.bananalibrary.infrastructure.BaseActivity;
import com.yangqianguan.statistics.FintopiaTrackDataUtils;
import io.sentry.ITransaction;
import io.sentry.Sentry;
import io.sentry.SpanStatus;
import java.io.Serializable;
import java.util.List;
import java.util.Locale;
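/**
 * Megvii Face++ V2 liveness detection screen (decompiled). Shows the front-camera preview on a
 * TextureView, feeds preview frames to the Megvii Detector, walks the user through the configured
 * action steps, and on success returns a FaceIdCard (delta + captured images) to the caller.
 * Instrumented throughout with Sentry spans and analytics events.
 */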
public class LiveRecognitionActivity extends BaseActivity implements Camera.PreviewCallback, Detector.DetectionListener, TextureView.SurfaceTextureListener {
public static final int BEFORE_SHOW_VOLUME_TIP_WAITING_TIME = 10000;
public static final String INTENT_PARAM_ACTION_NUM = "actionNum";
public static final String INTENT_PARAM_FACE_DATA = "faceIdData";
public static final String INTENT_PARAM_SHOW_INCREASE_VOLUME_TIP = "showIncreaseVolumeTip";
public static final String INTENT_PARAM_USE_NEW_LAYOUT = "useNewLayout";
public static final int VOLUME_TIP_SHOW_TIME = 5000;
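// The members below keep their obfuscated decompiled names: UI widgets, the Megvii Detector,
// camera/audio wrappers, the IDetection step helper, FaceQualityManager, the Sentry transaction,
// the FaceIdCard result, and detection-state flags and counters.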
private SensorUtil A;
private boolean C;
private boolean D;
private String E;
private TextureView k;
private ProgressBar f249l;
private LinearLayout f250m;
private FrameLayout f251n;
private TextView f252o;
private RelativeLayout f253p;
private TextView f254q;
private LinearLayout f255r;
private TextView f256s;
private Detector f257t;
private ICamera f258u;
private Handler f259v;
private IMediaPlayer f260w;
private IDetection f261x;
private boolean y;
private FaceQualityManager z;
ITransaction j = Sentry.C("FacePPCheck", "FacePPCheck");
private FaceIdCard B = new FaceIdCard();
private Runnable F = new Runnable() {
@Override
public void run() {
LiveRecognitionActivity.this.v();
if (LiveRecognitionActivity.this.f261x.f != null) {
LiveRecognitionActivity liveRecognitionActivity = LiveRecognitionActivity.this;
liveRecognitionActivity.changeType(liveRecognitionActivity.f261x.f.get(0), 10L);
}
}
};
private boolean G = false;
private boolean H = false;
private int I = 0;
private int J = 0;
private boolean K = false;
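// Compiler-generated switch map over FaceQualityManager.FaceQualityErrorType and
// Detector.DetectionFailedType ordinals, used by faceInfoChecker() and onDetectionFailed().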
static class AnonymousClass4 {
static final int[] a;
static final int[] b;
static {
int[] iArr = new int[FaceQualityManager.FaceQualityErrorType.values().length];
b = iArr;
try {
iArr[FaceQualityManager.FaceQualityErrorType.FACE_NOT_FOUND.ordinal()] = 1;
} catch (NoSuchFieldError unused) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_POS_DEVIATED.ordinal()] = 2;
} catch (NoSuchFieldError unused2) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_NONINTEGRITY.ordinal()] = 3;
} catch (NoSuchFieldError unused3) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_EYE_OCCLUSIVE.ordinal()] = 4;
} catch (NoSuchFieldError unused4) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_MOUTH_OCCLUSIVE.ordinal()] = 5;
} catch (NoSuchFieldError unused5) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_TOO_DARK.ordinal()] = 6;
} catch (NoSuchFieldError unused6) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_TOO_LARGE.ordinal()] = 7;
} catch (NoSuchFieldError unused7) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_TOO_SMALL.ordinal()] = 8;
} catch (NoSuchFieldError unused8) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_TOO_BLURRY.ordinal()] = 9;
} catch (NoSuchFieldError unused9) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_TOO_BRIGHT.ordinal()] = 10;
} catch (NoSuchFieldError unused10) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FACE_OUT_OF_RECT.ordinal()] = 11;
} catch (NoSuchFieldError unused11) {
}
try {
b[FaceQualityManager.FaceQualityErrorType.FRAME_NEED_HOLDING.ordinal()] = 12;
} catch (NoSuchFieldError unused12) {
}
int[] iArr2 = new int[Detector.DetectionFailedType.values().length];
a = iArr2;
try {
iArr2[Detector.DetectionFailedType.ACTIONBLEND.ordinal()] = 1;
} catch (NoSuchFieldError unused13) {
}
try {
a[Detector.DetectionFailedType.NOTVIDEO.ordinal()] = 2;
} catch (NoSuchFieldError unused14) {
}
try {
a[Detector.DetectionFailedType.TIMEOUT.ordinal()] = 3;
} catch (NoSuchFieldError unused15) {
}
}
}
public void A(long j) {
this.f252o.setText(String.format(Locale.CHINA, TimeModel.NUMBER_FORMAT, Long.valueOf(j / 1000)));
}
public void B() {
this.f255r.setVisibility(8);
}
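// Shows the "increase volume" tip when media volume is below ~10% of max and the
// showIncreaseVolumeTip flag is set, at most once per day (tracked in SharedPreferences),
// and hides it again after 5 seconds.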
public void C() {
if (this.y) {
return;
}
AudioManager audioManager = (AudioManager) getSystemService("audio");
int streamMaxVolume = audioManager.getStreamMaxVolume(3);
int streamVolume = audioManager.getStreamVolume(3);
// Volume ratio check (the decompiled line used integer division, which would only pass at max volume; likely a dropped cast).
if (streamMaxVolume <= 0 || (double) streamVolume / streamMaxVolume >= 0.1d) {
this.f255r.setVisibility(8);
return;
}
if (!this.D) {
this.f255r.setVisibility(8);
G("");
return;
}
String e = EcFormatUtil.e(Long.valueOf(System.currentTimeMillis()));
if (SharedPreferenceUtils.s(this, "sp_auth_live_recognition_show_volume_tip_day", "").equals(e)) {
this.f255r.setVisibility(8);
G("");
} else {
this.f255r.setVisibility(0);
this.f255r.postDelayed(new Runnable() {
@Override
public final void run() {
LiveRecognitionActivity.this.B();
}
}, 5000L);
SharedPreferenceUtils.J(this, "sp_auth_live_recognition_show_volume_tip_day", e);
G(this.f256s.getText().toString());
}
}
public void D() {
t(true, null);
}
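// Analytics helpers: E() reports which action step is being shown, F() reports a face-detection
// error (deduplicated by error code), and G() reports the volume-tip text shown (empty when none).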
private void E(Detector.DetectionType detectionType) {
ThirdPartEventUtils.y(this, GeneralConstants.d[this.f261x.f.indexOf(detectionType)]);
}
private void F(String str, String str2) {
if (TextUtils.equals(this.E, str)) {
return;
}
this.E = str;
ThirdPartEventUtils.D(this, GeneralConstants.f, new JsonParamsBuilder().d("step").a(FaceDetectErrorStep.FACE_DETECT_FACEPP_V2.a()).d("errorCode").a(str).d("errorMessage").a(str2).c());
}
private void G(String str) {
ThirdPartEventUtils.D(this, GeneralConstants.e, new JsonParamsBuilder().d("text").a(str).c());
}
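// Called for every detector frame: marks the Sentry span, runs the occlusion/quality checks,
// and refreshes the step countdown.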
private void H(long j, DetectionFrame detectionFrame) {
this.j.a("visitFacePPDetectStep", "hit");
s(detectionFrame);
handleNotPass(j);
}
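// Reports, at most once each, a missing orientation-sensor reading (value 0) and a not-vertical
// device posture, via Sentry markers and analytics events.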
private void I(@NonNull SensorUtil sensorUtil) {
if (sensorUtil.b()) {
return;
}
if (sensorUtil.c == 0.0f) {
if (this.G) {
return;
}
this.j.a("sensorHasClose", "hit");
ThirdPartEventUtils.y(this, "EC_facepp_v2_sensor_lost_status");
this.G = true;
return;
}
if (this.H) {
return;
}
this.j.a("sensorNotVertical", "hit");
ThirdPartEventUtils.y(this, "EC_facepp_v2_sensor_not_vertical");
this.H = true;
}
private void r() {
if (this.K) {
this.f258u.g(this.k.getSurfaceTexture());
}
}
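// Occlusion pre-check: when the eyes or mouth look covered (score > 0.5), updates the prompt and
// reports the error every ~10 frames; otherwise forwards the frame to FaceQualityManager.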
private void s(DetectionFrame detectionFrame) {
FaceInfo faceInfo;
this.I++;
if (detectionFrame != null && (faceInfo = detectionFrame.getFaceInfo()) != null) {
if (faceInfo.eyeLeftOcclusion > 0.5d || faceInfo.eyeRightOcclusion > 0.5d) {
if (this.I > 10) {
this.I = 0;
TextView textView = this.f254q;
int i = R.string.ec_live_no_eye;
textView.setText(getString(i));
F(FaceQualityManager.FaceQualityErrorType.FACE_EYE_OCCLUSIVE.name(), getString(i));
return;
}
return;
}
if (faceInfo.mouthOcclusion > 0.5d) {
if (this.I > 10) {
this.I = 0;
TextView textView2 = this.f254q;
int i2 = R.string.ec_live_no_mouth;
textView2.setText(getString(i2));
F(FaceQualityManager.FaceQualityErrorType.FACE_MOUTH_OCCLUSIVE.name(), getString(i2));
return;
}
return;
}
this.f261x.b(faceInfo.faceTooLarge);
}
faceInfoChecker(this.z.feedFrame(detectionFrame));
}
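// Launch helper: starts this screen for result with the number of actions, the new-layout flag
// and the volume-tip flag.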
public static void startRecognitionActivityForResult(Activity activity, int i, int i2, boolean z, boolean z2) {
Intent intent = new Intent(activity, (Class<?>) LiveRecognitionActivity.class);
intent.putExtra(INTENT_PARAM_ACTION_NUM, i2);
intent.putExtra(INTENT_PARAM_USE_NEW_LAYOUT, z);
intent.putExtra(INTENT_PARAM_SHOW_INCREASE_VOLUME_TIP, z2);
activity.startActivityForResult(intent, i);
}
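// Ends the flow: on success returns the FaceIdCard via the result Intent and closes the Sentry
// transaction as OK; on failure records the error, optionally toasts the message, and marks the
// transaction as failed.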
private void t(boolean z, @Nullable String str) {
if (z) {
ITransaction iTransaction = this.j;
SpanStatus spanStatus = SpanStatus.OK;
iTransaction.a("faceDetectResult", spanStatus.name());
this.j.o(spanStatus);
Intent intent = new Intent();
intent.putExtra(INTENT_PARAM_FACE_DATA, (Serializable) this.B);
setResult(-1, intent);
} else {
this.j.a("faceDetectResult", TextUtils.isEmpty(str) ? SpanStatus.UNKNOWN_ERROR.name() : str);
this.j.o(SpanStatus.INTERNAL_ERROR);
if (!TextUtils.isEmpty(str)) {
BaseUtils.p(getApplicationContext(), str);
}
}
finish();
}
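// One-shot transition from the intro tips to the first action: animates the tips out and the
// first action view in, reveals the countdown panel when the animation ends, then posts the
// detection-start runnable F.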
private void u() {
if (this.y) {
return;
}
this.y = true;
Animation loadAnimation = AnimationUtils.loadAnimation(this, R.anim.liveness_rightin);
Animation loadAnimation2 = AnimationUtils.loadAnimation(this, R.anim.liveness_leftout);
this.f250m.startAnimation(loadAnimation2);
this.f261x.c[0].setVisibility(0);
this.f261x.c[0].startAnimation(loadAnimation);
loadAnimation2.setAnimationListener(new Animation.AnimationListener() {
@Override
public void onAnimationEnd(Animation animation) {
LiveRecognitionActivity.this.f253p.setVisibility(0);
}
@Override
public void onAnimationRepeat(Animation animation) {
}
@Override
public void onAnimationStart(Animation animation) {
}
});
this.f259v.post(this.F);
}
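// Resets the detector, progress UI and step counter, then queues the first action type.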
public void v() {
if (this.f258u.a == null) {
return;
}
this.f249l.setVisibility(4);
this.f261x.c();
this.J = 0;
this.f257t.reset();
this.f257t.changeDetectionType(this.f261x.f.get(0));
}
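// Creates the Megvii Detector, loads the bundled MegLive_model data, and registers this activity
// as the detection listener; reports an error if initialization fails.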
public void w() {
Detector detector = new Detector(this, new DetectionConfig.Builder().build());
this.f257t = detector;
if (!detector.init(this, GeneralUtil.h(this, "MegLive_model"), "")) {
int i = R.string.ec_live_detector_init_fail;
BaseUtils.p(this, getString(i));
F(LiveErrorCode.DETECTOR_INITIALIZATION_FAILED.a(), getString(i));
}
this.f257t.setDetectionListener(this);
}
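// Runs the FaceCheckNetWarrantyUtil pre-check: the detector is built on success, the activity
// finishes on failure.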
private void x() {
FaceCheckNetWarrantyUtil.i(this, new FaceCheckNetWarrantyUtil.DefaultCallBack(this) {
@Override
public void a() {
super.a();
LiveRecognitionActivity.this.finish();
}
@Override
public void b() {
super.b();
Sentry.w();
}
@Override
public void onSuccess() {
super.onSuccess();
LiveRecognitionActivity.this.w();
}
});
}
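// Binds the views (preview TextureView, progress bar, prompt, countdown, action-animation
// container) and configures the IDetection helper with the requested number of actions.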
private void y() {
this.f254q = (TextView) findViewById(R.id.ll_prompt_text);
TextureView textureView = (TextureView) findViewById(R.id.txv_liveness_layout);
this.k = textureView;
textureView.setSurfaceTextureListener(this);
ProgressBar progressBar = (ProgressBar) findViewById(R.id.pb_liveness_progressbar);
this.f249l = progressBar;
progressBar.setVisibility(4);
LinearLayout linearLayout = (LinearLayout) findViewById(R.id.ll_bottom_tips_head);
this.f250m = linearLayout;
linearLayout.setVisibility(0);
this.f253p = (RelativeLayout) findViewById(R.id.rl_detection_step_timeout_rel);
this.f252o = (TextView) findViewById(R.id.tv_detection_step_timeout_garden);
FrameLayout frameLayout = (FrameLayout) findViewById(R.id.fl_face_anim);
this.f251n = frameLayout;
IDetection iDetection = new IDetection(this, frameLayout, this.C);
this.f261x = iDetection;
iDetection.i(getIntent().getIntExtra(INTENT_PARAM_ACTION_NUM, 1));
z();
}
private void z() {
if (this.C) {
this.f255r = (LinearLayout) findViewById(R.id.ll_volume_tip);
this.f256s = (TextView) findViewById(R.id.tv_volume_tip);
this.f259v.postDelayed(new Runnable() {
@Override
public final void run() {
LiveRecognitionActivity.this.C();
}
}, 10000L);
}
}
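// Switches to the given detection action: logs the step, updates the action animation, and plays
// the voice prompt (preceded by a "well done" cue on steps after the first).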
public void changeType(Detector.DetectionType detectionType, long j) {
E(detectionType);
this.f261x.a(detectionType, j);
if (this.J == 0) {
IMediaPlayer iMediaPlayer = this.f260w;
iMediaPlayer.e(iMediaPlayer.f(detectionType));
} else {
this.f260w.e(R.raw.liveness_well_done);
this.f260w.k(detectionType);
}
}
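// Maps face-quality errors to user-facing prompts (updated every ~10 frames) and reports them;
// when the frame passes all checks, starts the action flow via u().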
public void faceInfoChecker(List<FaceQualityManager.FaceQualityErrorType> list) {
String string;
if (list == null || list.size() == 0) {
u();
return;
}
FaceQualityManager.FaceQualityErrorType faceQualityErrorType = list.get(0);
switch (AnonymousClass4.b[faceQualityErrorType.ordinal()]) {
case 1:
case 2:
case 3:
string = getString(R.string.ec_live_let_see_face);
break;
case 4:
string = getString(R.string.ec_live_no_eye);
break;
case 5:
string = getString(R.string.ec_live_no_mouth);
break;
case 6:
string = getString(R.string.ec_live_let_light_bright);
break;
case 7:
string = getString(R.string.ec_live_leave_far);
break;
case 8:
string = getString(R.string.ec_live_leave_close);
break;
case 9:
string = getString(R.string.ec_live_away_from_too_bright_light);
break;
case 10:
string = getString(R.string.ec_live_let_light_dark);
break;
case 11:
string = getString(R.string.ec_live_keep_face_in_frame);
break;
case 12:
string = getString(R.string.ec_live_please_hold_still);
break;
default:
string = "";
break;
}
if (this.I > 10) {
this.I = 0;
this.f254q.setText(string);
}
F(faceQualityErrorType.name(), string);
}
public void handleNotPass(final long j) {
if (j > 0) {
this.f259v.post(new Runnable() {
@Override
public final void run() {
LiveRecognitionActivity.this.A(j);
}
});
}
}
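// Sets up the sensor monitor, screen helper, handler, audio player and camera wrapper, and tags
// the result source as FACEPP.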
protected void init() {
this.A = new SensorUtil(this);
Screen.a(this);
this.f259v = new Handler();
this.f260w = new IMediaPlayer(this);
this.f258u = new ICamera();
this.B.source = "FACEPP";
}
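// Reads the layout and volume-tip flags from the launch Intent, picks the matching layout, then
// initializes helpers, views and the detector pre-check.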
@Override
protected void onCreate(Bundle bundle) {
super.onCreate(bundle);
this.C = getIntent().getBooleanExtra(INTENT_PARAM_USE_NEW_LAYOUT, false);
this.D = getIntent().getBooleanExtra(INTENT_PARAM_SHOW_INCREASE_VOLUME_TIP, false);
setContentView(this.C ? R.layout.base_activity_liveness_recognition_new : R.layout.base_layout_liveness_recognition);
init();
y();
x();
FintopiaTrackDataUtils.b(this);
}
@Override
protected void onDestroy() {
super.onDestroy();
Detector detector = this.f257t;
if (detector != null) {
detector.release();
}
this.f261x.h();
this.A.c();
FintopiaTrackDataUtils.c(this);
if (this.j.isFinished()) {
return;
}
this.j.o(SpanStatus.CANCELLED);
}
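// Detector failure callback: maps the failure type to a message, flags the result as failed, and
// finishes with that error.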
@Override
public void onDetectionFailed(Detector.DetectionFailedType detectionFailedType) {
int i = R.string.ec_live_detection_failed;
int i2 = AnonymousClass4.a[detectionFailedType.ordinal()];
if (i2 == 1) {
i = R.string.ec_live_detection_failed_action_blend;
} else if (i2 == 2) {
i = R.string.ec_live_detection_failed_not_video;
} else if (i2 == 3) {
i = R.string.ec_live_detection_failed_timeout;
}
this.B.isDetectionFail = true;
t(false, getResources().getString(i));
F(detectionFailedType.name(), getResources().getString(i));
}
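// Per-step success callback: advances the step counter; once all steps pass, copies the Face++
// delta and image data into the result and posts the success finish, otherwise switches to the
// next action.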
@Override
public Detector.DetectionType onDetectionSuccess(DetectionFrame detectionFrame) {
this.f260w.j();
int i = this.J + 1;
this.J = i;
if (i >= this.f261x.f.size()) {
this.f249l.setVisibility(0);
FaceIdCard faceIdCard = this.B;
if (faceIdCard != null) {
faceIdCard.isDetectionFail = false;
FaceIDDataStruct faceIDDataStruct = this.f257t.getFaceIDDataStruct();
FaceIdCard faceIdCard2 = this.B;
faceIdCard2.delta = faceIDDataStruct.delta;
faceIdCard2.liveImageDatum = faceIDDataStruct.images;
this.f259v.post(new Runnable() {
@Override
public final void run() {
LiveRecognitionActivity.this.D();
}
});
}
} else {
changeType(this.f261x.f.get(this.J), 10L);
}
return this.J >= this.f261x.f.size() ? Detector.DetectionType.DONE : this.f261x.f.get(this.J);
}
@Override
public void onFrameDetected(long j, DetectionFrame detectionFrame) {
I(this.A);
H(j, detectionFrame);
}
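// Removes pending callbacks, releases the camera and audio prompt, and finishes; the detection
// flow does not survive going to the background.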
@Override
protected void onPause() {
super.onPause();
this.f259v.removeCallbacksAndMessages(null);
this.f258u.c();
this.f260w.d();
this.k.setKeepScreenOn(false);
ThirdPartEventUtils.K(this);
finish();
}
@Override
public void onPreviewFrame(byte[] bArr, Camera camera) {
if (this.f257t == null || camera == null) {
return;
}
Camera.Size previewSize = camera.getParameters().getPreviewSize();
this.f257t.doDetection(bArr, previewSize.width, previewSize.height, 360 - this.f258u.d(this));
}
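// Reopens the front camera, sizes the preview, recreates the face-quality thresholds and resets
// the step state; shows an error prompt if the front camera cannot be opened.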
@Override
protected void onResume() {
super.onResume();
ThirdPartEventUtils.L(this);
this.y = false;
Camera f = this.f258u.f(this);
this.k.setKeepScreenOn(true);
if (f == null) {
int i = R.string.ec_live_open_front_camera_fail;
BaseUtils.p(this, getString(i));
F(LiveErrorCode.OPEN_FRONT_CAMERA_FAIL.a(), getString(i));
} else {
Camera.getCameraInfo(1, new Camera.CameraInfo());
this.k.setLayoutParams(this.f258u.e());
this.z = new FaceQualityManager(0.5f, 0.5f);
this.f261x.e = -1;
}
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i2) {
this.K = true;
r();
this.f258u.a(this);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
this.K = false;
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i2) {
}
}