Calling Baidu Face Recognition from Android
1. Apply for a license and download the SDK with automatic authorization.
2. Add the license file to the project.
3. Add the SDK files to the project.
4. In the class that extends Application, add:
FaceSDKManager.getInstance().initialize(this, Config.licenseID, Config.licenseFileName);
FaceConfig config = FaceSDKManager.getInstance().getFaceConfig();
// The SDK initializes with recommended default parameters; adjust the values below to your actual needs.
// Configure the liveness actions via a list: LivenessTypeEnum.Eye, LivenessTypeEnum.Mouth, LivenessTypeEnum.HeadUp,
// LivenessTypeEnum.HeadDown, LivenessTypeEnum.HeadLeft, LivenessTypeEnum.HeadRight,
// LivenessTypeEnum.HeadLeftOrRight
List<LivenessTypeEnum> livenessList = new ArrayList<>();
livenessList.add(LivenessTypeEnum.Mouth);
livenessList.add(LivenessTypeEnum.Eye);
livenessList.add(LivenessTypeEnum.HeadUp);
livenessList.add(LivenessTypeEnum.HeadDown);
livenessList.add(LivenessTypeEnum.HeadLeft);
livenessList.add(LivenessTypeEnum.HeadRight);
config.setLivenessTypeList(livenessList);
// Whether the liveness actions are presented in random order (boolean)
config.setLivenessRandom(true);
config.setLivenessRandomCount(2); // number of actions used when random mode is on
// Blur threshold, range (0-1); values below 0.7 recommended
config.setBlurnessValue(VALUE_BLURNESS);
// Brightness threshold, range (0-255); values above 40 recommended
config.setBrightnessValue(VALUE_BRIGHTNESS);
// Size of the cropped face image
config.setCropFaceValue(VALUE_CROP_FACE_SIZE);
// Face yaw/pitch/roll angles, range (-45, 45); -15 to 15 recommended
config.setHeadPitchValue(VALUE_HEAD_PITCH);
config.setHeadRollValue(VALUE_HEAD_ROLL);
config.setHeadYawValue(VALUE_HEAD_YAW);
// Minimum detectable face size (the smallest face that can still be detected in the image), 80-200; smaller values cost more performance, 120-200 recommended
config.setMinFaceSize(VALUE_MIN_FACE_SIZE);
// Face confidence threshold (0-1); values above 0.6 recommended
config.setNotFaceValue(VALUE_NOT_FACE_THRESHOLD);
// Face occlusion threshold (0-1); values below 0.5 recommended
config.setOcclusionValue(VALUE_OCCLUSION);
// Whether to run face image quality checks
// config.setCheckFaceQuality(true);
config.setCheckFaceQuality(false);
// Number of threads used for face detection
config.setFaceDecodeNumberOfThreads(2);
// Whether to enable the prompt sound
config.setSound(true);
FaceSDKManager.getInstance().setFaceConfig(config);
APIService.getInstance().init(this);
APIService.getInstance().setGroupId(Config.groupID);
// Use the API key (AK) and secret key (SK) to fetch an access token for the online APIs such as registration and recognition. For AK/SK security, it is recommended to keep them on your own server.
APIService.getInstance().initAccessTokenWithAkSk(new OnResultListener<AccessToken>() {
@Override
public void onResult(AccessToken result) {
Log.i("wtf", "AccessToken->" + result.getAccessToken());
/* handler.post(new Runnable() {
@Override
public void run() {
Toast.makeText(Myapplicaition.this, "SDK initialized successfully", Toast.LENGTH_LONG).show();
}
});*/
}
@Override
public void onError(FaceError error) {
Log.e("xx", "AccessTokenError:" + error);
error.printStackTrace();
}
}, this, Config.apiKey, Config.secretKey);
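The initialization above normally lives in onCreate() of the Application subclass that is registered in the manifest. It also references a Config class and several VALUE_* constants that the article never shows. The sketch below is only an illustration of what they might look like: the credentials are placeholders and the threshold values are assumptions taken from the recommendations in the comments, so substitute your own license, keys and tuning.

// Hypothetical holder for the credentials used above; fill in the values issued by your Baidu console.
public class Config {
    public static String apiKey = "your-api-key";
    public static String secretKey = "your-secret-key";
    public static String licenseID = "your-license-id";
    public static String licenseFileName = "idl-license.face-android"; // must match the license file added in step 2 (name is an assumption)
    public static String groupID = "your-group-id"; // face group used by the online APIs
}

// Assumed threshold constants, e.g. declared as fields of the Application subclass;
// the values follow the recommendations in the comments above and are starting points only.
public static final float VALUE_BLURNESS = 0.5f;
public static final int VALUE_BRIGHTNESS = 40;
public static final int VALUE_CROP_FACE_SIZE = 400;
public static final int VALUE_HEAD_PITCH = 15;
public static final int VALUE_HEAD_ROLL = 15;
public static final int VALUE_HEAD_YAW = 15;
public static final int VALUE_MIN_FACE_SIZE = 200;
public static final float VALUE_NOT_FACE_THRESHOLD = 0.6f;
public static final float VALUE_OCCLUSION = 0.5f;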
// Face detection Activity from the SDK
public class FaceDetectActivity extends Activity {
// Head-pose tolerance in degrees; not defined in the original snippet, 15 is an assumed starting value
private static final int ANGLE = 15;
// True once the face distance and head pose are all acceptable
private boolean mGoodDetect = false;
private FaceDetectManager faceDetectManager;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// faceDetectManager is created and bound to the camera preview here (omitted)
// Detection result callback (may be invoked several times per second)
faceDetectManager.setOnFaceDetectListener(new FaceDetectManager.OnFaceDetectListener() {
@Override
public void onDetectFace(final int retCode, FaceInfo[] infos, ImageFrame frame) {
Log.d("retCoderetCode","retCode=="+retCode);
// Log.d("DetectLoginActivity", "retCode is:" + retCode);
String str = "";
if (retCode == 0) {
if (infos != null && infos.length > 0 && infos[0] != null) {
FaceInfo info = infos[0];
boolean distance = false;
if (info != null && frame != null) {
if (info.mWidth >= (0.9 * frame.getWidth())) {
distance = false;
str = getResources().getString(R.string.detect_zoom_out);
} else if (info.mWidth <= 0.4 * frame.getWidth()) {
distance = false;
str = getResources().getString(R.string.detect_zoom_in);
} else {
distance = true;
}
}
boolean headUpDown;
if (info != null) {
if (info.headPose[0] >= ANGLE) {
headUpDown = false;
str = getResources().getString(R.string.detect_head_up);
} else if (info.headPose[0] <= -ANGLE) {
headUpDown = false;
str = getResources().getString(R.string.detect_head_down);
} else {
headUpDown = true;
}
boolean headLeftRight;
if (info.headPose[1] >= ANGLE) {
headLeftRight = false;
str = getResources().getString(R.string.detect_head_left);
} else if (info.headPose[1] <= -ANGLE) {
headLeftRight = false;
str = getResources().getString(R.string.detect_head_right);
} else {
headLeftRight = true;
}
if (distance && headUpDown && headLeftRight) {
mGoodDetect = true;
} else {
mGoodDetect = false;
}
}
}
}
}
});
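// Note: the hint string `str` built in the callback above (move closer / farther, raise or
// lower the head) is not displayed anywhere in this snippet; typically you would post it to a
// TextView on the UI thread, for example via runOnUiThread(), to guide the user.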
// Save the captured face image locally
faceDetectManager.setOnTrackListener(new FaceFilter.OnTrackListener() {
@Override
public void onTrack(FaceFilter.TrackedModel trackedModel) {
if (trackedModel.meetCriteria() && mGoodDetect) {
// upload(trackedModel);
mGoodDetect = false;
saveFaceBmp(trackedModel);
}
}
});
}
}
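saveFaceBmp() is called above but its body is not included in the article. A minimal sketch under stated assumptions is shown below: it assumes the TrackedModel exposes the cropped face Bitmap through cropFace() (verify the exact accessor in your SDK version) and simply writes the image as a JPEG into the app's private files directory.

// Sketch only: cropFace() is an assumption, check the TrackedModel API shipped with your SDK version.
private void saveFaceBmp(FaceFilter.TrackedModel trackedModel) {
    Bitmap faceBmp = trackedModel.cropFace();
    if (faceBmp == null) {
        return;
    }
    File file = new File(getFilesDir(), "face_" + System.currentTimeMillis() + ".jpg");
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(file);
        // Write the cropped face as a JPEG into private storage
        faceBmp.compress(Bitmap.CompressFormat.JPEG, 90, fos);
        fos.flush();
        Log.i("FaceDetectActivity", "face saved to " + file.getAbsolutePath());
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (fos != null) {
            try {
                fos.close();
            } catch (IOException ignored) {
            }
        }
    }
}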