全部产品

Android远程双录使用文档

更新时间:2020-07-02 18:20:36

创建项目工程

首先用Android Studio 创建一个项目工程

1

引入sdk

2

然后配置Gradle

3红框为配置项

Android 端流程图

3

Android 端类图

4

下面是更加详细的介绍。

详细介绍

会议相关

1. 加入会议

角色介绍

  • 保险代理:负责创建会议,调用创建会议接口和加入会议接口。
  • 投保人和受益人:负责加入会议,只调用加入会议接口。

接口的调用

首先需要调用创建会议API接口(由保险代理调用)获取会议码,会议码通过API接口获取,接口如下:

  1. //具体代码在文件com.taobao.alinnkit.test.network.ProprietaryCloudNetWork
  2. @Override
  3. public void createMeeting(String name, String userid, final NetworkCallBack callBack) {
  4. Map<String, String> parameters = getParams();
  5. parameters.put("Action", "CreateLive");
  6. parameters.put("AppId", "ulw07lvw-1");
  7. parameters.put("Name", name);
  8. parameters.put("UserId", userid);
  9. sendRequest(parameters, callBack);
  10. }

这个接口会返回一个会议码。

接下来调用加入会议接口(所有角色都需要调用):

  1. //具体代码在文件com.taobao.alinnkit.test.network.ProprietaryCloudNetWork
  2. @Override
  3. public void joinMeeting(String channle, String userid, final NetworkCallBack callBack) {
  4. Map<String, String> parameters = getParams();
  5. parameters.put("Action", "JoinLive");
  6. parameters.put("Channel", channle);
  7. parameters.put("UserId", userid);
  8. sendRequest(parameters, callBack);
  9. }

加入会议接口会返回一些必要的参数,这些参数将要传入RTC SDK中实现视频会议,拿到这些参数,打开RemoteMeetingActivity,并把这些参数传入,代码如下:

  1. //代码文件在com.taobao.alinnkit.test.StartFragment
  2. /**
  3. *
  4. * @param channelId 会议码
  5. * @param userid 手机的uuid
  6. * @param username 参会的用户名
  7. */
  8. private void startVideoCallActivityLocal(final String channelId, final String userid, final String username) {
  9. NetworkFactory.getNetwork().joinMeeting(channelId, userid, new Network.NetworkCallBack() {
  10. @Override
  11. public void success(String json) {
  12. JoinMeetResponseBean joinMeetResponseBean = new Gson().fromJson(json, JoinMeetResponseBean.class);
  13. if (joinMeetResponseBean.getCode().equals("OK")) {
  14. final JoinMeetResponseBean.DataBean.TokenDataBean data = joinMeetResponseBean.getData().getTokenData();
  15. liveId = data.getLiveId();
  16. Log.d("mmmtoken", data.getToken() + "/");
  17. RTCAuthInfo rtcAuthInfo = new RTCAuthInfo();
  18. rtcAuthInfo.data = new RTCAuthInfo.RTCAuthInfo_Data();
  19. rtcAuthInfo.data.appid = data.getAppId();
  20. rtcAuthInfo.data.nonce = data.getNonce();
  21. rtcAuthInfo.data.timestamp = data.getTimestamp();
  22. rtcAuthInfo.data.userid = data.getUserId();
  23. rtcAuthInfo.data.token = data.getToken();
  24. rtcAuthInfo.data.ConferenceId = meetingCode;
  25. List<String> gslb = data.getGslb();
  26. String[] strings = new String[gslb.size()];
  27. gslb.toArray(strings);
  28. rtcAuthInfo.data.gslb = strings;
  29. showAuthInfo(channelId, rtcAuthInfo, username);
  30. }
  31. }
  32. });
  33. }
  34. private void showAuthInfo(String channelId, RTCAuthInfo rtcAuthInfo, String username) {
  35. Intent intent = new Intent(getContext(), RemoteMeetingActivity.class);
  36. Bundle b = new Bundle();
  37. //用户名
  38. b.putString("username", username);
  39. //频道号
  40. b.putString("channel", channelId);
  41. //音频播放
  42. b.putString("liveid", liveId);
  43. b.putSerializable("rtcAuthInfo", rtcAuthInfo);
  44. intent.putExtras(b);
  45. startActivity(intent);
  46. }

RemoteMeetingActivity 是实现远程会议的主界面,接下来的所有操作都是在这个Activity中实现的,首先看下初始化:

  1. //代码位置com.taobao.alinnkit.test.remote.RemoteMeetingActivity
  2. @Override
  3. protected void onCreate(Bundle savedInstanceState) {
  4. super.onCreate(savedInstanceState);
  5. //只列出主要代码
  6. ...
  7. //界面文件,修改界面需要在这个xml修改
  8. setContentView(R.layout.activity_remote_meeting);
  9. //初始化IdrsSdk 初始化AI检测的能力
  10. IdrsSdk.getInstance().detectScreenRotate(this);
  11. IdrsSdk.getInstance().initAudioService();
  12. IdrsSdk.getInstance().startDialogforData(mHandler);
  13. Queue.getInstance().init(1024 * 1000);
  14. //初始化RTC SDK引擎
  15. initRtcEngine();
  16. //实现预览功能
  17. startPreview();
  18. //获取外部传入参数
  19. getIntentData();
  20. //把参数传入RTC SDK,实现真正的加入会议
  21. auth(displayName, channel, mRtcAuthInfo);
  22. //自定义的handler类,实现流程的控制
  23. mHandler = new RemoteHandler(mTiaoguo, completeImageView,completeTextView,completeView, mAliRtcEngine, recoder_text, this, error_content, error_view,error_text1, error_role, error_text2, liveid, androidId, scrollView, isToubaoren,isBaoXianDaili, isShouyiren, nextStatus, this);
  24. }

我们先看下initRtcEngine方法,初始化RTC SDK引擎:

  1. protected void initRtcEngine() {
  2. AliRtcEngine.setH5CompatibleMode(1);
  3. mAliRtcEngine = AliRtcEngine.getInstance(getApplicationContext());
  4. mEventListener = new VideoAliRtcEngineEventListener();
  5. mAliRtcEngine.setRtcEngineEventListener(mEventListener);
  6. mEngineNotify = new VideoAliRtcEngineNotify();
  7. mAliRtcEngine.setRtcEngineNotify(mEngineNotify);
  8. mAliRtcEngine.enableSpeakerphone(true);
  9. //手动发布,手动订阅
  10. mAliRtcEngine.setAutoPublish(false, false);
  11. mEngineNotify.setCallBack(new com.taobao.alinnkit.test.remote.VideoAliRtcEngineNotify.VideoAliRtcEngineNotifyCallBack() {
  12. @Override
  13. public void onRemoteUserUnPublish(AliRtcEngine aliRtcEngine, String s) {
  14. updateRemoteDisplay(s, AliRtcAudioTrackNo, AliRtcVideoTrackNo);
  15. }
  16. @Override
  17. public void onRemoteUserOnLineNotify(String s) {
  18. //远端有人加入会议回调
  19. addRemoteUser(s);
  20. }
  21. @Override
  22. public void onRemoteUserOffLineNotify(String s) {
  23. //远端有人离回调,删除远端视频栏
  24. removeRemoteUser(s);
  25. }
  26. @Override
  27. public void onRemoteTrackAvailableNotify(final String uid, AliRtcEngine.AliRtcAudioTrack aliRtcAudioTrack, AliRtcEngine.AliRtcVideoTrack aliRtcVideoTrack) {
  28. runOnUiThread(new Runnable() {
  29. @Override
  30. public void run() {
  31. //手动订阅流,默认大流
  32. mAliRtcEngine.configRemoteAudio(uid, true);
  33. mAliRtcEngine.configRemoteCameraTrack(uid, true, true);
  34. mAliRtcEngine.configRemoteScreenTrack(uid, true);
  35. mAliRtcEngine.subscribe(uid);
  36. }
  37. });
  38. //实现远程的预览
  39. updateRemoteDisplay(uid, aliRtcAudioTrack, aliRtcVideoTrack);
  40. }
  41. @Override
  42. public void onPlayComplete() {
  43. //播放语音回调,由于流程需要播放语音,所以需要监听播放完成回调
  44. if (STATUS.equals(STOP_RECORD)) {
  45. upload();
  46. } else {
  47. if (!STATUS.equals(TOU_BAO_REN) && !STATUS.equals(INSURANCE_AGENT)
  48. && !STATUS.equals(SELF_INTRODUCTION) && !STATUS.equals(START_SIGN1) &&
  49. !STATUS.equals(PRIVACY) && !STATUS.equals(WARN_RISK) && !STATUS.equals(START_SIGN2)) {
  50. String s = nextStatus.get(STATUS);
  51. Message obtain = Message.obtain();
  52. obtain.what = 0;
  53. obtain.obj = s;
  54. mHandler.sendMessageDelayed(obtain, 3000);
  55. }
  56. }
  57. }
  58. });
  59. mEventListener.setCallback(new com.taobao.alinnkit.test.remote.VideoAliRtcEngineEventListener.VideoAliRtcEngineCallback() {
  60. @Override
  61. public void onJoinChannelResult(int i) {
  62. runOnUiThread(new Runnable() {
  63. @Override
  64. public void run() {
  65. //手动推送音视频流
  66. mAliRtcEngine.configLocalCameraPublish(true);
  67. mAliRtcEngine.configLocalAudioPublish(true);
  68. mAliRtcEngine.configLocalSimulcast(true, AliRtcVideoTrackCamera);
  69. mAliRtcEngine.publish();
  70. mLocalChartUserBean = new ChartUserBean();
  71. mLocalChartUserBean.mUserId = mRtcAuthInfo.data.userid;
  72. mLocalChartUserBean.mIsLocal = true;
  73. mLocalChartUserBean.mUserName = "自己";
  74. mLocalChartUserBean.mCameraSurface = mLocalAliVideoCanvas.view;
  75. //设置tag
  76. mBigVideoViewContainer.setTag(mLocalChartUserBean);
  77. //加入房间成功,将自己添加到远端用户列表第一个
  78. // recycleAdapter.updateData(mLocalChartUserBean, true);
  79. }
  80. });
  81. }
  82. @Override
  83. public void onSubscribeResult(String uid, int result, AliRtcEngine.AliRtcVideoTrack aliRtcVideoTrack, AliRtcEngine.AliRtcAudioTrack aliRtcAudioTrack) {
  84. //注册回调
  85. updateRemoteDisplay(uid, aliRtcAudioTrack, aliRtcVideoTrack);
  86. }
  87. @Override
  88. public void onUnsubscribeResult(int i, String s) {
  89. //解除注册回调
  90. updateRemoteDisplay(s, AliRtcAudioTrackNo, AliRtcVideoTrackNo);
  91. }
  92. });
  93. mAliRtcEngine.registerVideoSampleObserver(new AliRtcEngine.AliVideoObserver() {
  94. @Override
  95. public void onLocalVideoSample(AliRtcEngine.AliVideoSourceType aliVideoSourceType, AliRtcEngine.AliVideoSample aliVideoSample) {
  96. //返回本地的视频流,用于本地人脸的检测
  97. FaceManager.getInstance().init(STATUS, holder, mBigVideoCallView, mHandler, RemoteMeetingActivity.this, isBaoXianDaili, isToubaoren, isShouyiren);
  98. FaceManager.getInstance().detectFace(aliVideoSample);
  99. }
  100. @Override
  101. public void onRemoteVideoSample(String s, AliRtcEngine.AliVideoSourceType aliVideoSourceType, AliRtcEngine.AliVideoSample aliVideoSample) {
  102. //远端的视频流
  103. }
  104. });
  105. mAliRtcEngine.registerAudioObserver(AliRtcEngine.AliAudioType.RAW_DATA_OBSERVER, new AliRtcEngine.AliAudioObserver() {
  106. @Override
  107. public void onCaptureRawData(long l, int i, int i1, int i2, int i3, int i4) {
  108. //本地的音频流回调,用于激活词的识别
  109. if (isBaoXianDaili && (STATUS.equals(PRIVACY) || STATUS.equals(WARN_RISK))) {
  110. byte[] dataToBuffer = ExportVideoData.getInstance().onPcmDataToBuffer(l, i, i2);
  111. Queue.getInstance().addAll(dataToBuffer);
  112. }
  113. }
  114. @Override
  115. public void onCaptureData(long l, int i, int i1, int i2, int i3, int i4) {
  116. }
  117. @Override
  118. public void onRenderData(long l, int i, int i1, int i2, int i3, int i4) {
  119. }
  120. });
  121. }

这个方法主要是做了RTC SDK的初始化,注册一些监听,比如远端上线/下线监听用于绘制远端窗口,语音播放完成用于流程控制,本地的视频流和音频流回调用于AI检测。接下来看下startPreview()方法,实现本地的预览:

  1. /**
  2. * 开始预览
  3. */
  4. private void startPreview() {
  5. //设置本地和远程语音播报的声音大小
  6. mAliRtcEngine.setAudioAccompanyPublishVolume(mProgress);
  7. mAliRtcEngine.setAudioAccompanyPlayoutVolume(mProgress);
  8. //创建用于本地预览的SurfaceView
  9. mBigVideoCallView = new SophonSurfaceView(this);
  10. mBigVideoCallView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
  11. //配置一些设置项
  12. mLocalAliVideoCanvas = new AliRtcEngine.AliVideoCanvas();
  13. mLocalAliVideoCanvas.view = mBigVideoCallView;
  14. //设置渲染模式,一共有四种
  15. mLocalAliVideoCanvas.renderMode = AliRtcRenderModeAuto;
  16. //添加LocalView,我们在本地的xml中需要预留一个用于动态加载的本地view,
  17. //把刚才创建出的SophonSurfaceView添加到本地预留的view中
  18. mBigVideoViewContainer.removeAllViews();
  19. mBigVideoViewContainer.addView(mBigVideoCallView);
  20. //上方的设置添加到引擎
  21. mAliRtcEngine.setLocalViewConfig(mLocalAliVideoCanvas, AliRtcVideoTrackCamera);
  22. //设置横屏预览
  23. mAliRtcEngine.setDeviceOrientationMode(AliRtcOrientationModeLandscapeLeft);
  24. mAliRtcEngine.startPreview();
  25. }

这个方法主要是实现预览,首先需要在本地的xml中预留一个View用于动态加载预览的View,因为预览的View是new出来的,然后设置一些配置项给RTC引擎,实现本地的预览。

接下来看下 auth()方法:

  1. /**
  2. * 鉴权并且加入频道
  3. */
  4. public void auth(String displayName, String channel, RTCAuthInfo rtcAuthInfo) {
  5. AliRtcAuthInfo aliRtcAuthInfo = getAliRtcAuthInfo(rtcAuthInfo, channel);
  6. joinChanel(aliRtcAuthInfo, displayName);
  7. }
  8. /**
  9. * 加入频道
  10. */
  11. public void joinChanel(AliRtcAuthInfo authInfo, String displayName) {
  12. if (authInfo != null) {
  13. Log.e("mmm", "joinChanel " + displayName);
  14. mAliRtcEngine.joinChannel(authInfo, displayName);
  15. }
  16. }

这个方法真正实现加入会议,到这里就实现加入会议。

2. 预览界面的书写

上方大概介绍了一些流程,下面再详细介绍一下。

预览本地

首先需要在xml中预留一个View,用于动态加载预览的View:

  1. <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
  2. xmlns:app="http://schemas.android.com/apk/res-auto"
  3. xmlns:tools="http://schemas.android.com/tools"
  4. android:layout_width="match_parent"
  5. android:layout_height="match_parent"
  6. tools:context=".remote.RemoteMeetingActivity">
  7. <FrameLayout
  8. android:id="@+id/content_view"
  9. android:layout_width="match_parent"
  10. android:layout_height="match_parent">
  11. </FrameLayout>
  12. </RelativeLayout>

比如这里我们预留了一个FrameLayout用于动态加载预览View:

  1. mBigVideoViewContainer = findViewById(R.id.content_view);

拿到这个View的对象。

接下来创建一个预览的View,动态添加到本地预留的view中实现预览:

  1. //动态创建预览View
  2. mBigVideoCallView = new SophonSurfaceView(this);
  3. mBigVideoCallView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
  4. //把预览的View赋值给本地预留的View
  5. mBigVideoViewContainer.removeAllViews();
  6. mBigVideoViewContainer.addView(mBigVideoCallView);

最后把预览的view设置给引擎:

  1. ....
  2. //把预览view设置给引擎
  3. mLocalAliVideoCanvas = new AliRtcEngine.AliVideoCanvas();
  4. mLocalAliVideoCanvas.view = mBigVideoCallView;
  5. mLocalAliVideoCanvas.renderMode = AliRtcRenderModeAuto;
  6. mAliRtcEngine.setLocalViewConfig(mLocalAliVideoCanvas, AliRtcVideoTrackCamera);
  7. ....

这个可以实现本地的预览。

预览远程

首先等待远端的通知:

  1. @Override
  2. public void onRemoteTrackAvailableNotify(final String uid, AliRtcEngine.AliRtcAudioTrack aliRtcAudioTrack, AliRtcEngine.AliRtcVideoTrack aliRtcVideoTrack) {
  3. runOnUiThread(new Runnable() {
  4. @Override
  5. public void run() {
  6. //手动订阅流,默认大流
  7. mAliRtcEngine.configRemoteAudio(uid, true);
  8. mAliRtcEngine.configRemoteCameraTrack(uid, true, true);
  9. mAliRtcEngine.configRemoteScreenTrack(uid, true);
  10. mAliRtcEngine.subscribe(uid);
  11. }
  12. });
  13. updateRemoteDisplay(uid, aliRtcAudioTrack, aliRtcVideoTrack);
  14. }

然后拿到远端的预览View:

  1. private void updateRemoteDisplay(final String uid, AliRtcEngine.AliRtcAudioTrack at, final AliRtcEngine.AliRtcVideoTrack vt) {
  2. runOnUiThread(new Runnable() {
  3. @Override
  4. public void run() {
  5. if (null == mAliRtcEngine) {
  6. return;
  7. }
  8. AliRtcRemoteUserInfo remoteUserInfo = mAliRtcEngine.getUserInfo(uid);
  9. // 如果没有,说明已经退出了或者不存在。则不需要添加,并且删除
  10. if (remoteUserInfo == null) {
  11. // remote user exit room
  12. Log.e("mmm", "updateRemoteDisplay remoteUserInfo = null, uid = " + uid);
  13. return;
  14. }
  15. //change
  16. AliRtcEngine.AliVideoCanvas cameraCanvas = remoteUserInfo.getCameraCanvas();
  17. AliRtcEngine.AliVideoCanvas screenCanvas = remoteUserInfo.getScreenCanvas();
  18. //视频情况
  19. if (vt == AliRtcVideoTrackNo) {
  20. //没有视频流
  21. cameraCanvas = null;
  22. screenCanvas = null;
  23. } else if (vt == AliRtcVideoTrackCamera) {
  24. //相机流
  25. screenCanvas = null;
  26. cameraCanvas = createCanvasIfNull(cameraCanvas);
  27. //SDK内部提供进行播放的view
  28. mAliRtcEngine.setRemoteViewConfig(cameraCanvas, uid, AliRtcVideoTrackCamera);
  29. } else if (vt == AliRtcVideoTrackScreen) {
  30. //屏幕流
  31. cameraCanvas = null;
  32. screenCanvas = createCanvasIfNull(screenCanvas);
  33. //SDK内部提供进行播放的view
  34. mAliRtcEngine.setRemoteViewConfig(screenCanvas, uid, AliRtcVideoTrackScreen);
  35. } else if (vt == AliRtcVideoTrackBoth) {
  36. //多流
  37. cameraCanvas = createCanvasIfNull(cameraCanvas);
  38. //SDK内部提供进行播放的view
  39. mAliRtcEngine.setRemoteViewConfig(cameraCanvas, uid, AliRtcVideoTrackCamera);
  40. screenCanvas = createCanvasIfNull(screenCanvas);
  41. //SDK内部提供进行播放的view
  42. mAliRtcEngine.setRemoteViewConfig(screenCanvas, uid, AliRtcVideoTrackScreen);
  43. } else {
  44. return;
  45. }
  46. ChartUserBean chartUserBean = convertRemoteUserInfo(remoteUserInfo, cameraCanvas, screenCanvas);
  47. recycleAdapter.updateData(chartUserBean, true);
  48. }
  49. });

然后就可以把这个预览的View赋值到你本地预留的View中显示,这样就可以实现远端的预览。

3. 接口的介绍

除了上方两个创建会议接口和加入会议接口之外,还有其他的接口需要在不同阶段调用。

开始录制接口,在你准备录制的时候调用:

  1. @Override
  2. public void startRecoder(String id, String userid, final NetworkCallBack callBack) {
  3. Map<String, String> parameters = getParams();
  4. parameters.put("Action", "UpdateLive");
  5. parameters.put("LiveId", id);
  6. parameters.put("UserId", userid);
  7. parameters.put("Status", "START_RECORDING");
  8. sendRequest(parameters, callBack);
  9. }

结束录制接口,在你准备结束录制的时候调用:

  1. @Override
  2. public void stopRecoder(String liveid, String userid, NetworkCallBack callBack) {
  3. Map<String, String> parameters = getParams();
  4. parameters.put("Action", "UpdateLive");
  5. parameters.put("LiveId", liveid);
  6. parameters.put("UserId", userid);
  7. parameters.put("Status", "STOP_RECORDING");
  8. sendRequest(parameters, callBack);
  9. }

结束会议接口,一般在结束录制之后调用:

  1. @Override
  2. public void stopMeeting(String liveid, String userid, NetworkCallBack callBack) {
  3. Map<String, String> parameters = getParams();
  4. parameters.put("Action", "UpdateLive");
  5. parameters.put("LiveId", liveid);
  6. parameters.put("UserId", userid);
  7. parameters.put("Status", "COMPLETED");
  8. sendRequest(parameters, callBack);
  9. }

离开会议,一般是在中途离开会议调用:

  1. @Override
  2. public void leaveMeeting(String channle, String userid, NetworkCallBack callBack) {
  3. Map<String, String> parameters = getParams();
  4. parameters.put("Action", "ExitLive");
  5. parameters.put("Channel", channle);
  6. parameters.put("UserId", userid);
  7. sendRequest(parameters, callBack);
  8. }

4 退出/终止会议

退出会议需要调用上方接口

  1. @Override
  2. public void leaveMeeting(String channle, String userid, NetworkCallBack callBack) {
  3. Map<String, String> parameters = getParams();
  4. parameters.put("Action", "ExitLive");
  5. parameters.put("Channel", channle);
  6. parameters.put("UserId", userid);
  7. sendRequest(parameters, callBack);
  8. }

终止会议,调用接口

  1. @Override
  2. public void stopMeeting(String liveid, String userid, NetworkCallBack callBack) {
  3. Map<String, String> parameters = getParams();
  4. parameters.put("Action", "UpdateLive");
  5. parameters.put("LiveId", liveid);
  6. parameters.put("UserId", userid);
  7. parameters.put("Status", "COMPLETED");
  8. sendRequest(parameters, callBack);
  9. }

除了调用这两个接口还需要调用RTC接口

  1. mAliRtcEngine.leaveChannel();
  2. mAliRtcEngine.destroy();

AI检测

1. 初始化

  1. //初始化人脸检测识别活体
  2. IdrsSdk.getInstance().init(this);
  3. //初始化动作识别
  4. IdrsSdk.getInstance().initHandDetection(this);
  5. //初始化身份证识别
  6. IdrsSdk.getInstance().initOCR(this);
  7. IdrsSdk.getInstance().detectScreenRotate(this);
  8. //初始化激活词识别
  9. IdrsSdk.getInstance().initAudioService();
  10. IdrsSdk.getInstance().startDialogforData(mHandler);

Demo初始化放到了两个地方分开初始化,如没有特殊要求可以放到一起做。

2. Ai能力检测

  1. mAliRtcEngine.registerVideoSampleObserver(new AliRtcEngine.AliVideoObserver() {
  2. @Override
  3. public void onLocalVideoSample(AliRtcEngine.AliVideoSourceType aliVideoSourceType, AliRtcEngine.AliVideoSample aliVideoSample) {
  4. //获取本地的视频流,用于ai检测
  5. FaceManager.getInstance().init(STATUS, holder, mBigVideoCallView, mHandler, RemoteMeetingActivity.this, isBaoXianDaili, isToubaoren, isShouyiren);
  6. FaceManager.getInstance().detectFace(aliVideoSample);
  7. }

Ai检测能力主要放到FaceManager类里去实现,下面分别看下实现:

  1. //有删减,只显示主要代码,逻辑代码删除
  2. public void detectFace(AliRtcEngine.AliVideoSample aliVideoSample) {
  3. byte[] data = ExportVideoData.getInstance().onYUVDataToBuffer(aliVideoSample.dataFrameY, aliVideoSample.width, aliVideoSample.height);
  4. ....
  5. //进行人脸检测
  6. DectetResult[] dectetResult = IdrsSdk.getInstance().startDetectFace(data, aliVideoSample.width, aliVideoSample.height, 270, true);
  7. if (dectetResult != null && dectetResult.length > 0) {
  8. //画人脸框
  9. boolean b = drawRect(dectetResult, aliVideoSample.width, aliVideoSample.height);
  10. //进行人脸识别
  11. mExecutor.execute(new ReconizeRunnable(data, dectetResult, aliVideoSample.width, aliVideoSample.height));
  12. //进行活体检测
  13. mExecutor.execute(new LivenessRunnable(data, dectetResult, aliVideoSample.width, aliVideoSample.height));
  14. //进行身份证识别
  15. mExecutor.execute(new OCRRunnable(aliVideoSample.width, aliVideoSample.height, data, floats, new OCRCallBack() {
  16. //进行动作识别
  17. ArrayList<HandDetectionResult> handDetectionResults = IdrsSdk.getInstance().startHandDetection(
  18. data, aliVideoSample.width, aliVideoSample.height, 270, true);
  19. if (handDetectionResults != null) {
  20. //绘制手势框
  21. drawLines(handDetectionResults, aliVideoSample.width, aliVideoSample.height);
  22. }
  23. }
  24. ...
  25. }

上边主要介绍人脸相关检测,代码和demo对比有删减,主要展示ai检测能力代码,删除逻辑代码。

3. 激活词识别

  1. //初始化激活词识别
  2. IdrsSdk.getInstance().initAudioService();
  3. IdrsSdk.getInstance().startDialogforData(mHandler);
  4. //开始检测
  5. mAliRtcEngine.registerAudioObserver(AliRtcEngine.AliAudioType.RAW_DATA_OBSERVER, new AliRtcEngine.AliAudioObserver() {
  6. @Override
  7. public void onCaptureRawData(long l, int i, int i1, int i2, int i3, int i4) {
  8. //获取本地的音频流进行激活词识别
  9. byte[] dataToBuffer = ExportVideoData.getInstance().onPcmDataToBuffer(l, i, i2);
  10. Queue.getInstance().addAll(dataToBuffer);
  11. }

初始化的时候会传入一个handler,当识别到激活词会把结果传回handler,msg.what = 100;

  1. if (msg.what == 100) {
  2. Gson gson = new Gson();
  3. AudioBean audioBean = gson.fromJson((String) msg.obj, AudioBean.class);
  4. Log.d("mmmjihuoci", audioBean.getWord());
  5. }

流程控制

demo里的流程控制主要是通过Handler进行控制的,实现类为RemoteHandler,流程控制比较复杂,先介绍下大致的过程。

基本流程介绍

首先包括11个流程

  1. public static final String START_RECORD = "开始录制";
  2. public static final String INSURANCE_AGENT = "保险代理";
  3. public static final String TOU_BAO_REN = "投保人";
  4. public static final String PRIVACY = "隐私";
  5. public static final String SELF_INTRODUCTION = "自我介绍";
  6. public static final String CONTENT_HINT = "内容提示";
  7. public static final String WARN_RISK = "风险预警";
  8. public static final String START_SIGN = "签字";
  9. public static final String START_SIGN1 = "签字1";
  10. public static final String START_SIGN2 = "签字2";
  11. public static final String STOP_RECORD = "结束录制";

按照从上到下的顺序进行。

然后包括三个角色:

  • 保险代理
  • 投保人
  • 受益人

然后每个角色的检测项是可配置的,比如

  • 保险代理包括检测项:START_RECORD,INSURANCE_AGENT,PRIVACY,WARN_RISK
  • 投保人包括检测项:TOU_BAO_REN
  • 受益人包括检测项:SELF_INTRODUCTION,START_SIGN1

每个远端代表的角色是可配置的,比如:

  • 一端为 :保险代理
  • 另一端为 :投保人和受益人

这样的话流程配置有很多种的变化,目前demo实现的就是上面介绍的这种,根据需求的不同,需要做相应的改动,这个属于业务逻辑的内容,可以按照demo去做,也可以自己去实现,demo不是最优的方式。

大概介绍下demo的实现方式。

  1. if (msg.what == 0) {
  2. //实现流程的切换
  3. }
  4. if (msg.what == 1) {
  5. //实现超时处理,假如一个流程超过了规定时间还没有完成,就会触发
  6. }
  7. if (msg.what == 4) {
  8. //成功采取保险代理头像
  9. }
  10. if (msg.what == 5) {
  11. //成功采取投保人头像
  12. }
  13. if (msg.what == 6) {
  14. //成功识别身份证
  15. }
  16. if (msg.what == 7) {
  17. //成功检测签字动作
  18. }
  19. if (msg.what == 100) {
  20. //激活词回调
  21. }

流程章节同步问题与信息同步问题

由于多端远程会议,每端都需要做检测,所以章节需要同步,检测结果也需要同步,比如:

  • 目前通过端A和B进行远程双录
  • A端目前在第2阶段
  • B端也在第2阶段
  • A端需要做人脸采集,采集完成后进入第3阶段
  • 这时B端也需要同步进入第三阶段,也需要显示A端人脸采集是否成功,信息的同步

所以涉及章节的同步和信息的同步,解决这个问题采用了接口轮询的方式,涉及两个接口:

1. 获取章节

  1. public void getSection(String liveid, final TaskCallBack callBack) {
  2. String url = HOST + "/api/lives/%s/section";
  3. String format = String.format(url, liveid);
  4. Request request = new Request.Builder()
  5. .url(format)
  6. .get()
  7. .build();
  8. Call call = mOkHttpClient.newCall(request);
  9. ....
  10. }

这个接口的作用是不断的轮询,拉取最新的章节和信息,每端都需要轮询此接口,获取最新信息。

2. 推送章节

  1. public void updataSection(String liveid, String section, final TaskCallBack callBack) {
  2. String url = HOST + "/api/lives/%s/section";
  3. String format = String.format(url, liveid);
  4. MediaType mediaType = MediaType.parse("application/json; charset=utf-8");
  5. UpdataSelectionRequestBean updataSelectionRequestBean = new UpdataSelectionRequestBean();
  6. updataSelectionRequestBean.setLiveId(liveid);
  7. updataSelectionRequestBean.setSection(section);
  8. String json = new Gson().toJson(updataSelectionRequestBean);
  9. RequestBody requestBody = RequestBody.create(mediaType, json);
  10. Request request = new Request.Builder()
  11. .url(format)
  12. .put(requestBody)
  13. .build();
  14. Call call = mOkHttpClient.newCall(request);
  15. ....
  16. }

这个的作用是把最新的章节推送到服务器,供其他端获取最新的章节。实现方式:

  • A端目前在第2阶段
  • B端也在第2阶段
  • A端需要做人脸采集,采集完成后进入第3阶段,推送最新的章节到服务器
  • 这时B端不断轮询获取章节接口,拉取到最新章节后更新

活体信息同步和人脸同框同步

  • 当一端识别非真人时,需要把所有端都同步为非真人,并且把流程暂停。
  • 当一端的角色离开视频的时候,需要同步各端有人离开,并把流程终止。这个的实现涉及三个接口:

1. 上传问题

  1. //代码在com.taobao.alinnkit.test.network.API
  2. public void putQuestion(String liveid, String userid, String question, final TaskCallBack callBack) {
  3. String url = HOST + "/api/lives/%s/event";
  4. String format = String.format(url, liveid);
  5. MediaType mediaType = MediaType.parse("application/json; charset=utf-8");
  6. QuestionBean questionBean = new QuestionBean();
  7. questionBean.setQuestion(question);
  8. questionBean.setUserId(userid);
  9. String json = new Gson().toJson(questionBean);
  10. RequestBody requestBody = RequestBody.create(mediaType, json);
  11. Request request = new Request.Builder()
  12. .url(format)
  13. .post(requestBody)
  14. .build();
  15. Call call = mOkHttpClient.newCall(request);
  16. ...
  17. }

一端出现了问题,就向服务器上报,服务器会保存异常信息。

2. 删除问题

  1. public void deleteQuestion(String liveid, String userid, final TaskCallBack callBack) {
  2. String url = HOST + "/api/lives/%s/event";
  3. String format = String.format(url, liveid);
  4. MediaType mediaType = MediaType.parse("application/json; charset=utf-8");
  5. DeleteQuetionBean deleteQuetionBean = new DeleteQuetionBean();
  6. deleteQuetionBean.setUserId(userid);
  7. String json = new Gson().toJson(deleteQuetionBean);
  8. RequestBody requestBody = RequestBody.create(mediaType, json);
  9. Request request = new Request.Builder()
  10. .url(format)
  11. .delete(requestBody)
  12. .build();
  13. Call call = mOkHttpClient.newCall(request);
  14. .....
  15. }

某端问题解除,就删除服务器的异常信息。

3. 获取异常信息

  1. public void getQuestion(String liveid, final TaskCallBack callBack) {
  2. String url = HOST + "/api/lives/%s/events";
  3. String format = String.format(url, liveid);
  4. Request request = new Request.Builder()
  5. .url(format)
  6. .get()
  7. .build();
  8. Call call = mOkHttpClient.newCall(request);
  9. }

每端都需要不断轮询这个接口查询是否有错误信息,如果有就停止流程,没有就恢复流程。

  1. 具体实现也在RemoteHandler里:
  2. if (msg.what == 8) {
  3. //活体非真人回调
  4. // 上报信息异常
  5. }
  6. if (msg.what == 9) {
  7. //活体真人回调
  8. //解除异常
  9. }
  10. if (msg.what == 13) {
  11. //屏幕前没人
  12. //上报异常
  13. }
  14. if (msg.what == 14) {
  15. //屏幕前有人
  16. //解除异常
  17. }
  18. if (msg.what == 11) {
  19. //轮询调用获取异常信息接口
  20. }
  21. if (msg.what == 3) {
  22. //轮询获取最新章节接口
  23. }

播放旁白

调用RTC的接口播放旁白

  1. mAliRtcEngine.startAudioAccompany(path, false, false, 1);

参数介绍

  • 播放的MP4文件地址,支持网络的url
  • 是否仅本地播放,true表示仅仅本地播放,false表示本地播放且推流到远端。
  • 是否替换mic的音频流,true表示伴奏音频流替换本地mic音频流,false表示伴奏音频流和mic音频流同时推
  • 循环播放次数,-1表示一直循环。

辅助信息上传

拿到上传的地址url

  1. NetworkFactory.getNetwork().ossUpdata(String fileName, NetworkCallBack callBack)

首先调用上方的接口,参数如下

  • fileName:你的meta文件的名字
  • callBack:网络请求结果回调
  • 回调结果:如果成功会返回一个url,这个url用于实现真正的上传

上方成功拿到上传的url之后,就可以实现真正的上传了

  1. NetworkFactory.getNetwork().updataFile((String url, String path, final NetworkCallBack callBack)

参数如下

  • url:上方返回的url,也是上传的地址
  • path:本地meta文件的路径
  • callBack:网络请求结果回调

这个如果调用成功,那么就上传成功了

屏幕共享

拿到屏幕流

我们这里的具体操作不是拿到屏幕的流,而是拿到屏幕截图,然后把屏幕截图转化为流,通过RTC服务传输,达到屏幕共享的目的

  1. mMediaProjectionManager = (MediaProjectionManager) getApplicationContext().getSystemService(Context.MEDIA_PROJECTION_SERVICE);
  2. Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent();
  3. startActivityForResult(captureIntent, 1002);
  4. 启动系统的Activity,通知用户要截图,并让用户同意
  5. //切换推送流为屏幕流
  6. mAliRtcEngine.configLocalCameraPublish(false);
  7. mAliRtcEngine.configLocalScreenPublish(true);
  8. mAliRtcEngine.publish();
  9. final int width = 1280;
  10. final int height = 720;
  11. final int dpi = 1;
  12. //构建屏幕截图的类
  13. mediaProjection = mMediaProjectionManager.getMediaProjection(Activity.RESULT_OK, data);
  14. //构建推送屏幕流的类
  15. AliRtcEngine.AliRawDataStreamType type = mAliRtcEngine.isLocalScreenPublishEnabled() ?
  16. AliRtcEngine.AliRawDataStreamType.AliRTCSdk_Streame_Type_Screen :
  17. AliRtcEngine.AliRawDataStreamType.AliRTCSdk_Streame_Type_Capture;
  18. //获取videoRawDataInterface对象
  19. videoRawDataInterface = mAliRtcEngine.registerVideoRawDataInterface(type, AliRtcRenderModeAuto);
  20. //真正的推屏幕流逻辑在这个里面
  21. mRecorder = new ScreenRecorder(width, height, dpi, mediaProjection, videoRawDataInterface);
  22. mRecorder.start();

这段代码主要是切换推流的类型,构建屏幕截图的类和推送屏幕流的类

推送

  1. //代码位置com.taobao.alinnkit.test.remote.ScreenRecorder
  2. private void method1() {
  3. isRun = true;
  4. try {
  5. //构建ImageReader
  6. ImageReader imageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
  7. mVirtualDisplay = mMediaProjection.createVirtualDisplay(TAG + "-display",
  8. mWidth, mHeight, mDpi, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
  9. imageReader.getSurface(), null, null);
  10. Log.d(TAG, "created virtual display: " + mVirtualDisplay);
  11. while (isRun) {
  12. SystemClock.sleep(30);
  13. //通过imageReader拿到屏幕截图
  14. Image image = imageReader.acquireNextImage();
  15. if (image == null) {
  16. continue;
  17. }
  18. int width = image.getWidth();
  19. int height = image.getHeight();
  20. final Image.Plane[] planes = image.getPlanes();
  21. final ByteBuffer buffer = planes[0].getBuffer();
  22. int piexlStride = planes[0].getPixelStride();
  23. int rowStride = planes[0].getRowStride();
  24. int rowPadding = rowStride - piexlStride * width;
  25. Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / piexlStride, height, Bitmap.Config.ARGB_8888);
  26. bitmap.copyPixelsFromBuffer(buffer);
  27. image.close();
  28. //把屏幕截图转换为nv21格式
  29. byte[] nv21 = ImageUtil.getNV21(mWidth, mHeight, bitmap);
  30. /**
  31. * 上报rtc,通过rtc把 nv21数据推送到服务器
  32. */
  33. deliverFrame(nv21);
  34. }
  35. } finally {
  36. // release();
  37. }
  38. }

这个方法负责拿到屏幕截图,然后把截图转换为nv21格式,最后通过RTC把数据推送到服务器

为什么要转换nv21?

因为RTC只支持nv21格式的数据