Implementing Automatic Two-Way Call Recording on Android 6.0
This article demonstrates, with a working example, how to implement automatic two-way call recording on Android 6.0. It is shared here for your reference; the details follow.
A project of mine required automatic two-way call recording based on Android 6.0. After reading up on Android call-state monitoring and studying open-source recording projects hosted on Git, I put this article together.
First, an introduction to monitoring Android call states (incoming and outgoing calls):
Monitoring the phone's call state relies mainly on two classes:
TelephonyManager and PhoneStateListener
TelephonyManager provides a way to obtain information about the phone's basic telephony services, so an application can use TelephonyManager to detect the state of those services. An application can also register a listener to be notified when the call state changes.
We cannot instantiate TelephonyManager directly; we can only obtain it as a system service:
context.getSystemService(Context.TELEPHONY_SERVICE);
Note: reading certain phone information requires specific permissions.
Main static constants (they correspond to what PhoneStateListener.LISTEN_CALL_STATE reports):
int CALL_STATE_IDLE    // Idle state: no call activity at all.
int CALL_STATE_OFFHOOK // Off-hook state: at least one call is active; that call is dialing, connected, or on hold, and no call is ringing or waiting.
int CALL_STATE_RINGING // Ringing state: the time during which the phone rings for an incoming call, or during which a new incoming call has to wait because another call is already in progress.
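As a small illustration that is not part of the original project, these states can also be observed directly by registering a PhoneStateListener on TelephonyManager. The sketch below assumes it is called from a component that holds a valid Context and that READ_PHONE_STATE has been granted:

import android.content.Context;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;

public class CallStateMonitor {
    // Requires READ_PHONE_STATE for the incoming number to be delivered.
    public static void start(Context context) {
        TelephonyManager tm =
                (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
        tm.listen(new PhoneStateListener() {
            @Override
            public void onCallStateChanged(int state, String incomingNumber) {
                switch (state) {
                    case TelephonyManager.CALL_STATE_IDLE:
                        Log.d("CallStateMonitor", "idle");
                        break;
                    case TelephonyManager.CALL_STATE_OFFHOOK:
                        Log.d("CallStateMonitor", "off-hook");
                        break;
                    case TelephonyManager.CALL_STATE_RINGING:
                        Log.d("CallStateMonitor", "ringing: " + incomingNumber);
                        break;
                }
            }
        }, PhoneStateListener.LISTEN_CALL_STATE);
    }
}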
The project monitors the call state from a service, so we need to be clear about the values that represent the call states in the broadcast:
EXTRA_STATE_IDLE             // In the phone-state-changed broadcast, represents the CALL_STATE_IDLE (idle) state.
EXTRA_STATE_OFFHOOK          // In the phone-state-changed broadcast, represents the CALL_STATE_OFFHOOK (off-hook) state.
EXTRA_STATE_RINGING          // In the phone-state-changed broadcast, represents the CALL_STATE_RINGING (incoming call) state.
ACTION_PHONE_STATE_CHANGED   // The action that identifies the phone-state-changed broadcast (Intent). Note: requires the READ_PHONE_STATE permission.
String EXTRA_INCOMING_NUMBER // Extra of the phone-state-changed broadcast that carries the incoming number.
String EXTRA_STATE           // Extra of the phone-state-changed broadcast that carries the call state.
So how do we implement call monitoring?
When the call state changes, Android sends a broadcast with the action android.intent.action.PHONE_STATE, and when an outgoing call is placed it sends a broadcast with the action
public static final String ACTION_NEW_OUTGOING_CALL = "android.intent.action.NEW_OUTGOING_CALL";
By defining a custom BroadcastReceiver and receiving these two broadcasts, we have everything we need.
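For completeness, here is a sketch of registering the receiver at runtime; the class name PhoneCallReceiver is taken from the code below, everything else is an assumption. The receiver can equally be declared in the manifest with the same two actions, together with the READ_PHONE_STATE and PROCESS_OUTGOING_CALLS permissions:

import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.telephony.TelephonyManager;

public class ReceiverRegistration {
    // Call from e.g. Application.onCreate(); PhoneCallReceiver is the receiver shown below.
    public static void register(Context context) {
        IntentFilter filter = new IntentFilter();
        filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED); // android.intent.action.PHONE_STATE
        filter.addAction(Intent.ACTION_NEW_OUTGOING_CALL);             // android.intent.action.NEW_OUTGOING_CALL
        context.registerReceiver(new PhoneCallReceiver(), filter);
    }
}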
The Java code of the receiver is given below (the Toasts are added purely to make testing easier):
package com.example.hgx.phoneinfo60.recording;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.telephony.TelephonyManager;
import android.widget.Toast;

/**
 * Created by hgx on 2016/6/13.
 */
public class PhoneCallReceiver extends BroadcastReceiver {
    private int lastCallState = TelephonyManager.CALL_STATE_IDLE;
    private boolean isIncoming = false;
    private static String contactNum;
    Intent audioRecorderService;

    public PhoneCallReceiver() {
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        // Outgoing call
        if (intent.getAction().equals(Intent.ACTION_NEW_OUTGOING_CALL)) {
            contactNum = intent.getExtras().getString(Intent.EXTRA_PHONE_NUMBER);
        } else {
            // android.intent.action.PHONE_STATE. The Android documentation has no action dedicated
            // to incoming calls, so anything that is not an outgoing call is handled here.
            String state = intent.getExtras().getString(TelephonyManager.EXTRA_STATE);
            String phoneNumber = intent.getExtras().getString(TelephonyManager.EXTRA_INCOMING_NUMBER);
            int stateChange = 0;
            if (state.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
                // Idle state: the call has ended
                stateChange = TelephonyManager.CALL_STATE_IDLE;
                if (isIncoming) {
                    onIncomingCallEnded(context, phoneNumber);
                } else {
                    onOutgoingCallEnded(context, phoneNumber);
                }
            } else if (state.equals(TelephonyManager.EXTRA_STATE_OFFHOOK)) {
                // Off-hook state
                stateChange = TelephonyManager.CALL_STATE_OFFHOOK;
                if (lastCallState != TelephonyManager.CALL_STATE_RINGING) {
                    // If the previous state was not ringing, this call is outgoing
                    isIncoming = false;
                    onOutgoingCallStarted(context, phoneNumber);
                } else {
                    // Otherwise this call is incoming and has just been answered
                    isIncoming = true;
                    onIncomingCallAnswered(context, phoneNumber);
                }
            } else if (state.equals(TelephonyManager.EXTRA_STATE_RINGING)) {
                // Ringing state: an incoming call
                stateChange = TelephonyManager.CALL_STATE_RINGING;
                // Pass the incoming number from this broadcast (the original passed the last outgoing number here)
                onIncomingCallReceived(context, phoneNumber);
            }
            // Remember the state so the next OFFHOOK/IDLE transition is classified correctly
            // (the original updated this only in the RINGING branch).
            lastCallState = stateChange;
        }
    }

    protected void onIncomingCallStarted(Context context, String number) {
        Toast.makeText(context, "Incoming call is started", Toast.LENGTH_LONG).show();
        context.startService(new Intent(context, AudioRecorderService.class));
    }

    protected void onOutgoingCallStarted(Context context, String number) {
        Toast.makeText(context, "Outgoing call is started", Toast.LENGTH_LONG).show();
        context.startService(new Intent(context, AudioRecorderService.class));
    }

    protected void onIncomingCallEnded(Context context, String number) {
        Toast.makeText(context, "Incoming call is ended", Toast.LENGTH_LONG).show();
        // Stop the service so the recording is finalized (the original called startService again here,
        // which left the recording running).
        context.stopService(new Intent(context, AudioRecorderService.class));
    }

    protected void onOutgoingCallEnded(Context context, String number) {
        Toast.makeText(context, "Outgoing call is ended", Toast.LENGTH_LONG).show();
        context.stopService(new Intent(context, AudioRecorderService.class));
    }

    protected void onIncomingCallReceived(Context context, String number) {
        Toast.makeText(context, "Incoming call is received", Toast.LENGTH_LONG).show();
    }

    protected void onIncomingCallAnswered(Context context, String number) {
        Toast.makeText(context, "Incoming call is answered", Toast.LENGTH_LONG).show();
    }
}
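One point specific to Android 6.0 that the original code does not show: READ_PHONE_STATE, PROCESS_OUTGOING_CALLS, RECORD_AUDIO and WRITE_EXTERNAL_STORAGE are dangerous permissions, so besides being declared in the manifest they must be granted at runtime. A minimal sketch, assuming it is called from one of the app's Activities (the request code 100 and the class name are arbitrary):

import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;

public class PermissionHelper {
    private static final String[] REQUIRED_PERMISSIONS = {
            Manifest.permission.READ_PHONE_STATE,
            Manifest.permission.PROCESS_OUTGOING_CALLS,
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.WRITE_EXTERNAL_STORAGE
    };

    // Request any permission that has not been granted yet; the result arrives in
    // Activity#onRequestPermissionsResult().
    public static void requestIfNeeded(Activity activity) {
        for (String permission : REQUIRED_PERMISSIONS) {
            if (ContextCompat.checkSelfPermission(activity, permission)
                    != PackageManager.PERMISSION_GRANTED) {
                ActivityCompat.requestPermissions(activity, REQUIRED_PERMISSIONS, 100);
                return;
            }
        }
    }
}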
Below is the Java implementation of AudioRecorderService:
package com.example.hgx.phoneinfo60.recording;

import android.app.Service;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Environment;
import android.os.IBinder;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;
import com.example.hgx.phoneinfo60.MyApplication;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

/**
 * Created by hgx on 2016/6/13.
 */
public class AudioRecorderService extends Service {
    private static int RECORD_RATE = 0;
    private static int RECORD_BPP = 16; // bits per sample; the original had 32, which does not match the 16-bit PCM format used below
    private static int RECORD_CHANNEL = AudioFormat.CHANNEL_IN_MONO;
    private static int RECORD_ENCODER = AudioFormat.ENCODING_PCM_16BIT;
    private AudioRecord audioRecorder = null;
    private Thread recordT = null;
    private boolean isRecording = false;
    private int bufferEle = 1024, bytesPerEle = 2; // want 2048 (2K): 1024 elements * 2 bytes each in 16-bit format
    private static int[] recordRate = {44100, 22050, 11025, 8000};
    int bufferSize = 0;
    File uploadFile;

    @Override
    public IBinder onBind(Intent intent) {
        // TODO: return the communication channel to the service.
        // Would maintain the relationship between the caller activity and the callee service; currently unused here.
        return null;
    }

    @Override
    public void onDestroy() {
        if (isRecording) {
            stopRecord();
        } else {
            Toast.makeText(MyApplication.getContext(), "Recording is already stopped", Toast.LENGTH_SHORT).show();
        }
        super.onDestroy();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (!isRecording) {
            startRecord();
        } else {
            Toast.makeText(MyApplication.getContext(), "Recording is already started", Toast.LENGTH_SHORT).show();
        }
        return 1;
    }

    private void startRecord() {
        audioRecorder = initializeRecord();
        if (audioRecorder != null) {
            Toast.makeText(MyApplication.getContext(), "Recording is started", Toast.LENGTH_SHORT).show();
            audioRecorder.startRecording();
        } else {
            return;
        }
        isRecording = true;
        recordT = new Thread(new Runnable() {
            @Override
            public void run() {
                writeToFile();
            }
        }, "Recording Thread");
        recordT.start();
    }

    private void writeToFile() {
        byte[] bData = new byte[bufferEle];
        FileOutputStream fos = null;
        File recordFile = createTempFile();
        try {
            fos = new FileOutputStream(recordFile);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        while (isRecording) {
            int read = audioRecorder.read(bData, 0, bufferEle);
            try {
                // The original wrote the buffer only once, after the loop, so the .raw file held just
                // the last chunk; write every chunk as it is read instead.
                if (read > 0) {
                    fos.write(bData, 0, read);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        try {
            fos.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Converts short data to byte data
    private byte[] writeShortToByte(short[] sData) {
        int size = sData.length;
        byte[] byteArrayData = new byte[size * 2];
        for (int i = 0; i < size; i++) {
            byteArrayData[i * 2] = (byte) (sData[i] & 0x00FF);
            byteArrayData[(i * 2) + 1] = (byte) (sData[i] >> 8);
            sData[i] = 0;
        }
        return byteArrayData;
    }

    // Creates the temporary .raw file used while recording
    private File createTempFile() {
        File tempFile = new File(Environment.getExternalStorageDirectory(), "aditi.raw");
        return tempFile;
    }

    // Creates the destination file for the .wav conversion
    private File createWavFile() {
        File wavFile = new File(Environment.getExternalStorageDirectory(), "aditi_" + System.currentTimeMillis() + ".wav");
        return wavFile;
    }

    /*
     * Convert the raw file to a wav file
     *
     * @param tempFile temporary raw file
     * @param wavFile  destination wav file
     */
    private void convertRawToWavFile(File tempFile, File wavFile) {
        FileInputStream fin = null;
        FileOutputStream fos = null;
        long audioLength = 0;
        long dataLength = audioLength + 36;
        long sampleRate = RECORD_RATE;
        int channel = 1;
        long byteRate = RECORD_BPP * RECORD_RATE * channel / 8;
        byte[] data = new byte[bufferSize];
        try {
            fin = new FileInputStream(tempFile);
            fos = new FileOutputStream(wavFile);
            audioLength = fin.getChannel().size();
            dataLength = audioLength + 36;
            createWaveFileHeader(fos, audioLength, dataLength, sampleRate, channel, byteRate);
            int read;
            while ((read = fin.read(data)) != -1) {
                fos.write(data, 0, read);
            }
            uploadFile = wavFile.getAbsoluteFile();
            fin.close();
            fos.close();
        } catch (FileNotFoundException e) {
            //Log.e("AudioRecorderService:convertRawToWavFile", e.getMessage());
        } catch (IOException e) {
            //Log.e("AudioRecorderService:convertRawToWavFile", e.getMessage());
        } catch (Exception e) {
            //Log.e("AudioRecorderService:convertRawToWavFile", e.getMessage());
        }
    }

    /*
     * Writes the 44-byte RIFF/WAVE header a wav file needs
     *
     * @param fos         output stream of the wav file
     * @param audioLength length of the audio data in bytes
     * @param dataLength  audioLength + 36
     * @param sampleRate  sample rate actually used by the recorder
     * @param channel     number of channels
     * @param byteRate    bytes per second
     */
    private void createWaveFileHeader(FileOutputStream fos, long audioLength, long dataLength,
                                      long sampleRate, int channel, long byteRate) {
        byte[] header = new byte[44];
        header[0] = 'R'; // RIFF/WAVE header
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (dataLength & 0xff);
        header[5] = (byte) ((dataLength >> 8) & 0xff);
        header[6] = (byte) ((dataLength >> 16) & 0xff);
        header[7] = (byte) ((dataLength >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        header[12] = 'f'; // 'fmt ' chunk
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        header[16] = 16; // 4 bytes: size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        header[20] = 1; // format = 1 (PCM)
        header[21] = 0;
        header[22] = (byte) channel;
        header[23] = 0;
        header[24] = (byte) (sampleRate & 0xff);
        header[25] = (byte) ((sampleRate >> 8) & 0xff);
        header[26] = (byte) ((sampleRate >> 16) & 0xff);
        header[27] = (byte) ((sampleRate >> 24) & 0xff);
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        header[32] = (byte) (channel * 16 / 8); // block align (the original hard-coded 2 channels here)
        header[33] = 0;
        header[34] = 16; // bits per sample
        header[35] = 0;
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (audioLength & 0xff);
        header[41] = (byte) ((audioLength >> 8) & 0xff);
        header[42] = (byte) ((audioLength >> 16) & 0xff);
        header[43] = (byte) ((audioLength >> 24) & 0xff);
        try {
            fos.write(header, 0, 44);
        } catch (IOException e) {
            //Log.e("AudioRecorderService:createWaveFileHeader()", e.getMessage());
        }
    }

    /*
     * Delete the temporary raw file
     */
    private void deleteTempFile() {
        File file = createTempFile();
        file.delete();
    }

    /*
     * Initialize the AudioRecord by trying sample rates, formats and channel
     * configurations until the hardware accepts one.
     *
     * @return android.media.AudioRecord
     */
    private AudioRecord initializeRecord() {
        short[] audioFormat = new short[]{AudioFormat.ENCODING_PCM_16BIT, AudioFormat.ENCODING_PCM_8BIT};
        short[] channelConfiguration = new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO};
        for (int rate : recordRate) {
            for (short aFormat : audioFormat) {
                for (short cConf : channelConfiguration) {
                    //Log.d("AudioRecorderService:initializeRecord()", "rate " + rate + " audioFormat " + aFormat + " channel configuration " + cConf);
                    try {
                        int buffSize = AudioRecord.getMinBufferSize(rate, cConf, aFormat);
                        bufferSize = buffSize;
                        if (buffSize != AudioRecord.ERROR_BAD_VALUE) {
                            AudioRecord aRecorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate, cConf, aFormat, buffSize);
                            if (aRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
                                RECORD_RATE = rate;
                                //Log.d("AudioRecorderService:initializeRecord - audioFormat", String.valueOf(aFormat));
                                //Log.d("AudioRecorderService:initializeRecord - channel", String.valueOf(cConf));
                                //Log.d("AudioRecorderService:initializeRecord - recordRate", String.valueOf(rate));
                                return aRecorder;
                            }
                        }
                    } catch (Exception e) {
                        //Log.e("AudioRecorderService:initializeRecord()", e.getMessage());
                    }
                }
            }
        }
        return null;
    }

    /*
     * Stop and release the AudioRecord, convert the raw recording to wav and upload it
     */
    private void stopRecord() {
        if (null != audioRecorder) {
            isRecording = false;
            audioRecorder.stop();
            audioRecorder.release();
            audioRecorder = null;
            recordT = null;
            Toast.makeText(getApplicationContext(), "Recording is stopped", Toast.LENGTH_LONG).show();
        }
        convertRawToWavFile(createTempFile(), createWavFile());
        if (uploadFile.exists()) {
            //Log.d("AudioRecorderService:stopRecord()", "uploadFile exists");
        }
        new UploadFile().execute(uploadFile);
        deleteTempFile();
    }
}
I hope this article is helpful to everyone programming for Android.