Real-time audio recording and playback in Android, with thread and callback handling

I want to record live audio and play it back. As far as the user interface goes, the app has only three buttons: one to start recording and streaming, one to play a pre-recorded file, and a last one to stop the current task (recording/playing). For this I used the AudioRecord and AudioTrack classes for recording and playback respectively. My program looks like this:

/**
 * @author Amit
 */

public class AudioRecorder extends Activity {
    private String LOG_TAG = null;
    /* variables which are required to generate and manage the UI of the App */
    // private RecordButton mRecordButton = null;
    private Button recordBtn, stopBtn, playBtn;
    /*
     * variables which are required for the actual functioning of the recording
     * and playing
     */
    private AudioRecord recorder = null;
    private AudioTrack player = null;
    private AudioManager audioManager = null;
    private int recorderBufSize, recordingSampleRate;
    private int trackBufSize;
    private short[] audioData;
    private boolean isRecording = false, isPlaying = false;
    private Thread startRecThread;
    private AudioRecord.OnRecordPositionUpdateListener posUpdateListener;

    /**
     * constructor method for initializing the variables
     */
    public AudioRecorder() {
        super();
        LOG_TAG = "Constructor";
        recorderBufSize = recordingSampleRate = trackBufSize = 0;
        // init function will initialize all the necessary variables ...
        init();
        if (recorder != null && player != null) {
            Log.e(LOG_TAG, "recorder and player initialized");
            audioData = new short[recorderBufSize / 2]; // since we r reading shorts
        } else {
            Log.e(LOG_TAG, "Problem inside init function ");
        }
        posUpdateListener = new AudioRecord.OnRecordPositionUpdateListener() {
            int numShortsRead = 0;

            @Override
            public void onPeriodicNotification(AudioRecord rec) {
                // TODO Auto-generated method stub
                // String LOG_TAG = Thread.currentThread().getName();
                // Log.e(LOG_TAG, "inside position listener");
                audioData = new short[recorderBufSize / 2]; // divide by 2 since now we are reading shorts
                numShortsRead = rec.read(audioData, 0, audioData.length);
                player.write(audioData, 0, numShortsRead);
            }

            @Override
            public void onMarkerReached(AudioRecord recorder) {
                // TODO Auto-generated method stub
                Log.e(LOG_TAG, "Marker Reached");
            }
        };
        // listener will be called every time 160 frames are reached
        recorder.setPositionNotificationPeriod(160);
        recorder.setRecordPositionUpdateListener(posUpdateListener);
        Log.e(LOG_TAG, "inside constructor");
    }

    private void init() {
        LOG_TAG = "initFunc";
        // int[] mSampleRates = new int[] { 8000, 11025, 22050, 44100 };
        short audioFormat = AudioFormat.ENCODING_PCM_16BIT;
        // for (int rate : mSampleRates) {
        this.recordingSampleRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
        try {
            // Log.d(LOG_TAG, "Attempting rate " + rate + "Hz, bits: "
            // + audioFormat);
            int bufrSize = AudioRecord.getMinBufferSize(this.recordingSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, audioFormat);
            // lets find out the minimum required size for AudioTrack
            int audioTrackBufSize = AudioTrack.getMinBufferSize(this.recordingSampleRate,
                    AudioFormat.CHANNEL_OUT_MONO, audioFormat);
            if (bufrSize != AudioRecord.ERROR_BAD_VALUE && bufrSize != AudioRecord.ERROR) {
                // check if we can instantiate and have a success
                if (audioTrackBufSize >= bufrSize) {
                    this.recorderBufSize = audioTrackBufSize;
                } else {
                    this.recorderBufSize = bufrSize;
                }
                AudioRecord rec = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
                        this.recordingSampleRate, AudioFormat.CHANNEL_IN_MONO,
                        audioFormat, this.recorderBufSize);
                if (rec != null && rec.getState() == AudioRecord.STATE_INITIALIZED) {
                    // storing variables for future use . . .
                    // this.recordingSampleRate = rate;
                    // this.recorderBufSize = bufrSize;
                    Log.e(LOG_TAG, "Returning..(rate:channelConfig:audioFormat:recorderBufSize)"
                            + this.recordingSampleRate + ":" + AudioFormat.CHANNEL_IN_MONO + ":"
                            + audioFormat + ":" + this.recorderBufSize);
                    // Now create an instance of the AudioTrack
                    // int audioTrackBufSize = AudioTrack.getMinBufferSize(rate,
                    // AudioFormat.CHANNEL_OUT_MONO, audioFormat);
                    Log.e(LOG_TAG, "Audio Record / Track / Final buf size :" + bufrSize + "/ "
                            + audioTrackBufSize + "/ " + this.recorderBufSize);
                    this.player = new AudioTrack(AudioManager.STREAM_MUSIC,
                            this.recordingSampleRate, AudioFormat.CHANNEL_OUT_MONO,
                            audioFormat, this.recorderBufSize, AudioTrack.MODE_STREAM);
                    this.recorder = rec;
                    this.player.stop();
                    this.player.flush();
                    this.player.setPlaybackRate(this.recordingSampleRate);
                    return;
                }
            }
        } catch (IllegalArgumentException e) {
            Log.d(LOG_TAG, this.recordingSampleRate + "Exception, keep trying.", e);
        } catch (Exception e) {
            Log.e(LOG_TAG, this.recordingSampleRate + "Some Exception!!", e);
        }
        // for loop for channel config ended here. . . .
        // for loop for audioFormat ended here. . .
        // }// for loop for sampleRate
        return;
    }

    private void startPlaying() {
        LOG_TAG = "startPlaying";
        Log.e(LOG_TAG, "start Playing");
    }

    private void stopPlaying() {
        LOG_TAG = "stopPlaying";
        Log.e(LOG_TAG, "stop Playing");
    }

    private void startRecording() {
        LOG_TAG = "startRecording";
        /* start a separate recording thread from here . . . */
        startRecThread = new Thread() {
            @Override
            public void run() {
                // TODO Auto-generated method stub
                android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
                // String LOG_TAG = Thread.currentThread().getName();
                if (recorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                    recorder.startRecording();
                }
                // Log.e(LOG_TAG, "running" + recorder.getRecordingState());
                while (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
                    recorder.read(audioData, 0, audioData.length);
                    try {
                        Thread.sleep(1000); // sleep for 2s
                    } catch (InterruptedException e) {
                        // TODO Auto-generated catch block
                        Log.e("run Method", "recorder thread is interrupted");
                        e.printStackTrace();
                    }
                }
            }
        };
        setVolumeControlStream(AudioManager.STREAM_MUSIC);
        audioManager.setSpeakerphoneOn(false);
        player.flush();
        player.play();
        startRecThread.start();
        Log.e(LOG_TAG, "start Recording");
    }

    private void stopRecording() {
        LOG_TAG = "stopRecording";
        recorder.stop();
        if (startRecThread != null && startRecThread.isAlive()) {
            startRecThread.destroy();
            startRecThread = null;
        }
        player.stop();
        player.flush();
        Log.e(LOG_TAG, "stop Recording");
    }

    private void stop() {
        if (isRecording) {
            isRecording = false;
            stopRecording();
        }
        if (isPlaying) {
            isPlaying = false;
            stopPlaying();
        }
        recordBtn.setEnabled(true);
        playBtn.setEnabled(true);
    }

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        LOG_TAG = "onCreate";
        // Log.e(LOG_TAG, "Create Called");
        // getting the audio service
        audioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
        LinearLayout ll = new LinearLayout(this);
        // creating Buttons one by one . . . .
        // button to start the recording process
        recordBtn = new Button(this);
        recordBtn.setText("Record");
        recordBtn.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                recordBtn.setEnabled(false);
                playBtn.setEnabled(false);
                isRecording = true;
                startRecording();
            }
        });
        // single button to stop recording and playing as applicable
        stopBtn = new Button(this);
        stopBtn.setText("Stop");
        stopBtn.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                stop();
            }
        });
        // button to play the recorded sound
        playBtn = new Button(this);
        playBtn.setText("Play");
        playBtn.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                // reverse the isPlaying
                isPlaying = true;
                recordBtn.setEnabled(false);
                playBtn.setEnabled(false);
                startPlaying();
            }
        });
        ll.addView(recordBtn, new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.WRAP_CONTENT,
                ViewGroup.LayoutParams.WRAP_CONTENT, 1));
        ll.addView(playBtn, new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.WRAP_CONTENT,
                ViewGroup.LayoutParams.WRAP_CONTENT, 1));
        ll.addView(stopBtn, new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.WRAP_CONTENT,
                ViewGroup.LayoutParams.WRAP_CONTENT, 1));
        setContentView(ll);
    }

    @Override
    protected void onDestroy() {
        // Clean up code . ..
        super.onDestroy();
        if (recorder != null)
            recorder.release();
        if (startRecThread != null && startRecThread.isAlive())
            startRecThread.destroy();
        if (recorder != null)
            recorder.release();
        if (player != null)
            player.release();
        startRecThread = null;
        recorder = null;
        player = null;
        recordBtn = null;
        stopBtn = null;
        playBtn = null;
        audioData = null;
        System.gc();
    }
}

As you can see, the startPlaying() and stopPlaying() functions are not implemented yet, so let's not talk about them. For now I am only trying to record and play back. When I run the program it does play the recorded audio, but the audio sounds as if it is coming from far away. The other problem is that the app's UI thread hangs, even though I have a separate thread reading the audio. Please help...

If your requirement is that it should play while recording (meaning loop the audio back), then in your while-loop thread, where you store the recorded data (the audioData buffer), you can write it straight to the player object inside that same loop (player.write(audioData, 0, numShortsRead);). You say your UI thread gets stuck; that may be because you gave the audio recording thread a higher priority.
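A minimal sketch of that change inside your recording thread's run() method, reusing the recorder, player and audioData fields from your code (numShortsRead is just a local variable here), might look like this:

    // sketch: loop back what the recorder captures straight to the AudioTrack
    while (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
        // read() blocks until the buffer is filled, so no Thread.sleep() is needed
        int numShortsRead = recorder.read(audioData, 0, audioData.length);
        if (numShortsRead > 0) {
            player.write(audioData, 0, numShortsRead);
        }
    }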

Check the code below, which I used for the loopback requirement described above:

    // member fields assumed by this snippet; their declarations (and the values of
    // SAMPLE_RATE and BUF_SIZE) were not shown in the original, so these are placeholders
    private static final String LOG_TAG = "Loopback";
    private static final int SAMPLE_RATE = 8000;
    private static final int BUF_SIZE = 2048;
    private int buffersize;
    private byte[] buffer = new byte[BUF_SIZE];
    private AudioRecord m_record;
    private AudioTrack m_track;
    private Thread m_thread;
    boolean m_isRun = true;

    public void loopback() {
        // Prepare the AudioRecord & AudioTrack
        try {
            buffersize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            if (buffersize <= BUF_SIZE) {
                buffersize = BUF_SIZE;
            }
            Log.i(LOG_TAG, "Initializing Audio Record and Audio Playing objects");
            m_record = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffersize * 1);
            m_track = new AudioTrack(AudioManager.STREAM_ALARM, SAMPLE_RATE,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffersize * 1,
                    AudioTrack.MODE_STREAM);
            m_track.setPlaybackRate(SAMPLE_RATE);
        } catch (Throwable t) {
            Log.e("Error", "Initializing Audio Record and Play objects Failed "
                    + t.getLocalizedMessage());
        }

        m_record.startRecording();
        Log.i(LOG_TAG, "Audio Recording started");
        m_track.play();
        Log.i(LOG_TAG, "Audio Playing started");

        while (m_isRun) {
            m_record.read(buffer, 0, BUF_SIZE);
            m_track.write(buffer, 0, buffer.length);
        }
        Log.i(LOG_TAG, "loopback exit");
    }

    private void do_loopback() {
        m_thread = new Thread(new Runnable() {
            public void run() {
                loopback();
            }
        });
        // the original snippet was cut off here; starting the thread is the missing step
        m_thread.start();
    }

One more thing: if your requirement is to record for a few seconds and then play it back, with recording starting again while the playback of your recording is running, you can use a delayed handler thread as a timeout; in that thread you can stop recording, copy the buffer, and then start recording again.
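One way to do that timeout is with Handler.postDelayed(). The sketch below is only an illustration of the idea: the field names recorder, player and audioData come from the question's code, while CHUNK_MILLIS, timeoutHandler and recordChunkThenPlay() are names I made up for this example.

    // Hypothetical sketch: record for a fixed time, then stop, play the captured
    // chunk on a worker thread, and immediately start recording the next chunk.
    private static final long CHUNK_MILLIS = 5000; // assumed length of one chunk
    private final Handler timeoutHandler = new Handler();

    private void recordChunkThenPlay() {
        recorder.startRecording();
        timeoutHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                // timeout reached: pull what the recorder has, then stop it
                int read = recorder.read(audioData, 0, audioData.length);
                recorder.stop();
                // copy the buffer so the next recording pass cannot overwrite it
                final short[] chunk = Arrays.copyOf(audioData, Math.max(read, 0));
                // play the copy in the background while recording restarts
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        player.play();
                        player.write(chunk, 0, chunk.length);
                    }
                }).start();
                recordChunkThenPlay(); // schedule the next chunk
            }
        }, CHUNK_MILLIS);
    }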

My suggestion is to use AsyncTask, or otherwise "Painless Threading". That is the best way to make use of threads in Android. You can use the doInBackground, onPreExecute and onPostExecute methods to split up your existing program.
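For example, the recording/loopback loop could be moved into an AsyncTask roughly like this. This is only a sketch: it reuses the recorder, player, audioData, isRecording, recordBtn and playBtn fields from the question (isRecording should ideally be volatile if it is used as the loop flag), and LoopbackTask is a name made up for the example.

    // Hypothetical sketch: button handling stays on the UI thread, audio I/O moves
    // to the background thread that AsyncTask provides.
    private class LoopbackTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected void onPreExecute() {
            // runs on the UI thread before the background work starts
            recordBtn.setEnabled(false);
            playBtn.setEnabled(false);
        }

        @Override
        protected Void doInBackground(Void... params) {
            // runs on a background thread: read from the mic, write to the track
            recorder.startRecording();
            player.play();
            while (isRecording) { // cleared by stop() on the UI thread
                int numShortsRead = recorder.read(audioData, 0, audioData.length);
                if (numShortsRead > 0) {
                    player.write(audioData, 0, numShortsRead);
                }
            }
            recorder.stop();
            player.stop();
            return null;
        }

        @Override
        protected void onPostExecute(Void result) {
            // runs on the UI thread after doInBackground() returns
            recordBtn.setEnabled(true);
            playBtn.setEnabled(true);
        }
    }

It could then be started from the Record button's click listener with new LoopbackTask().execute(), and stopped by clearing the isRecording flag from the Stop button.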

http://android-developers.blogspot.ca/2009/05/painless-threading.html