Recorded Audio Using AudioRecord API Fails To Play
Solution 1:
You don't have to convert the recording to WAV in order to play it: AudioTrack can play the recorded PCM audio directly.
The following code snippet records audio into a file using AudioRecord and plays it back using the AudioTrack API. The operation is controlled by the user through buttons.
Code
private int BufferSize;
byte[] buffer = new byte[BufferSize];

/* AudioRecord and AudioTrack objects */
private AudioRecord record = null;
private AudioTrack track = null;

/* Audio configuration */
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

private boolean isRecording = true;
private Thread recordingThread = null;
The audio configuration can vary from device to device; refer to this question. One way to probe for a configuration the device supports is sketched below.
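This is only a sketch: AudioRecord.getMinBufferSize() returns a negative error value for parameter combinations the device does not support, so you can try a few candidate sample rates and keep the first one that works. The candidate list and the helper name are my own, not part of the original answer.

/* Hypothetical helper: returns the first sample rate this device supports, or -1 */
private int findSupportedSampleRate() {
    int[] candidateRates = { 44100, 22050, 16000, 11025, 8000 };
    for (int rate : candidateRates) {
        int size = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        // getMinBufferSize() returns ERROR_BAD_VALUE or ERROR for unsupported settings
        if (size > 0) {
            return rate;
        }
    }
    return -1;
}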
The GUI has three buttons: Record, Stop and Play.
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    setButtonHandlers();

    /* Set initial button states */
    enableButton(R.id.btnStartRec, true);
    enableButton(R.id.btnStopRec, false);
    enableButton(R.id.btnStartPlay, false);

    BufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
}
/* Enable or disable a button */
private void enableButton(int id, boolean isEnable) {
    ((Button) findViewById(id)).setEnabled(isEnable);
}

/* Assign the OnClickListener to the buttons */
private void setButtonHandlers() {
    ((Button) findViewById(R.id.btnStartRec)).setOnClickListener(btnClick);
    ((Button) findViewById(R.id.btnStopRec)).setOnClickListener(btnClick);
    ((Button) findViewById(R.id.btnStartPlay)).setOnClickListener(btnClick);
}
Handling the button clicks:
private View.OnClickListener btnClick = new View.OnClickListener() {
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.btnStartRec: {
                Log.d(TAG, "Start Recording");
                enableButton(R.id.btnStartRec, false);
                enableButton(R.id.btnStopRec, true);
                startRecording();
                break;
            }
            case R.id.btnStopRec: {
                Log.d(TAG, "Stop Recording");
                enableButton(R.id.btnStartRec, true);
                enableButton(R.id.btnStopRec, false);
                stopRecording();
                enableButton(R.id.btnStartPlay, true);
                break;
            }
            case R.id.btnStartPlay: {
                Log.d(TAG, "Play Recording");
                enableButton(R.id.btnStartRec, false);
                enableButton(R.id.btnStopRec, false);
                startPlaying();   // matches the method name defined below
                break;
            }
        }
    }
};
Code for Start Recording
private void startRecording() {
    record = new AudioRecord(AudioSource.DEFAULT, sampleRate,
            channelConfig, audioFormat, BufferSize);

    if (AudioRecord.STATE_INITIALIZED == record.getState())
        record.startRecording();

    isRecording = true;

    /* Run the recording on a separate thread */
    recordingThread = new Thread(new Runnable() {
        @Override
        public void run() {
            writeAudioDataToFile();
        }
    }, "AudioRecorder Thread");
    recordingThread.start();
}
private void writeAudioDataToFile() {
    byte data[] = new byte[BufferSize];

    /* Record audio to the following file */
    String filename = "/sdcard/audiofile.pcm";

    FileOutputStream os = null;
    try {
        os = new FileOutputStream(filename);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }

    int read_bytes = 0;
    if (null != os) {
        while (isRecording) {
            read_bytes = record.read(data, 0, BufferSize);
            if (AudioRecord.ERROR_INVALID_OPERATION != read_bytes) {
                try {
                    // write only the bytes that were actually read
                    os.write(data, 0, read_bytes);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Code for Stop Recording
private void stopRecording() {
    if (null != record) {
        isRecording = false;

        if (AudioRecord.STATE_INITIALIZED == record.getState()) {
            record.stop();
            record.release();
            Log.d(TAG, "===== Recording Audio Completed ===== ");
        }

        record = null;
        recordingThread = null;
    }
}
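One detail worth noting: after isRecording is set to false, the writer thread may still be flushing its last buffer to the file. If you want to be sure the file is complete before playback starts, one option (a small optional addition, not in the original answer) is to join the recording thread after clearing the flag and before calling record.stop()/release():

/* Optional: wait for writeAudioDataToFile() to finish before playing the file */
if (recordingThread != null) {
    try {
        recordingThread.join();   // blocks until the recording thread has returned
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}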
Code for Playing the Audio file:
public void startPlaying() {
    enableButton(R.id.btnStartPlay, false);

    int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
            AudioTrack.MODE_STREAM);

    int i = 0;
    byte[] temp = new byte[minBufferSize];

    try {
        FileInputStream fin = new FileInputStream("/sdcard/audiofile.pcm");
        Log.d(TAG, "===== Opening File for Playing : /sdcard/audiofile.pcm ===== ");
        DataInputStream dis = new DataInputStream(fin);

        track.play();
        while ((i = dis.read(temp, 0, minBufferSize)) > -1) {
            track.write(temp, 0, i);
        }

        Log.d(TAG, "===== Playing Audio Completed ===== ");
        track.stop();
        track.release();
        dis.close();
        fin.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

    enableButton(R.id.btnStartRec, true);
}
Please include the following permissions in AndroidManifest.xml:

<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
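Keep in mind that on Android 6.0 (API 23) and higher these are dangerous permissions, so the manifest entries alone are not enough; they must also be granted at runtime. A minimal sketch using the support/AndroidX ContextCompat and ActivityCompat helpers (the request-code constant and method name are my own):

private static final int REQ_RECORD_AUDIO = 1;   // arbitrary request code

private void requestAudioPermissionIfNeeded() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.RECORD_AUDIO },
                REQ_RECORD_AUDIO);
    }
}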
The activity_main.xml layout and strings.xml simply declare the three buttons (btnStartRec, btnStopRec, btnStartPlay) referenced above; they are not reproduced here.
The above code is working and tested.
You can also do the same thing without a file, using an intermediate in-memory buffer. See: Audio Recording and Streaming in Android. A rough sketch of that variant follows.
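As an untested sketch of that file-less variant, reusing the fields from the snippet above (the buffer field and method names here are illustrative, not taken from the linked article): the recording thread appends into a ByteArrayOutputStream, and playback streams the same bytes through AudioTrack.

private ByteArrayOutputStream pcmBuffer = new ByteArrayOutputStream();

/* Run on the recording thread instead of writeAudioDataToFile() */
private void writeAudioDataToMemory() {
    byte[] data = new byte[BufferSize];
    while (isRecording) {
        int read = record.read(data, 0, BufferSize);
        if (read > 0) {
            pcmBuffer.write(data, 0, read);   // keep the raw PCM in memory
        }
    }
}

/* Play back everything captured so far */
private void playFromMemory() {
    byte[] pcm = pcmBuffer.toByteArray();
    int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
    player.play();
    player.write(pcm, 0, pcm.length);   // blocks until all data has been queued
    player.stop();
    player.release();
}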
Solution 2:
Yes, finally I found the answer, thanks to the clue in Michael's comment above. I am posting the working code here.

Client side: the client records from the microphone and streams the audio data to the WebSocket server.
private int minBufSize;
private AudioRecord recorder;
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private boolean status = true;   // controls the streaming loop below; not declared in the original snippet

minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);

startStreaming();
public void startStreaming() {
    Thread streamThread = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                byte[] buffer = new byte[minBufSize];

                recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                        sampleRate, channelConfig, audioFormat, minBufSize);
                Log.d(TAG, "Recorder initialized");
                recorder.startRecording();

                while (status) {
                    // read data from the MIC into the buffer
                    minBufSize = recorder.read(buffer, 0, buffer.length);
                    // send the buffer over the WebSocket connection
                    mConnection.sendBinaryMessage(buffer);
                    System.out.println("MinBufferSize: " + minBufSize);
                }
            } catch (Exception e) {
                e.printStackTrace();
                Log.e(TAG, "Exception: " + e.getMessage());
            }
        }
    });
    streamThread.start();
}
Server side: the implementation is as follows. First the server appends the streamed data to a .pcm file; then it creates a WAV file from that PCM data by adding the proper header.
@OnMessage
public void onMessage(byte[] data, boolean arg1) {
    if ((!this.currentCommand.equals("stop")) &&
            (this.currentCommand.equals("start"))) {
        try {
            System.out.println("Starting new recording.");
            FileOutputStream fOut = new FileOutputStream(this.f2, true);   // append the new chunk
            fOut.write(data);
            fOut.close();
            properWAV(this.f2, 111133.0F);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
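Note that, as written, properWAV() regenerates the whole WAV file for every incoming binary chunk. A possible alternative, sketched here under the assumption that the "start"/"stop" commands arrive as separate text frames on the same endpoint (the original does not show where currentCommand is set), is to only append PCM in the binary handler and build the WAV once when the "stop" command arrives:

@OnMessage
public void onMessage(String message) {
    // hypothetical control channel: text frames carry "start"/"stop"
    this.currentCommand = message;
    if ("stop".equals(message)) {
        properWAV(this.f2, 111133.0F);   // build the WAV once, at the end
    }
}

The properWAV() implementation, which prepends the WAV header, follows.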
private void properWAV(File fileToConvert, float newRecordingID) {
    try {
        long mySubChunk1Size = 16L;
        int myBitsPerSample = 16;
        int myFormat = 1;               // PCM
        long myChannels = 1L;
        long mySampleRate = 44100L;
        long myByteRate = mySampleRate * myChannels * myBitsPerSample / 8L;
        int myBlockAlign = (int) (myChannels * myBitsPerSample / 8L);

        byte[] clipData = getBytesFromFile(fileToConvert);

        long myDataSize = clipData.length;       // raw PCM size in bytes
        long myChunk2Size = myDataSize;          // data sub-chunk size (clipData is already bytes)
        long myChunkSize = 36L + myChunk2Size;   // RIFF chunk size

        OutputStream os = new FileOutputStream(new File("D:/audio/" + newRecordingID + ".wav"));
        BufferedOutputStream bos = new BufferedOutputStream(os);
        DataOutputStream outFile = new DataOutputStream(bos);

        outFile.writeBytes("RIFF");
        outFile.write(intToByteArray((int) myChunkSize), 0, 4);
        outFile.writeBytes("WAVE");
        outFile.writeBytes("fmt ");
        outFile.write(intToByteArray((int) mySubChunk1Size), 0, 4);
        outFile.write(shortToByteArray((short) myFormat), 0, 2);
        outFile.write(shortToByteArray((short) (int) myChannels), 0, 2);
        outFile.write(intToByteArray((int) mySampleRate), 0, 4);
        outFile.write(intToByteArray((int) myByteRate), 0, 4);
        outFile.write(shortToByteArray((short) myBlockAlign), 0, 2);
        outFile.write(shortToByteArray((short) myBitsPerSample), 0, 2);
        outFile.writeBytes("data");
        outFile.write(intToByteArray((int) myDataSize), 0, 4);
        outFile.write(clipData);

        outFile.flush();
        outFile.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
private static byte[] intToByteArray(int i) {
    byte[] b = new byte[4];
    b[0] = (byte) (i & 0xFF);
    b[1] = (byte) (i >> 8 & 0xFF);
    b[2] = (byte) (i >> 16 & 0xFF);
    b[3] = (byte) (i >> 24 & 0xFF);
    return b;
}

public static byte[] shortToByteArray(short data) {
    return new byte[] { (byte) (data & 0xFF), (byte) (data >>> 8 & 0xFF) };
}
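The two helpers above simply pack the values in little-endian byte order, which is what the WAV header requires. For reference, java.nio.ByteBuffer can produce the same result (a sketch, equivalent to the helpers above):

private static byte[] intToByteArrayLE(int value) {
    // 4-byte little-endian encoding, same output as intToByteArray()
    return ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN).putInt(value).array();
}

private static byte[] shortToByteArrayLE(short value) {
    // 2-byte little-endian encoding, same output as shortToByteArray()
    return ByteBuffer.allocate(2).order(ByteOrder.LITTLE_ENDIAN).putShort(value).array();
}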
public byte[] getBytesFromFile(File file) throws IOException {
    byte[] buffer = new byte[(int) file.length()];
    InputStream ios = null;
    try {
        ios = new FileInputStream(file);
        if (ios.read(buffer) == -1) {
            throw new IOException("EOF reached while trying to read the whole file");
        }
    } finally {
        try {
            if (ios != null) {
                ios.close();
            }
        } catch (IOException localIOException) {
            // ignore close failure
        }
    }
    return buffer;
}
Hope this saves other developers some time.