Help Stereo AudioRecord

I want to capture stereo signals on my Galaxy Nexus Prime and Galaxy Note phones using AudioRecord.
But when I do this, the data captured by the left and right microphones is the same (bit-exact).
Is it possible to record audio in STEREO mode on Samsung phones?
 

McGilli

Well-Known Member
The Note has Stereo microphones. One at the top and one at the bottom.

I use the built in Recorder program - and it Does record in stereo.
 

ifb-online

Android Enthusiast
The Note has Stereo microphones. One at the top and one at the bottom.

I use the built in Recorder program - and it Does record in stereo.

Great :) Good to know! I guess the phone mic. for a wired headset is mono though. Or perhaps it's possible to use external Bluetooth wireless stereo mics?

Ian
 

akshaymalhotra

Lurker
Thread starter
But when I use AudioRecord to capture it, I wasn't able to do it.
Can you please tell me if I am doing anything wrong?
I am using the following code :

class​
PlayCaptureThread extends Thread {
BufferedOutputStream
bufferedOutput = null;
BufferedOutputStream
bufferedOutput1 = null;

public volatile boolean isRunning = true;
private int channel;
private int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private int frequency;
private int frameLength;

public static int count = 0;
private String file_name = "/sdcard/record_mic_b_";

protected BlockingQueue<byte[]> queue;

byte[] inBuffer;
byte[] inBuffer1;

PlayCaptureThread(
int channel, int frequency, int frameLength) {
this.frequency = frequency;
this.channel = channel;
this.frameLength = frameLength;

try {
// Input file for record

String convertedValue = Integer.toString(​
count);
count++;

//bufferedOutput = new BufferedOutputStream(new FileOutputStream("/sdcard/record_mic_b.pcm"));

bufferedOutput = new BufferedOutputStream(new FileOutputStream(file_name.concat(convertedValue)));
}
catch (IOException e) {
e.printStackTrace();
}
try {
// Input file for record

bufferedOutput1 = new BufferedOutputStream(new FileOutputStream("/sdcard/record_cam.pcm"));
}
catch (IOException e) {
e.printStackTrace();
}
}



public void run() {
// android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);


// Get minimum buffer size requirement for the chose stream type

int minSize = android.media.AudioTrack.getMinBufferSize(frequency,
channel,
ENCODING);

// Open an audio track for chose stream

AudioTrack oTrack =​
new AudioTrack(AudioManager.STREAM_MUSIC,
frequency,
channel,
ENCODING,
frameLength * 2 * 2/*minSize * 2*/,
AudioTrack.
MODE_STREAM);

if(oTrack == null)
{
System.
out.println("debug: " + "AudioTrack could not be initialized ");
return;
}


/*byte[] zerobuffer = new byte[frameLength*2];
// Write the byte array to the track
oTrack.write(zerobuffer, 0, zerobuffer.length);*/


// Start playing data that is written

oTrack.play();

// Get minimum buffer size requirement for the chose stream type

minSize = android.media.AudioRecord.getMinBufferSize(​
frequency,
channel,
ENCODING);

// Open an audio record instance for chosen stream

AudioRecord hRecord =​
new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
frequency, channel, ENCODING, frameLength * 2 * 2 );

if(hRecord == null)
{
System.
out.println("debug: " + "AudioRecord could not be initialized ");
//Close the BufferedOutputStream

try {
if (bufferedOutput != null) {
bufferedOutput.flush();
bufferedOutput.close();
}
}
catch (IOException ex) {
ex.printStackTrace();
}

return;
}

// Start recording module

hRecord.startRecording();


/*
/////////////START//////////////////added extra.... second recorder



AudioRecord hRecord1 = new AudioRecord(MediaRecorder.AudioSource.MIC,
frequency, channel, ENCODING, frameLength * 2 * 2 );

if(hRecord1 == null)
{
System.out.println("debug: " + "AudioRecord could not be initialized ");
//Close the BufferedOutputStream
try {
if (bufferedOutput1 != null) {
bufferedOutput1.flush();
bufferedOutput1.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}

return;
}
System.out.println("debug: " + "AudioRecord1 SUCCESSFUL,.....Yes yes ");
// Start recording module
hRecord.startRecording();
hRecord1.startRecording();



//////////////END///////////////////added extra.... second recorder
*/





while(this.isRunning)
{
try

{
int i;

inBuffer = new byte[frameLength * 2];
//inBuffer1 = new byte[frameLength * 2];


// Record PCM content

hRecord.read(​
inBuffer, 0, inBuffer.length);
//hRecord.read(inBuffer1, 0, inBuffer1.length);

/*for(i=0; i<frameLength; i++)
{
outData.writeShort(inBuffer);
}*/


//queue.put(inBuffer);

// Write the byte array to the track

oTrack.write(​
inBuffer, 0, inBuffer.length);

bufferedOutput.write(inBuffer);
//bufferedOutput1.write(inBuffer1);

}
catch (IOException e) {
// TODO Auto-generated catch block

e.printStackTrace();
}

}

// Done writing to the track. Stop it.

hRecord.stop();
// hRecord1.stop();


// Release the resources for Audio Track

hRecord.release();
//hRecord1.release();

// Done writing to the track. Stop it.

oTrack.stop();

// Release the resources for Audio Track

oTrack.release();

//Close the BufferedOutputStream

try {
if (bufferedOutput != null) {
bufferedOutput.flush();
bufferedOutput.close();
}
}
catch (IOException ex) {
ex.printStackTrace();
}

/* try {
if (bufferedOutput1 != null) {
bufferedOutput1.flush();
bufferedOutput1.close();
}
} catch (IOException ex) {
ex.printStackTrace();
}
*/

Log.e(APP_GLOBAL.​
APP_NAME, "PlayCaptureThread exiting");

}
}

public class audioloopback extends Activity {
    /** This is used for playing previews of the music files. */

    // BUG FIX: AudioFormat.CHANNEL_CONFIGURATION_STEREO is the deprecated
    // legacy constant; with AudioRecord it is the usual reason both channels
    // come back bit-identical and getMinBufferSize() reports an error. True
    // stereo capture needs CHANNEL_IN_STEREO. NOTE(review): CHANNEL_IN_STEREO
    // shares its numeric value with CHANNEL_OUT_STEREO, so passing it to the
    // AudioTrack side as well still works — confirm on the target API level.
    private int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO;
    private int FREQUENCY = 8000;   // sample rate in Hz
    private int FRAME_LENGTH = 128; // nominal chunk size (start_loopback() currently passes 9000 instead — see note there)

    private Button btn_start_loopback, btn_stop;

    PlayThread hPlay;
    CaptureThread hRecord;
    PlayCaptureThread hPlayCapture;

    /** Called when the activity is first created: wires up the two buttons
     *  and resets the global running flags. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        btn_stop = (Button) findViewById(R.id.ButtonStop);
        btn_stop.setOnClickListener(new Button.OnClickListener() {
            public void onClick(View v) {
                stop();
            }
        });

        btn_start_loopback = (Button) findViewById(R.id.ButtonStartLoopback);
        btn_start_loopback.setOnClickListener(new Button.OnClickListener() {
            public void onClick(View v) {
                start_loopback();
            }
        });

        APP_GLOBAL.isRecordRunning = false;
        APP_GLOBAL.isPlayRunning = false;
        APP_GLOBAL.isPlayRecordRunning = false;
    }

    /** Starts the capture-and-play-back thread, unless any audio thread is
     *  already running. */
    private void start_loopback() {
        if ((false == APP_GLOBAL.isRecordRunning)
                && (false == APP_GLOBAL.isPlayRunning)
                && (false == APP_GLOBAL.isPlayRecordRunning)) {

            Log.d(APP_GLOBAL.APP_NAME, "Starting loopback:");

            int minBufferSize = AudioRecord.getMinBufferSize(
                    FREQUENCY, CHANNEL_CONFIG, AudioFormat.ENCODING_PCM_16BIT);

            Log.i(APP_GLOBAL.APP_NAME, "The buffer value is =" + minBufferSize);

            // BUG FIX: a non-positive result is an error code, not a size;
            // the original logged it and started the thread anyway, which
            // then just failed inside run().
            if (minBufferSize <= 0) {
                Log.e(APP_GLOBAL.APP_NAME,
                        "Stereo capture not supported at " + FREQUENCY
                                + " Hz (code " + minBufferSize + ")");
                return;
            }

            // NOTE(review): 9000 is hard-coded here while FRAME_LENGTH is
            // 128 — confirm which frame length is actually intended.
            hPlayCapture = new PlayCaptureThread(CHANNEL_CONFIG, FREQUENCY, 9000);
            hPlayCapture.start();

            APP_GLOBAL.isPlayRecordRunning = true;
        }
        else {
            Log.e(APP_GLOBAL.APP_NAME, "Loopback already in progress");
        }
    }

    /** Signals every running audio thread to stop and clears the global
     *  running flags. */
    private void stop() {
        Log.d(APP_GLOBAL.APP_NAME, "Stopping threads");

        if (true == APP_GLOBAL.isPlayRunning) {
            hPlay.isRunning = false;
        }

        if (true == APP_GLOBAL.isRecordRunning) {
            hRecord.isRunning = false;
        }

        if (true == APP_GLOBAL.isPlayRecordRunning) {
            hPlayCapture.isRunning = false;
        }

        APP_GLOBAL.isRecordRunning = false;
        APP_GLOBAL.isPlayRunning = false;
        APP_GLOBAL.isPlayRecordRunning = false;
    }
}

 
Top