Question
I am new to Android and I have been working on a Pitch Analyzer application (minimum SDK: 8). I have read many articles on how to use the AudioRecord class, but I cannot figure out why it does not read any data when I record. I tried to display the values of audioData and fftArray, but only zeros are returned, so I assume the problem is with the read method. Please have a look. Here is the code I used:
FFT.java
Complex.java
record.java
final Intent intent = new Intent("pitch.analyzer.PitZer.ASSESSMENT");
MediaRecorder recorder;
AudioRecord tuner;
int audioSource = MediaRecorder.AudioSource.MIC;
int sampleRateInHz = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int bufferSizeInBytes = 4096;
int samples;
short[] audioBuffer;
short[] audioData;
double[] temp;
TextView fft;
TextView results;
//TextView bufferSize;
Complex[] fftTempArray;
Complex[] fftArray;
Complex[] fftInverse;
@Override
protected void onCreate(Bundle savedInstanceState) {
    // TODO Auto-generated method stub
    super.onCreate(savedInstanceState);
    setContentView(R.layout.record);
    Button start = (Button) findViewById(R.id.record);
    Button stop = (Button) findViewById(R.id.stop);
    fft = (TextView) findViewById(R.id.fft);
    results = (TextView) findViewById(R.id.results);
    //bufferSize = (TextView) findViewById(R.id.bufferSize);
    audioData = new short[bufferSizeInBytes];
    tuner = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes);
    //final AudioRecorder recorder = new AudioRecorder("/audiometer/temp");
    start.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            acquire();
            computeFFT();
            display();
        }
    });
    //….wait a while
    stop.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            startActivity(intent);
        }
    });
}
public void acquire() {
    try {
        tuner.startRecording();
        samples = tuner.read(audioData, 0, bufferSizeInBytes);
    }
    catch (Throwable t) {
    }
}
public void computeFFT() {
    //Conversion from short to double
    double[] micBufferData = new double[bufferSizeInBytes]; //size may need to change
    final int bytesPerSample = 2; // As it is 16bit PCM
    final double amplification = 100.0; // choose a number as you like
    for (int index = 0, floatIndex = 0; index < bufferSizeInBytes - bytesPerSample + 1; index += bytesPerSample, floatIndex++) {
        double sample = 0;
        for (int b = 0; b < bytesPerSample; b++) {
            int v = audioData[index + b];
            if (b < bytesPerSample - 1 || bytesPerSample == 1) {
                v &= 0xFF;
            }
            sample += v << (b * 8);
        }
        double sample32 = amplification * (sample / 32768.0);
        micBufferData[floatIndex] = sample32;
    }

    //Create Complex array for use in FFT
    fftTempArray = new Complex[bufferSizeInBytes];
    for (int i = 0; i < bufferSizeInBytes; i++) {
        fftTempArray[i] = new Complex(micBufferData[i], 0);
    }

    //Obtain array of FFT data
    fftArray = FFT.fft(fftTempArray);
    fftInverse = FFT.ifft(fftTempArray);
    double[] freq2 = new double[fftArray.length];

    //Create an array of magnitude of fftArray
    double[] magnitude = new double[fftArray.length];
    for (int i = 0; i < fftArray.length; i++) {
        magnitude[i] = fftArray[i].abs();
        freq2[i] = ComputeFrequency(magnitude[i]);
    }

    fft.setTextColor(Color.BLUE);
    //fft.setText("fftArray is "+ fftArray[500] +" and fftTempArray is "+fftTempArray[500] + " and fftInverse is "+fftInverse[500]+" and audioData is "+audioData[500]+ " and magnitude is "+ magnitude[1] + ", "+magnitude[500]+", "+magnitude[1000]+ " and freq2 is "+ freq2[1]+" You rock dude!");
    /*for(int i = 2; i < samples; i++){
        fft.append(" " + magnitude[i] + " Hz");
    }
    for(int i = 2; i < samples; i++){
        fft.append(" " + freq2[i] + " Hz");
    }
    */
}
private double ComputeFrequency(double arrayIndex) {
    return ((1.0 * sampleRateInHz) / (1.0 * 100)) * arrayIndex;
}
public void display() {
    results.setTextColor(Color.BLUE);
    results.setText("results: " + audioData[1] + "");
    for (int i = 2; i < samples; i++) {
        results.append(" " + audioData[i]);
    }
    results.invalidate();

    //fft.setTextColor(Color.GREEN);
    fft.setText("sampleRateInHz: " + sampleRateInHz);
    fft.append("\nfftArray: " + fftArray[0] + " Hz");
    for (int i = 1; i < samples; i++) {
        fft.append(" " + fftArray[i] + " Hz");
    }
    fft.append("\naudioData: " + audioData[1]);
    fft.append("\nsamples: " + samples);
    //fft.invalidate();
}
public void stop() throws IOException {
    tuner.stop();
    //audioInput.reset();
    tuner.release();
    //recorder.stop();
    //recorder.reset();
    //recorder.release();
}
Answer 1:
Before reading from the device you should start recording (and stop it once you are finished).
Here is the code I use for a simple read:
short[] audioData = new short[bufferSize];
int offset = 0;
int shortRead = 0;
// start tapping into the microphone
audioRecord.startRecording();
// read from the microphone into the internal buffer, chunk by chunk
while (offset < bufferSize)
{
    shortRead = audioRecord.read(audioData, offset, bufferSize - offset);
    offset += shortRead;
}
// stop tapping into the microphone
audioRecord.stop();
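If the loop above still leaves the buffer full of zeros, it is worth checking that the AudioRecord object was created successfully in the first place. The sketch below is not part of the original code, just a minimal example of how the audioRecord used above might be constructed and verified, reusing the sampleRateInHz, channelConfig and audioFormat values from the question; getMinBufferSize() reports the smallest buffer the device accepts for that configuration, and getState() tells you whether the constructor actually succeeded:

// ask the platform for the minimum supported buffer size for this configuration
int minBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
if (minBufferSize == AudioRecord.ERROR_BAD_VALUE || minBufferSize == AudioRecord.ERROR) {
    // this sample rate / channel / format combination is not supported on this device
    return;
}

AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
        sampleRateInHz, channelConfig, audioFormat, minBufferSize * 2);

if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
    // construction failed (bad parameters or missing permission); read() would return no data
    return;
}

Also note that read() returns a negative error code (for example ERROR_INVALID_OPERATION) when the recorder is not initialized, so checking its return value before looping avoids spinning forever, and the RECORD_AUDIO permission must be declared in AndroidManifest.xml, otherwise recording will not work.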
Source: https://stackoverflow.com/questions/9742259/android-audiorecord-class-does-not-read-data-audiodata-and-fftarray-return-ze