am d3aaad0e: Merge "Use getMinFrameCount() instead of querying AudioSystem directly." into gingerbread

Merge commit 'd3aaad0e2a30030db49ddc1a1cdd40011446938c' into gingerbread-plus-aosp

* commit 'd3aaad0e2a30030db49ddc1a1cdd40011446938c':
  Use getMinFrameCount() instead of querying AudioSystem directly.
Chia-chi Yeh authored on 2010-08-19 18:10:27 -07:00, committed by Android Git Automerger
2 changed files with 20 additions and 43 deletions

@@ -454,29 +454,22 @@ static jint android_media_AudioRecord_get_pos_update_period(JNIEnv *env, jobjec
 static jint android_media_AudioRecord_get_min_buff_size(JNIEnv *env, jobject thiz,
     jint sampleRateInHertz, jint nbChannels, jint audioFormat) {
-    size_t inputBuffSize = 0;
     LOGV(">> android_media_AudioRecord_get_min_buff_size(%d, %d, %d)", sampleRateInHertz, nbChannels, audioFormat);
-    status_t result = AudioSystem::getInputBufferSize(
+    int frameCount = 0;
+    status_t result = AudioRecord::getMinFrameCount(&frameCount,
             sampleRateInHertz,
             (audioFormat == javaAudioRecordFields.PCM16 ?
                 AudioSystem::PCM_16_BIT : AudioSystem::PCM_8_BIT),
-            nbChannels, &inputBuffSize);
-    switch(result) {
-    case(NO_ERROR):
-        if(inputBuffSize == 0) {
-            LOGV("Recording parameters are not supported: %dHz, %d channel(s), (java) format %d",
-                sampleRateInHertz, nbChannels, audioFormat);
-            return 0;
-        } else {
-            // the minimum buffer size is twice the hardware input buffer size
-            return 2*inputBuffSize;
-        }
-        break;
-    case(PERMISSION_DENIED):
-    default:
+            nbChannels);
+    if (result == BAD_VALUE) {
+        return 0;
+    }
+    if (result != NO_ERROR) {
         return -1;
     }
+    return frameCount * nbChannels * (audioFormat == javaAudioRecordFields.PCM16 ? 2 : 1);
 }
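
For context: the new AudioRecord path asks the native AudioRecord for a minimum frame count and then converts frames to bytes (one sample per channel per frame; 2 bytes per sample for 16-bit PCM, 1 byte for 8-bit). A minimal standalone sketch of that conversion follows; the helper name is illustrative and not part of the patch:

    #include <cstddef>

    // Frames -> bytes: one frame carries one sample per channel;
    // a sample is 2 bytes for 16-bit PCM, 1 byte for 8-bit PCM.
    static size_t framesToBytes(size_t frameCount, size_t channelCount, bool isPcm16) {
        const size_t bytesPerSample = isPcm16 ? 2 : 1;
        return frameCount * channelCount * bytesPerSample;
    }

    // Example: 1024 frames of stereo 16-bit PCM -> 1024 * 2 * 2 = 4096 bytes.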

@@ -782,29 +782,13 @@ static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobjec
 // returns -1 if there was an error querying the hardware.
 static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz,
     jint sampleRateInHertz, jint nbChannels, jint audioFormat) {
-    int afSamplingRate;
-    int afFrameCount;
-    uint32_t afLatency;
-    if (AudioSystem::getOutputSamplingRate(&afSamplingRate) != NO_ERROR) {
+    int frameCount = 0;
+    if (AudioTrack::getMinFrameCount(&frameCount, AudioSystem::DEFAULT,
+            sampleRateInHertz) != NO_ERROR) {
         return -1;
     }
-    if (AudioSystem::getOutputFrameCount(&afFrameCount) != NO_ERROR) {
-        return -1;
-    }
-    if (AudioSystem::getOutputLatency(&afLatency) != NO_ERROR) {
-        return -1;
-    }
-    // Ensure that buffer depth covers at least audio hardware latency
-    uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSamplingRate);
-    if (minBufCount < 2) minBufCount = 2;
-    uint32_t minFrameCount = (afFrameCount*sampleRateInHertz*minBufCount)/afSamplingRate;
-    int minBuffSize = minFrameCount
-        * (audioFormat == javaAudioTrackFields.PCM16 ? 2 : 1)
-        * nbChannels;
-    return minBuffSize;
+    return frameCount * nbChannels * (audioFormat == javaAudioTrackFields.PCM16 ? 2 : 1);
 }

 // ----------------------------------------------------------------------------
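
The AudioTrack side drops the inline latency heuristic in favor of AudioTrack::getMinFrameCount(). For reference, the arithmetic the deleted JNI code performed (and which the library call is expected to cover internally) is sketched below; the function name and signature are illustrative only:

    #include <cstdint>

    // Legacy heuristic from the deleted JNI code: cover at least the hardware
    // output latency with a minimum of two hardware buffers, then scale the
    // hardware frame count from the hardware rate to the requested rate.
    static uint32_t legacyMinFrameCount(uint32_t afFrameCount, uint32_t afSamplingRate,
                                        uint32_t afLatencyMs, uint32_t sampleRateInHertz) {
        uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSamplingRate);
        if (minBufCount < 2) minBufCount = 2;
        return (afFrameCount * sampleRateInHertz * minBufCount) / afSamplingRate;
    }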