am d3aaad0e: Merge "Use getMinFrameCount() instead of querying AudioSystem directly." into gingerbread
Merge commit 'd3aaad0e2a30030db49ddc1a1cdd40011446938c' into gingerbread-plus-aosp

* commit 'd3aaad0e2a30030db49ddc1a1cdd40011446938c':
  Use getMinFrameCount() instead of querying AudioSystem directly.
@@ -453,30 +453,23 @@ static jint android_media_AudioRecord_get_pos_update_period(JNIEnv *env, jobjec
 // return -1 if there was an error querying the buffer size.
 static jint android_media_AudioRecord_get_min_buff_size(JNIEnv *env, jobject thiz,
     jint sampleRateInHertz, jint nbChannels, jint audioFormat) {
 
-    size_t inputBuffSize = 0;
     LOGV(">> android_media_AudioRecord_get_min_buff_size(%d, %d, %d)", sampleRateInHertz, nbChannels, audioFormat);
 
-    status_t result = AudioSystem::getInputBufferSize(
-                    sampleRateInHertz,
-                    (audioFormat == javaAudioRecordFields.PCM16 ?
-                        AudioSystem::PCM_16_BIT : AudioSystem::PCM_8_BIT),
-                    nbChannels, &inputBuffSize);
-    switch(result) {
-    case(NO_ERROR):
-        if(inputBuffSize == 0) {
-            LOGV("Recording parameters are not supported: %dHz, %d channel(s), (java) format %d",
-                sampleRateInHertz, nbChannels, audioFormat);
-            return 0;
-        } else {
-            // the minimum buffer size is twice the hardware input buffer size
-            return 2*inputBuffSize;
-        }
-        break;
-    case(PERMISSION_DENIED):
-    default:
-        return -1;
-    }
+    int frameCount = 0;
+    status_t result = AudioRecord::getMinFrameCount(&frameCount,
+            sampleRateInHertz,
+            (audioFormat == javaAudioRecordFields.PCM16 ?
+                AudioSystem::PCM_16_BIT : AudioSystem::PCM_8_BIT),
+            nbChannels);
+
+    if (result == BAD_VALUE) {
+        return 0;
+    }
+    if (result != NO_ERROR) {
+        return -1;
+    }
+    return frameCount * nbChannels * (audioFormat == javaAudioRecordFields.PCM16 ? 2 : 1);
 }
 
 
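Illustrative note: the new AudioRecord path asks AudioRecord::getMinFrameCount() for a frame count and converts it to a byte count in the final return statement. The following standalone sketch reproduces only that frames-to-bytes conversion; the frame count, channel count and PCM flag used in main() are hypothetical example inputs, not values from this change.

// Standalone sketch (illustrative only) of the frames-to-bytes conversion
// performed by the new return statement above.
#include <cstdio>

static int minBufferSizeInBytes(int frameCount, int channelCount, bool isPcm16) {
    // One frame holds one sample per channel; a sample is 2 bytes for 16-bit PCM
    // and 1 byte for 8-bit PCM, mirroring the (PCM16 ? 2 : 1) factor in the diff.
    const int bytesPerSample = isPcm16 ? 2 : 1;
    return frameCount * channelCount * bytesPerSample;
}

int main() {
    // Hypothetical example: a minimum of 1024 frames, stereo, 16-bit PCM.
    printf("min buffer = %d bytes\n", minBufferSizeInBytes(1024, 2, true));
    return 0;
}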
@@ -782,29 +782,13 @@ static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobjec
 // returns -1 if there was an error querying the hardware.
 static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz,
     jint sampleRateInHertz, jint nbChannels, jint audioFormat) {
-    int afSamplingRate;
-    int afFrameCount;
-    uint32_t afLatency;
 
-    if (AudioSystem::getOutputSamplingRate(&afSamplingRate) != NO_ERROR) {
-        return -1;
-    }
-    if (AudioSystem::getOutputFrameCount(&afFrameCount) != NO_ERROR) {
-        return -1;
-    }
-
-    if (AudioSystem::getOutputLatency(&afLatency) != NO_ERROR) {
+    int frameCount = 0;
+    if (AudioTrack::getMinFrameCount(&frameCount, AudioSystem::DEFAULT,
+            sampleRateInHertz) != NO_ERROR) {
         return -1;
     }
-
-    // Ensure that buffer depth covers at least audio hardware latency
-    uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSamplingRate);
-    if (minBufCount < 2) minBufCount = 2;
-    uint32_t minFrameCount = (afFrameCount*sampleRateInHertz*minBufCount)/afSamplingRate;
-    int minBuffSize = minFrameCount
-            * (audioFormat == javaAudioTrackFields.PCM16 ? 2 : 1)
-            * nbChannels;
-    return minBuffSize;
+    return frameCount * nbChannels * (audioFormat == javaAudioTrackFields.PCM16 ? 2 : 1);
 }
 
 // ----------------------------------------------------------------------------
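Illustrative note: the AudioTrack hunk removes a hand-rolled heuristic that derived the minimum buffer size from the output mixer's sample rate, hardware buffer size and latency, and replaces it with a call to AudioTrack::getMinFrameCount(). The standalone sketch below reproduces only the removed heuristic so its arithmetic can be followed concretely; the hardware numbers (48 kHz mixer rate, 960-frame mixer buffer, 80 ms latency) are hypothetical example values, not values from this change.

// Standalone sketch (illustrative only) of the latency-based heuristic removed above.
#include <cstdio>
#include <cstdint>

int main() {
    const int afSamplingRate = 48000;   // output mixer sample rate (hypothetical)
    const int afFrameCount = 960;       // frames per hardware buffer (hypothetical)
    const uint32_t afLatency = 80;      // output latency in milliseconds (hypothetical)
    const int sampleRateInHertz = 44100;
    const int nbChannels = 2;
    const bool pcm16 = true;

    // Ensure the buffer depth covers at least the hardware latency:
    // one hardware buffer lasts (1000 * afFrameCount) / afSamplingRate milliseconds.
    uint32_t minBufCount = afLatency / ((1000 * afFrameCount) / afSamplingRate);
    if (minBufCount < 2) minBufCount = 2;

    // Scale the hardware frame count to the track's sample rate and buffer depth,
    // then convert frames to bytes (2 bytes per sample for 16-bit PCM).
    uint32_t minFrameCount =
            (afFrameCount * sampleRateInHertz * minBufCount) / afSamplingRate;
    int minBuffSize = minFrameCount * (pcm16 ? 2 : 1) * nbChannels;

    printf("minBufCount=%u minFrameCount=%u minBuffSize=%d bytes\n",
           minBufCount, minFrameCount, minBuffSize);
    return 0;
}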