am 04c58e9e: Merge "Java API for AudioTrack timestamps" into klp-dev

* commit '04c58e9ec12c65c15738ba456f18a1f36416b0cc':
  Java API for AudioTrack timestamps
Glenn Kasten
2013-09-04 15:33:20 -07:00
committed by Android Git Automerger
3 changed files with 122 additions and 0 deletions

android_media_AudioTrack.cpp

@@ -741,6 +741,30 @@ static jint android_media_AudioTrack_get_latency(JNIEnv *env, jobject thiz) {
}
// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_timestamp(JNIEnv *env, jobject thiz, jlongArray jTimestamp) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
        return AUDIOTRACK_ERROR;
    }

    AudioTimestamp timestamp;
    status_t status = lpTrack->getTimestamp(timestamp);
    if (status == OK) {
        jlong* nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL);
        if (nTimestamp == NULL) {
            ALOGE("Unable to get array for getTimestamp()");
            return AUDIOTRACK_ERROR;
        }
        nTimestamp[0] = (jlong) timestamp.mPosition;
        nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec);
        env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0);
    }
    return (jint) android_media_translateErrorCode(status);
}
// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_loop(JNIEnv *env, jobject thiz,
        jint loopStart, jint loopEnd, jint loopCount) {
@@ -869,6 +893,7 @@ static JNINativeMethod gMethods[] = {
{"native_set_position", "(I)I", (void *)android_media_AudioTrack_set_position}, {"native_set_position", "(I)I", (void *)android_media_AudioTrack_set_position},
{"native_get_position", "()I", (void *)android_media_AudioTrack_get_position}, {"native_get_position", "()I", (void *)android_media_AudioTrack_get_position},
{"native_get_latency", "()I", (void *)android_media_AudioTrack_get_latency}, {"native_get_latency", "()I", (void *)android_media_AudioTrack_get_latency},
{"native_get_timestamp", "([J)I", (void *)android_media_AudioTrack_get_timestamp},
{"native_set_loop", "(III)I", (void *)android_media_AudioTrack_set_loop}, {"native_set_loop", "(III)I", (void *)android_media_AudioTrack_set_loop},
{"native_reload_static", "()I", (void *)android_media_AudioTrack_reload}, {"native_reload_static", "()I", (void *)android_media_AudioTrack_reload},
{"native_get_output_sample_rate", {"native_get_output_sample_rate",

android/media/AudioTimestamp.java

@@ -0,0 +1,47 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media;
/**
* Structure that groups a position in frame units relative to an assumed audio stream,
* together with the estimated time when that frame was presented or is committed to be
* presented.
* In the case of audio output, "present" means that audio produced on device
* is detectable by an external observer off device.
* The time is based on the implementation's best effort, using whatever knowledge
* is available to the system, but cannot account for any delay unknown to the implementation.
*
* @see AudioTrack#getTimestamp
* @see AudioTrack.TimestampListener
*
* @hide
*/
public final class AudioTimestamp
{
    /**
     * Position in frames relative to start of an assumed audio stream.
     * The low-order 32 bits of position are in wrapping frame units similar to
     * {@link AudioTrack#getPlaybackHeadPosition}.
     */
    public long framePosition;

    /**
     * The estimated time when that frame was presented or is committed to be presented,
     * in the same units and timebase as {@link java.lang.System#nanoTime}.
     */
    public long nanoTime;
}
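
Because only the low-order 32 bits of framePosition are specified to wrap like AudioTrack#getPlaybackHeadPosition, a caller comparing the two values should subtract in int arithmetic so the wrap cancels. A minimal sketch of such a comparison (illustration only, not part of this commit; the helper name is hypothetical):

    // Signed distance from the last presented frame to the playback head,
    // tolerating the 32-bit wrap; valid while the true distance is < 2^31 frames.
    static long framesAhead(AudioTrack track, AudioTimestamp ts) {
        int head = track.getPlaybackHeadPosition(); // wrapping int counter
        int presented = (int) ts.framePosition;     // low-order 32 bits
        return head - presented; // int subtraction wraps correctly, then widens
    }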

android/media/AudioTrack.java

@@ -732,6 +732,51 @@ public class AudioTrack
        return mSessionId;
    }
    /**
     * Poll for a timestamp on demand.
     *
     * Use if {@link TimestampListener} is not delivered often enough for your needs,
     * or if you need to get the most recent timestamp outside of the event callback handler.
     * Calling this method too often may be inefficient;
     * if you need a high-resolution mapping between frame position and presentation time,
     * consider implementing that at application level, based on low-resolution timestamps.
     * The audio data at the returned position may either already have been
     * presented, or may not have been presented yet but is committed to be presented.
     * It is not possible to request the time corresponding to a particular position,
     * or to request the (fractional) position corresponding to a particular time.
     * If you need such features, consider implementing them at application level.
     *
     * @param timestamp an AudioTimestamp instance allocated and owned by the caller,
     *        or null.
     * @return the same instance, if the timestamp parameter is non-null and a timestamp
     *        is available; a reference to a new AudioTimestamp instance, now owned by the
     *        caller, if the timestamp parameter is null and a timestamp is available;
     *        or null, if no timestamp is available. In either successful case,
     *        the AudioTimestamp instance is filled in with a position in frame units,
     *        together with the estimated time when that frame was presented or is
     *        committed to be presented.
     *        When no timestamp is available, any supplied instance is left unaltered.
     *
     * @hide
     */
    public AudioTimestamp getTimestamp(AudioTimestamp timestamp)
    {
        // It's unfortunate, but we have to either create garbage every time or use synchronized
        long[] longArray = new long[2];
        int ret = native_get_timestamp(longArray);
        if (ret == SUCCESS) {
            if (timestamp == null) {
                timestamp = new AudioTimestamp();
            }
            timestamp.framePosition = longArray[0];
            timestamp.nanoTime = longArray[1];
        } else {
            timestamp = null;
        }
        return timestamp;
    }
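
As an illustration only (not part of this change), the caller-owned-instance pattern described in the javadoc pairs naturally with the application-level extrapolation the javadoc recommends. In this sketch the field and method names are hypothetical, and a steady sample rate is assumed:

    // Reuse one caller-owned instance across polls, avoiding the per-call
    // garbage noted in the comment inside getTimestamp().
    private final AudioTimestamp mTimestamp = new AudioTimestamp();

    // Estimate the frame being presented "now" by linear extrapolation from
    // the most recent low-resolution timestamp.
    long estimateCurrentFrame(AudioTrack track, int sampleRateHz) {
        if (track.getTimestamp(mTimestamp) == null) {
            return -1; // no timestamp available yet
        }
        long elapsedNanos = System.nanoTime() - mTimestamp.nanoTime;
        return mTimestamp.framePosition + (elapsedNanos * sampleRateHz) / 1000000000L;
    }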
    //--------------------------------------------------------------------------
    // Initialization / configuration
    //--------------------
@@ -1321,6 +1366,11 @@ public class AudioTrack
    private native final int native_get_latency();

    // longArray must be a non-null array of length >= 2
    // [0] is assigned the frame position
    // [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
    private native final int native_get_timestamp(long[] longArray);

    private native final int native_set_loop(int start, int end, int loopCount);

    static private native final int native_get_output_sample_rate(int streamType);