Merge "NuPlayer is now taking on the task of streaming over RTSP."

Andreas Huber authored 2011-10-13 12:13:57 -07:00; committed by Android (Google) Code Review
19 changed files with 604 additions and 691 deletions
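The change routes RTSP playback to NuPlayer instead of AwesomePlayer: getPlayerType() now returns NU_PLAYER for rtsp:// URLs, and NuPlayer::setDataSource() builds the new RTSPSource for them while everything else keeps the HTTPLiveSource path (both hunks appear below). A minimal standalone sketch of that prefix-based selection; the helper name, label strings, and example URLs are illustrative only, not part of the patch:

#include <cstdio>
#include <strings.h>

// Mirrors the strncasecmp prefix tests the patch adds in getPlayerType()
// and NuPlayer::setDataSource(): rtsp:// gets the new RTSPSource, anything
// else handed to NuPlayer keeps the HTTPLiveSource path.
static const char *nuPlayerSourceFor(const char *url) {
    return !strncasecmp(url, "rtsp://", 7) ? "RTSPSource" : "HTTPLiveSource";
}

int main() {
    printf("%s\n", nuPlayerSourceFor("rtsp://example.com/stream"));      // RTSPSource
    printf("%s\n", nuPlayerSourceFor("httplive://example.com/a.m3u8"));  // HTTPLiveSource
    return 0;
}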

View File

@ -30,7 +30,6 @@
#include <binder/ProcessState.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ALooper.h>
#include "include/ARTSPController.h"
#include "include/LiveSession.h"
#include "include/NuCachedSource2.h"
#include <media/stagefright/AudioPlayer.h>
@ -636,7 +635,6 @@ int main(int argc, char **argv) {
gDisplayHistogram = false;
sp<ALooper> looper;
sp<ARTSPController> rtspController;
sp<LiveSession> liveSession;
int res;
@ -948,7 +946,6 @@ int main(int argc, char **argv) {
sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
if (strncasecmp(filename, "sine:", 5)
&& strncasecmp(filename, "rtsp://", 7)
&& strncasecmp(filename, "httplive://", 11)
&& dataSource == NULL) {
fprintf(stderr, "Unable to create data source.\n");
@ -984,23 +981,7 @@ int main(int argc, char **argv) {
} else {
sp<MediaExtractor> extractor;
if (!strncasecmp("rtsp://", filename, 7)) {
if (looper == NULL) {
looper = new ALooper;
looper->start();
}
rtspController = new ARTSPController(looper);
status_t err = rtspController->connect(filename);
if (err != OK) {
fprintf(stderr, "could not connect to rtsp server.\n");
return -1;
}
extractor = rtspController.get();
syncInfoPresent = false;
} else if (!strncasecmp("httplive://", filename, 11)) {
if (!strncasecmp("httplive://", filename, 11)) {
String8 uri("http://");
uri.append(filename + 11);
@ -1117,13 +1098,6 @@ int main(int argc, char **argv) {
} else {
playSource(&client, mediaSource);
}
if (rtspController != NULL) {
rtspController->disconnect();
rtspController.clear();
sleep(3);
}
}
if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {

View File

@ -32,8 +32,8 @@ LOCAL_SHARED_LIBRARIES := \
libdl
LOCAL_STATIC_LIBRARIES := \
libstagefright_rtsp \
libstagefright_nuplayer \
libstagefright_rtsp \
LOCAL_C_INCLUDES := \
$(JNI_H_INCLUDE) \

View File

@ -584,6 +584,10 @@ player_type getPlayerType(const char* url)
}
}
if (!strncasecmp("rtsp://", url, 7)) {
return NU_PLAYER;
}
// use MidiFile for MIDI extensions
int lenURL = strlen(url);
for (int i = 0; i < NELEM(FILE_EXTS); ++i) {

View File

@ -8,6 +8,7 @@ LOCAL_SRC_FILES:= \
NuPlayerDriver.cpp \
NuPlayerRenderer.cpp \
NuPlayerStreamListener.cpp \
RTSPSource.cpp \
StreamingSource.cpp \
LOCAL_C_INCLUDES := \
@ -15,6 +16,7 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/base/media/libstagefright/include \
$(TOP)/frameworks/base/media/libstagefright/mpeg2ts \
$(TOP)/frameworks/base/media/libstagefright/httplive \
$(TOP)/frameworks/base/media/libstagefright/rtsp \
LOCAL_MODULE:= libstagefright_nuplayer

View File

@ -25,6 +25,7 @@
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "ATSParser.h"
@ -87,7 +88,14 @@ void NuPlayer::setDataSource(
const char *url, const KeyedVector<String8, String8> *headers) {
sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
msg->setObject("source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
if (!strncasecmp(url, "rtsp://", 7)) {
msg->setObject(
"source", new RTSPSource(url, headers, mUIDValid, mUID));
} else {
msg->setObject(
"source", new HTTPLiveSource(url, headers, mUIDValid, mUID));
}
msg->post();
}
@ -568,8 +576,15 @@ void NuPlayer::finishReset() {
CHECK(mAudioDecoder == NULL);
CHECK(mVideoDecoder == NULL);
++mScanSourcesGeneration;
mScanSourcesPending = false;
mRenderer.clear();
mSource.clear();
if (mSource != NULL) {
mSource->stop();
mSource.clear();
}
if (mDriver != NULL) {
sp<NuPlayerDriver> driver = mDriver.promote();

View File

@ -68,6 +68,7 @@ private:
struct Renderer;
struct Source;
struct StreamingSource;
struct RTSPSource;
enum {
kWhatSetDataSource = '=DaS',

View File

@ -219,7 +219,9 @@ void NuPlayer::Renderer::signalAudioSinkChanged() {
bool NuPlayer::Renderer::onDrainAudioQueue() {
uint32_t numFramesPlayed;
CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
return false;
}
ssize_t numFramesAvailableToWrite =
mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

View File

@ -28,6 +28,7 @@ struct NuPlayer::Source : public RefBase {
Source() {}
virtual void start() = 0;
virtual void stop() {}
// Returns OK iff more data was available,
// an error or ERROR_END_OF_STREAM if not.

View File

@ -0,0 +1,354 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "RTSPSource"
#include <utils/Log.h>
#include "RTSPSource.h"
#include "AnotherPacketSource.h"
#include "MyHandler.h"
#include <media/stagefright/MetaData.h>
namespace android {
NuPlayer::RTSPSource::RTSPSource(
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid,
uid_t uid)
: mURL(url),
mUIDValid(uidValid),
mUID(uid),
mFlags(0),
mState(DISCONNECTED),
mFinalResult(OK),
mDisconnectReplyID(0) {
if (headers) {
mExtraHeaders = *headers;
ssize_t index =
mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));
if (index >= 0) {
mFlags |= kFlagIncognito;
mExtraHeaders.removeItemsAt(index);
}
}
}
NuPlayer::RTSPSource::~RTSPSource() {
if (mLooper != NULL) {
mLooper->stop();
}
}
void NuPlayer::RTSPSource::start() {
if (mLooper == NULL) {
mLooper = new ALooper;
mLooper->setName("rtsp");
mLooper->start();
mReflector = new AHandlerReflector<RTSPSource>(this);
mLooper->registerHandler(mReflector);
}
CHECK(mHandler == NULL);
sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID);
mLooper->registerHandler(mHandler);
CHECK_EQ(mState, (int)DISCONNECTED);
mState = CONNECTING;
mHandler->connect();
}
void NuPlayer::RTSPSource::stop() {
sp<AMessage> msg = new AMessage(kWhatDisconnect, mReflector->id());
sp<AMessage> dummy;
msg->postAndAwaitResponse(&dummy);
}
status_t NuPlayer::RTSPSource::feedMoreTSData() {
return mFinalResult;
}
sp<MetaData> NuPlayer::RTSPSource::getFormat(bool audio) {
sp<AnotherPacketSource> source = getSource(audio);
if (source == NULL) {
return NULL;
}
return source->getFormat();
}
status_t NuPlayer::RTSPSource::dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) {
sp<AnotherPacketSource> source = getSource(audio);
if (source == NULL) {
return -EWOULDBLOCK;
}
status_t finalResult;
if (!source->hasBufferAvailable(&finalResult)) {
return finalResult == OK ? -EWOULDBLOCK : finalResult;
}
return source->dequeueAccessUnit(accessUnit);
}
sp<AnotherPacketSource> NuPlayer::RTSPSource::getSource(bool audio) {
return audio ? mAudioTrack : mVideoTrack;
}
status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {
*durationUs = 0ll;
int64_t audioDurationUs;
if (mAudioTrack != NULL
&& mAudioTrack->getFormat()->findInt64(
kKeyDuration, &audioDurationUs)
&& audioDurationUs > *durationUs) {
*durationUs = audioDurationUs;
}
int64_t videoDurationUs;
if (mVideoTrack != NULL
&& mVideoTrack->getFormat()->findInt64(
kKeyDuration, &videoDurationUs)
&& videoDurationUs > *durationUs) {
*durationUs = videoDurationUs;
}
return OK;
}
status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
if (mState != CONNECTED) {
return UNKNOWN_ERROR;
}
mState = SEEKING;
mHandler->seek(seekTimeUs);
return OK;
}
bool NuPlayer::RTSPSource::isSeekable() {
return true;
}
void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
if (msg->what() == kWhatDisconnect) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
mDisconnectReplyID = replyID;
finishDisconnectIfPossible();
return;
}
CHECK_EQ(msg->what(), (int)kWhatNotify);
int32_t what;
CHECK(msg->findInt32("what", &what));
switch (what) {
case MyHandler::kWhatConnected:
onConnected();
break;
case MyHandler::kWhatDisconnected:
onDisconnected(msg);
break;
case MyHandler::kWhatSeekDone:
{
mState = CONNECTED;
break;
}
case MyHandler::kWhatAccessUnit:
{
size_t trackIndex;
CHECK(msg->findSize("trackIndex", &trackIndex));
CHECK_LT(trackIndex, mTracks.size());
sp<RefBase> obj;
CHECK(msg->findObject("accessUnit", &obj));
sp<ABuffer> accessUnit = static_cast<ABuffer *>(obj.get());
int32_t damaged;
if (accessUnit->meta()->findInt32("damaged", &damaged)
&& damaged) {
LOGI("dropping damaged access unit.");
break;
}
const TrackInfo &info = mTracks.editItemAt(trackIndex);
sp<AnotherPacketSource> source = info.mSource;
if (source != NULL) {
#if 1
uint32_t rtpTime;
CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
int64_t nptUs =
((double)rtpTime - (double)info.mRTPTime)
/ info.mTimeScale
* 1000000ll
+ info.mNormalPlaytimeUs;
accessUnit->meta()->setInt64("timeUs", nptUs);
#endif
source->queueAccessUnit(accessUnit);
}
break;
}
case MyHandler::kWhatEOS:
{
size_t trackIndex;
CHECK(msg->findSize("trackIndex", &trackIndex));
CHECK_LT(trackIndex, mTracks.size());
int32_t finalResult;
CHECK(msg->findInt32("finalResult", &finalResult));
CHECK_NE(finalResult, (status_t)OK);
TrackInfo *info = &mTracks.editItemAt(trackIndex);
sp<AnotherPacketSource> source = info->mSource;
if (source != NULL) {
source->signalEOS(finalResult);
}
break;
}
case MyHandler::kWhatSeekDiscontinuity:
{
size_t trackIndex;
CHECK(msg->findSize("trackIndex", &trackIndex));
CHECK_LT(trackIndex, mTracks.size());
TrackInfo *info = &mTracks.editItemAt(trackIndex);
sp<AnotherPacketSource> source = info->mSource;
if (source != NULL) {
source->queueDiscontinuity(ATSParser::DISCONTINUITY_SEEK, NULL);
}
break;
}
case MyHandler::kWhatNormalPlayTimeMapping:
{
size_t trackIndex;
CHECK(msg->findSize("trackIndex", &trackIndex));
CHECK_LT(trackIndex, mTracks.size());
uint32_t rtpTime;
CHECK(msg->findInt32("rtpTime", (int32_t *)&rtpTime));
int64_t nptUs;
CHECK(msg->findInt64("nptUs", &nptUs));
TrackInfo *info = &mTracks.editItemAt(trackIndex);
info->mRTPTime = rtpTime;
info->mNormalPlaytimeUs = nptUs;
break;
}
default:
TRESPASS();
}
}
void NuPlayer::RTSPSource::onConnected() {
CHECK(mAudioTrack == NULL);
CHECK(mVideoTrack == NULL);
size_t numTracks = mHandler->countTracks();
for (size_t i = 0; i < numTracks; ++i) {
int32_t timeScale;
sp<MetaData> format = mHandler->getTrackFormat(i, &timeScale);
const char *mime;
CHECK(format->findCString(kKeyMIMEType, &mime));
bool isAudio = !strncasecmp(mime, "audio/", 6);
bool isVideo = !strncasecmp(mime, "video/", 6);
TrackInfo info;
info.mTimeScale = timeScale;
info.mRTPTime = 0;
info.mNormalPlaytimeUs = 0ll;
if ((isAudio && mAudioTrack == NULL)
|| (isVideo && mVideoTrack == NULL)) {
sp<AnotherPacketSource> source = new AnotherPacketSource(format);
if (isAudio) {
mAudioTrack = source;
} else {
mVideoTrack = source;
}
info.mSource = source;
}
mTracks.push(info);
}
mState = CONNECTED;
}
void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
status_t err;
CHECK(msg->findInt32("result", &err));
CHECK_NE(err, (status_t)OK);
mLooper->unregisterHandler(mHandler->id());
mHandler.clear();
mState = DISCONNECTED;
mFinalResult = err;
if (mDisconnectReplyID != 0) {
finishDisconnectIfPossible();
}
}
void NuPlayer::RTSPSource::finishDisconnectIfPossible() {
if (mState != DISCONNECTED) {
mHandler->disconnect();
return;
}
(new AMessage)->postReply(mDisconnectReplyID);
mDisconnectReplyID = 0;
}
} // namespace android
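The kWhatAccessUnit case above derives a normal-play-time timestamp for each access unit from its RTP time, using the per-track mapping (mRTPTime, mNormalPlaytimeUs) and time scale. A self-contained sketch of the same arithmetic with made-up numbers, purely for illustration (the helper name and the 90 kHz example values are not from the patch):

#include <cstdint>
#include <cstdio>

// Same formula as RTSPSource's kWhatAccessUnit handling:
// nptUs = (rtpTime - rtpTimeBase) / timeScale * 1e6 + nptBaseUs
static int64_t rtpToNptUs(uint32_t rtpTime, uint32_t rtpTimeBase,
                          int32_t timeScale, int64_t nptBaseUs) {
    return (int64_t)(((double)rtpTime - (double)rtpTimeBase)
            / timeScale * 1000000ll) + nptBaseUs;
}

int main() {
    // Hypothetical 90 kHz video track whose NPT mapping anchors rtpTime
    // 100000 at 0 us: 45000 ticks later corresponds to 0.5 s.
    printf("%lld\n", (long long)rtpToNptUs(145000, 100000, 90000, 0));  // 500000
    return 0;
}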

View File

@ -0,0 +1,109 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef RTSP_SOURCE_H_
#define RTSP_SOURCE_H_
#include "NuPlayerSource.h"
#include <media/stagefright/foundation/AHandlerReflector.h>
namespace android {
struct ALooper;
struct AnotherPacketSource;
struct MyHandler;
struct NuPlayer::RTSPSource : public NuPlayer::Source {
RTSPSource(
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid = false,
uid_t uid = 0);
virtual void start();
virtual void stop();
virtual status_t feedMoreTSData();
virtual sp<MetaData> getFormat(bool audio);
virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
virtual status_t getDuration(int64_t *durationUs);
virtual status_t seekTo(int64_t seekTimeUs);
virtual bool isSeekable();
void onMessageReceived(const sp<AMessage> &msg);
protected:
virtual ~RTSPSource();
private:
enum {
kWhatNotify = 'noti',
kWhatDisconnect = 'disc',
};
enum State {
DISCONNECTED,
CONNECTING,
CONNECTED,
SEEKING,
};
enum Flags {
// Don't log any URLs.
kFlagIncognito = 1,
};
struct TrackInfo {
sp<AnotherPacketSource> mSource;
int32_t mTimeScale;
uint32_t mRTPTime;
int64_t mNormalPlaytimeUs;
};
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
bool mUIDValid;
uid_t mUID;
uint32_t mFlags;
State mState;
status_t mFinalResult;
uint32_t mDisconnectReplyID;
sp<ALooper> mLooper;
sp<AHandlerReflector<RTSPSource> > mReflector;
sp<MyHandler> mHandler;
Vector<TrackInfo> mTracks;
sp<AnotherPacketSource> mAudioTrack;
sp<AnotherPacketSource> mVideoTrack;
sp<AnotherPacketSource> getSource(bool audio);
void onConnected();
void onDisconnected(const sp<AMessage> &msg);
void finishDisconnectIfPossible();
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
} // namespace android
#endif // RTSP_SOURCE_H_
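Note that dequeueAccessUnit() is deliberately non-blocking: it returns -EWOULDBLOCK while the backing AnotherPacketSource has nothing buffered, the track's final status once EOS has been signalled, and OK plus a buffer otherwise. A toy, self-contained sketch of a caller polling that contract; FakeTrack and the status constants below are stand-ins for illustration, not Android types:

#include <cerrno>
#include <cstdio>
#include <deque>
#include <string>

// Illustrative status codes standing in for Android's status_t values.
static const int OK = 0;
static const int ERROR_END_OF_STREAM = -1011;  // placeholder value

// Stand-in for an AnotherPacketSource-backed track.
struct FakeTrack {
    std::deque<std::string> buffers;
    bool eos = false;

    int dequeueAccessUnit(std::string *out) {
        if (!buffers.empty()) {
            *out = buffers.front();
            buffers.pop_front();
            return OK;
        }
        return eos ? ERROR_END_OF_STREAM : -EWOULDBLOCK;
    }
};

int main() {
    FakeTrack track;
    track.buffers = {"AU#0", "AU#1"};
    track.eos = true;  // EOS already signalled after the queued units

    for (;;) {
        std::string au;
        int err = track.dequeueAccessUnit(&au);
        if (err == -EWOULDBLOCK) continue;  // nothing buffered yet, retry later
        if (err != OK) break;               // final result, e.g. end of stream
        printf("got %s\n", au.c_str());
    }
    return 0;
}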

View File

@ -58,7 +58,6 @@ LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/external/flac/include \
$(TOP)/external/tremolo \
$(TOP)/frameworks/base/media/libstagefright/rtsp \
$(TOP)/external/openssl/include \
LOCAL_SHARED_LIBRARIES := \
@ -88,7 +87,6 @@ LOCAL_STATIC_LIBRARIES := \
libvpx \
libstagefright_mpeg2ts \
libstagefright_httplive \
libstagefright_rtsp \
libstagefright_id3 \
libFLAC \

View File

@ -22,7 +22,6 @@
#include <dlfcn.h>
#include "include/ARTSPController.h"
#include "include/AwesomePlayer.h"
#include "include/DRMExtractor.h"
#include "include/SoftwareRenderer.h"
@ -53,7 +52,6 @@
#include <gui/SurfaceTextureClient.h>
#include <surfaceflinger/ISurfaceComposer.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <cutils/properties.h>
@ -65,7 +63,6 @@ namespace android {
static int64_t kLowWaterMarkUs = 2000000ll; // 2secs
static int64_t kHighWaterMarkUs = 5000000ll; // 5secs
static int64_t kHighWaterMarkRTSPUs = 4000000ll; // 4secs
static const size_t kLowWaterMarkBytes = 40000;
static const size_t kHighWaterMarkBytes = 200000;
@ -485,9 +482,6 @@ void AwesomePlayer::reset_l() {
if (mConnectingDataSource != NULL) {
LOGI("interrupting the connection process");
mConnectingDataSource->disconnect();
} else if (mConnectingRTSPController != NULL) {
LOGI("interrupting the connection process");
mConnectingRTSPController->disconnect();
}
if (mFlags & PREPARING_CONNECTED) {
@ -534,11 +528,6 @@ void AwesomePlayer::reset_l() {
mVideoRenderer.clear();
if (mRTSPController != NULL) {
mRTSPController->disconnect();
mRTSPController.clear();
}
if (mVideoSource != NULL) {
shutdownVideoDecoder_l();
}
@ -612,10 +601,7 @@ bool AwesomePlayer::getBitrate(int64_t *bitrate) {
bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
int64_t bitrate;
if (mRTSPController != NULL) {
*durationUs = mRTSPController->getQueueDurationUs(eos);
return true;
} else if (mCachedSource != NULL && getBitrate(&bitrate)) {
if (mCachedSource != NULL && getBitrate(&bitrate)) {
status_t finalStatus;
size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
*durationUs = cachedDataRemaining * 8000000ll / bitrate;
@ -751,9 +737,6 @@ void AwesomePlayer::onBufferingUpdate() {
LOGV("cachedDurationUs = %.2f secs, eos=%d",
cachedDurationUs / 1E6, eos);
int64_t highWaterMarkUs =
(mRTSPController != NULL) ? kHighWaterMarkRTSPUs : kHighWaterMarkUs;
if ((mFlags & PLAYING) && !eos
&& (cachedDurationUs < kLowWaterMarkUs)) {
LOGI("cache is running low (%.2f secs) , pausing.",
@ -763,7 +746,7 @@ void AwesomePlayer::onBufferingUpdate() {
ensureCacheIsFetching_l();
sendCacheStats();
notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
} else if (eos || cachedDurationUs > highWaterMarkUs) {
} else if (eos || cachedDurationUs > kHighWaterMarkUs) {
if (mFlags & CACHE_UNDERRUN) {
LOGI("cache has filled up (%.2f secs), resuming.",
cachedDurationUs / 1E6);
@ -1264,10 +1247,7 @@ status_t AwesomePlayer::getDuration(int64_t *durationUs) {
}
status_t AwesomePlayer::getPosition(int64_t *positionUs) {
if (mRTSPController != NULL) {
*positionUs = mRTSPController->getNormalPlayTimeUs();
}
else if (mSeeking != NO_SEEK) {
if (mSeeking != NO_SEEK) {
*positionUs = mSeekTimeUs;
} else if (mVideoSource != NULL
&& (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
@ -1317,25 +1297,7 @@ status_t AwesomePlayer::setTimedTextTrackIndex(int32_t index) {
}
}
// static
void AwesomePlayer::OnRTSPSeekDoneWrapper(void *cookie) {
static_cast<AwesomePlayer *>(cookie)->onRTSPSeekDone();
}
void AwesomePlayer::onRTSPSeekDone() {
if (!mSeekNotificationSent) {
notifyListener_l(MEDIA_SEEK_COMPLETE);
mSeekNotificationSent = true;
}
}
status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
if (mRTSPController != NULL) {
mSeekNotificationSent = false;
mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
return OK;
}
if (mFlags & CACHE_UNDERRUN) {
modifyFlags(CACHE_UNDERRUN, CLEAR);
play_l();
@ -1771,7 +1733,6 @@ void AwesomePlayer::onVideoEvent() {
int64_t latenessUs = nowUs - timeUs;
if (latenessUs > 500000ll
&& mRTSPController == NULL
&& mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(
&realTimeUs, &mediaTimeUs)) {
@ -2086,34 +2047,6 @@ status_t AwesomePlayer::finishSetDataSource_l() {
return UNKNOWN_ERROR;
}
}
} else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
if (mLooper == NULL) {
mLooper = new ALooper;
mLooper->setName("rtsp");
mLooper->start();
}
mRTSPController = new ARTSPController(mLooper);
mConnectingRTSPController = mRTSPController;
if (mUIDValid) {
mConnectingRTSPController->setUID(mUID);
}
mLock.unlock();
status_t err = mRTSPController->connect(mUri.string());
mLock.lock();
mConnectingRTSPController.clear();
LOGI("ARTSPController::connect returned %d", err);
if (err != OK) {
mRTSPController.clear();
return err;
}
sp<MediaExtractor> extractor = mRTSPController.get();
return setDataSource_l(extractor);
} else {
dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
}
@ -2225,7 +2158,7 @@ void AwesomePlayer::onPrepareAsyncEvent() {
modifyFlags(PREPARING_CONNECTED, SET);
if (isStreamingHTTP() || mRTSPController != NULL) {
if (isStreamingHTTP()) {
postBufferingEvent_l();
} else {
finishAsyncPrepare_l();

View File

@ -1,97 +0,0 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef A_RTSP_CONTROLLER_H_
#define A_RTSP_CONTROLLER_H_
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/MediaExtractor.h>
namespace android {
struct ALooper;
struct MyHandler;
struct ARTSPController : public MediaExtractor {
ARTSPController(const sp<ALooper> &looper);
void setUID(uid_t uid);
status_t connect(const char *url);
void disconnect();
void seekAsync(int64_t timeUs, void (*seekDoneCb)(void *), void *cookie);
virtual size_t countTracks();
virtual sp<MediaSource> getTrack(size_t index);
virtual sp<MetaData> getTrackMetaData(
size_t index, uint32_t flags);
int64_t getNormalPlayTimeUs();
int64_t getQueueDurationUs(bool *eos);
void onMessageReceived(const sp<AMessage> &msg);
virtual uint32_t flags() const {
// Seeking 10secs forward or backward is a very expensive operation
// for rtsp, so let's not enable that.
// The user can always use the seek bar.
return CAN_PAUSE | CAN_SEEK;
}
protected:
virtual ~ARTSPController();
private:
enum {
kWhatConnectDone = 'cdon',
kWhatDisconnectDone = 'ddon',
kWhatSeekDone = 'sdon',
};
enum State {
DISCONNECTED,
CONNECTED,
CONNECTING,
};
Mutex mLock;
Condition mCondition;
State mState;
status_t mConnectionResult;
sp<ALooper> mLooper;
sp<MyHandler> mHandler;
sp<AHandlerReflector<ARTSPController> > mReflector;
bool mUIDValid;
uid_t mUID;
void (*mSeekDoneCb)(void *);
void *mSeekDoneCookie;
int64_t mLastSeekCompletedTimeUs;
DISALLOW_EVIL_CONSTRUCTORS(ARTSPController);
};
} // namespace android
#endif // A_RTSP_CONTROLLER_H_

View File

@ -38,9 +38,6 @@ struct MediaSource;
struct NuCachedSource2;
struct ISurfaceTexture;
struct ALooper;
struct ARTSPController;
class DrmManagerClinet;
class DecryptHandle;
@ -233,10 +230,6 @@ private:
sp<HTTPBase> mConnectingDataSource;
sp<NuCachedSource2> mCachedSource;
sp<ALooper> mLooper;
sp<ARTSPController> mRTSPController;
sp<ARTSPController> mConnectingRTSPController;
DrmManagerClient *mDrmManagerClient;
sp<DecryptHandle> mDecryptHandle;
@ -287,9 +280,6 @@ private:
static bool ContinuePreparation(void *cookie);
static void OnRTSPSeekDoneWrapper(void *cookie);
void onRTSPSeekDone();
bool getBitrate(int64_t *bitrate);
void finishSeekIfNecessary(int64_t videoTimeUs);

View File

@ -34,8 +34,8 @@
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <utils/Vector.h>
@ -402,43 +402,15 @@ static sp<ABuffer> MakeMPEG4VideoCodecSpecificData(
return csd;
}
static bool GetClockRate(const AString &desc, uint32_t *clockRate) {
ssize_t slashPos = desc.find("/");
if (slashPos < 0) {
return false;
}
const char *s = desc.c_str() + slashPos + 1;
char *end;
unsigned long x = strtoul(s, &end, 10);
if (end == s || (*end != '\0' && *end != '/')) {
return false;
}
*clockRate = x;
return true;
}
APacketSource::APacketSource(
const sp<ASessionDescription> &sessionDesc, size_t index)
: mInitCheck(NO_INIT),
mFormat(new MetaData),
mEOSResult(OK),
mIsAVC(false),
mScanForIDR(true),
mRTPTimeBase(0),
mNormalPlayTimeBaseUs(0),
mLastNormalPlayTimeUs(0) {
mFormat(new MetaData) {
unsigned long PT;
AString desc;
AString params;
sessionDesc->getFormatType(index, &PT, &desc, &params);
CHECK(GetClockRate(desc, &mClockRate));
int64_t durationUs;
if (sessionDesc->getDurationUs(&durationUs)) {
mFormat->setInt64(kKeyDuration, durationUs);
@ -448,8 +420,6 @@ APacketSource::APacketSource(
mInitCheck = OK;
if (!strncmp(desc.c_str(), "H264/", 5)) {
mIsAVC = true;
mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
int32_t width, height;
@ -602,137 +572,8 @@ status_t APacketSource::initCheck() const {
return mInitCheck;
}
status_t APacketSource::start(MetaData *params) {
return OK;
}
status_t APacketSource::stop() {
return OK;
}
sp<MetaData> APacketSource::getFormat() {
return mFormat;
}
status_t APacketSource::read(
MediaBuffer **out, const ReadOptions *) {
*out = NULL;
Mutex::Autolock autoLock(mLock);
while (mEOSResult == OK && mBuffers.empty()) {
mCondition.wait(mLock);
}
if (!mBuffers.empty()) {
const sp<ABuffer> buffer = *mBuffers.begin();
updateNormalPlayTime_l(buffer);
int64_t timeUs;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
*out = mediaBuffer;
mBuffers.erase(mBuffers.begin());
return OK;
}
return mEOSResult;
}
void APacketSource::updateNormalPlayTime_l(const sp<ABuffer> &buffer) {
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
mLastNormalPlayTimeUs =
(((double)rtpTime - (double)mRTPTimeBase) / mClockRate)
* 1000000ll
+ mNormalPlayTimeBaseUs;
}
void APacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
int32_t damaged;
if (buffer->meta()->findInt32("damaged", &damaged) && damaged) {
LOGV("discarding damaged AU");
return;
}
if (mScanForIDR && mIsAVC) {
// This pretty piece of code ensures that the first access unit
// fed to the decoder after stream-start or seek is guaranteed to
// be an IDR frame. This is to workaround limitations of a certain
// hardware h.264 decoder that requires this to be the case.
if (!IsIDR(buffer)) {
LOGV("skipping AU while scanning for next IDR frame.");
return;
}
mScanForIDR = false;
}
Mutex::Autolock autoLock(mLock);
mBuffers.push_back(buffer);
mCondition.signal();
}
void APacketSource::signalEOS(status_t result) {
CHECK(result != OK);
Mutex::Autolock autoLock(mLock);
mEOSResult = result;
mCondition.signal();
}
void APacketSource::flushQueue() {
Mutex::Autolock autoLock(mLock);
mBuffers.clear();
mScanForIDR = true;
}
int64_t APacketSource::getNormalPlayTimeUs() {
Mutex::Autolock autoLock(mLock);
return mLastNormalPlayTimeUs;
}
void APacketSource::setNormalPlayTimeMapping(
uint32_t rtpTime, int64_t normalPlayTimeUs) {
Mutex::Autolock autoLock(mLock);
mRTPTimeBase = rtpTime;
mNormalPlayTimeBaseUs = normalPlayTimeUs;
}
int64_t APacketSource::getQueueDurationUs(bool *eos) {
Mutex::Autolock autoLock(mLock);
*eos = (mEOSResult != OK);
if (mBuffers.size() < 2) {
return 0;
}
const sp<ABuffer> first = *mBuffers.begin();
const sp<ABuffer> last = *--mBuffers.end();
int64_t firstTimeUs;
CHECK(first->meta()->findInt64("timeUs", &firstTimeUs));
int64_t lastTimeUs;
CHECK(last->meta()->findInt64("timeUs", &lastTimeUs));
if (lastTimeUs < firstTimeUs) {
LOGE("Huh? Time moving backwards? %lld > %lld",
firstTimeUs, lastTimeUs);
return 0;
}
return lastTimeUs - firstTimeUs;
}
} // namespace android

View File

@ -19,63 +19,27 @@
#define A_PACKET_SOURCE_H_
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/MediaSource.h>
#include <utils/threads.h>
#include <utils/List.h>
#include <media/stagefright/MetaData.h>
#include <utils/RefBase.h>
namespace android {
struct ABuffer;
struct ASessionDescription;
struct APacketSource : public MediaSource {
struct APacketSource : public RefBase {
APacketSource(const sp<ASessionDescription> &sessionDesc, size_t index);
status_t initCheck() const;
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
virtual sp<MetaData> getFormat();
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
void queueAccessUnit(const sp<ABuffer> &buffer);
void signalEOS(status_t result);
void flushQueue();
int64_t getNormalPlayTimeUs();
void setNormalPlayTimeMapping(
uint32_t rtpTime, int64_t normalPlayTimeUs);
int64_t getQueueDurationUs(bool *eos);
protected:
virtual ~APacketSource();
private:
status_t mInitCheck;
Mutex mLock;
Condition mCondition;
sp<MetaData> mFormat;
List<sp<ABuffer> > mBuffers;
status_t mEOSResult;
bool mIsAVC;
bool mScanForIDR;
uint32_t mClockRate;
uint32_t mRTPTimeBase;
int64_t mNormalPlayTimeBaseUs;
int64_t mLastNormalPlayTimeUs;
void updateNormalPlayTime_l(const sp<ABuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(APacketSource);
};

View File

@ -1,214 +0,0 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ARTSPController.h"
#include "MyHandler.h"
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
namespace android {
ARTSPController::ARTSPController(const sp<ALooper> &looper)
: mState(DISCONNECTED),
mLooper(looper),
mUIDValid(false),
mSeekDoneCb(NULL),
mSeekDoneCookie(NULL),
mLastSeekCompletedTimeUs(-1) {
mReflector = new AHandlerReflector<ARTSPController>(this);
looper->registerHandler(mReflector);
}
ARTSPController::~ARTSPController() {
CHECK_EQ((int)mState, (int)DISCONNECTED);
mLooper->unregisterHandler(mReflector->id());
}
void ARTSPController::setUID(uid_t uid) {
mUIDValid = true;
mUID = uid;
}
status_t ARTSPController::connect(const char *url) {
Mutex::Autolock autoLock(mLock);
if (mState != DISCONNECTED) {
return ERROR_ALREADY_CONNECTED;
}
sp<AMessage> msg = new AMessage(kWhatConnectDone, mReflector->id());
mHandler = new MyHandler(url, mLooper, mUIDValid, mUID);
mState = CONNECTING;
mHandler->connect(msg);
while (mState == CONNECTING) {
mCondition.wait(mLock);
}
if (mState != CONNECTED) {
mHandler.clear();
}
return mConnectionResult;
}
void ARTSPController::disconnect() {
Mutex::Autolock autoLock(mLock);
if (mState == CONNECTING) {
mState = DISCONNECTED;
mConnectionResult = ERROR_IO;
mCondition.broadcast();
mHandler.clear();
return;
} else if (mState != CONNECTED) {
return;
}
sp<AMessage> msg = new AMessage(kWhatDisconnectDone, mReflector->id());
mHandler->disconnect(msg);
while (mState == CONNECTED) {
mCondition.wait(mLock);
}
mHandler.clear();
}
void ARTSPController::seekAsync(
int64_t timeUs,
void (*seekDoneCb)(void *), void *cookie) {
Mutex::Autolock autoLock(mLock);
CHECK(seekDoneCb != NULL);
CHECK(mSeekDoneCb == NULL);
// Ignore seek requests that are too soon after the previous one has
// completed, we don't want to swamp the server.
bool tooEarly =
mLastSeekCompletedTimeUs >= 0
&& ALooper::GetNowUs() < mLastSeekCompletedTimeUs + 500000ll;
if (mState != CONNECTED || tooEarly) {
(*seekDoneCb)(cookie);
return;
}
mSeekDoneCb = seekDoneCb;
mSeekDoneCookie = cookie;
sp<AMessage> msg = new AMessage(kWhatSeekDone, mReflector->id());
mHandler->seek(timeUs, msg);
}
size_t ARTSPController::countTracks() {
if (mHandler == NULL) {
return 0;
}
return mHandler->countTracks();
}
sp<MediaSource> ARTSPController::getTrack(size_t index) {
CHECK(mHandler != NULL);
return mHandler->getPacketSource(index);
}
sp<MetaData> ARTSPController::getTrackMetaData(
size_t index, uint32_t flags) {
CHECK(mHandler != NULL);
return mHandler->getPacketSource(index)->getFormat();
}
void ARTSPController::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatConnectDone:
{
Mutex::Autolock autoLock(mLock);
CHECK(msg->findInt32("result", &mConnectionResult));
mState = (mConnectionResult == OK) ? CONNECTED : DISCONNECTED;
mCondition.signal();
break;
}
case kWhatDisconnectDone:
{
Mutex::Autolock autoLock(mLock);
mState = DISCONNECTED;
mCondition.signal();
break;
}
case kWhatSeekDone:
{
LOGI("seek done");
mLastSeekCompletedTimeUs = ALooper::GetNowUs();
void (*seekDoneCb)(void *) = mSeekDoneCb;
mSeekDoneCb = NULL;
(*seekDoneCb)(mSeekDoneCookie);
break;
}
default:
TRESPASS();
break;
}
}
int64_t ARTSPController::getNormalPlayTimeUs() {
CHECK(mHandler != NULL);
return mHandler->getNormalPlayTimeUs();
}
int64_t ARTSPController::getQueueDurationUs(bool *eos) {
*eos = true;
int64_t minQueuedDurationUs = 0;
for (size_t i = 0; i < mHandler->countTracks(); ++i) {
sp<APacketSource> source = mHandler->getPacketSource(i);
bool newEOS;
int64_t queuedDurationUs = source->getQueueDurationUs(&newEOS);
if (!newEOS) {
*eos = false;
}
if (i == 0 || queuedDurationUs < minQueuedDurationUs) {
minQueuedDurationUs = queuedDurationUs;
}
}
return minQueuedDurationUs;
}
} // namespace android

View File

@ -15,7 +15,6 @@ LOCAL_SRC_FILES:= \
ARTPSource.cpp \
ARTPWriter.cpp \
ARTSPConnection.cpp \
ARTSPController.cpp \
ASessionDescription.cpp \
LOCAL_C_INCLUDES:= \

View File

@ -94,12 +94,24 @@ static bool GetAttribute(const char *s, const char *key, AString *value) {
}
struct MyHandler : public AHandler {
enum {
kWhatConnected = 'conn',
kWhatDisconnected = 'disc',
kWhatSeekDone = 'sdon',
kWhatAccessUnit = 'accU',
kWhatEOS = 'eos!',
kWhatSeekDiscontinuity = 'seeD',
kWhatNormalPlayTimeMapping = 'nptM',
};
MyHandler(
const char *url, const sp<ALooper> &looper,
const char *url,
const sp<AMessage> &notify,
bool uidValid = false, uid_t uid = 0)
: mUIDValid(uidValid),
: mNotify(notify),
mUIDValid(uidValid),
mUID(uid),
mLooper(looper),
mNetLooper(new ALooper),
mConn(new ARTSPConnection(mUIDValid, mUID)),
mRTPConn(new ARTPConnection),
@ -145,12 +157,9 @@ struct MyHandler : public AHandler {
mSessionHost = host;
}
void connect(const sp<AMessage> &doneMsg) {
mDoneMsg = doneMsg;
mLooper->registerHandler(this);
mLooper->registerHandler(mConn);
(1 ? mNetLooper : mLooper)->registerHandler(mRTPConn);
void connect() {
looper()->registerHandler(mConn);
(1 ? mNetLooper : looper())->registerHandler(mRTPConn);
sp<AMessage> notify = new AMessage('biny', id());
mConn->observeBinaryData(notify);
@ -159,33 +168,16 @@ struct MyHandler : public AHandler {
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
void disconnect(const sp<AMessage> &doneMsg) {
mDoneMsg = doneMsg;
void disconnect() {
(new AMessage('abor', id()))->post();
}
void seek(int64_t timeUs, const sp<AMessage> &doneMsg) {
void seek(int64_t timeUs) {
sp<AMessage> msg = new AMessage('seek', id());
msg->setInt64("time", timeUs);
msg->setMessage("doneMsg", doneMsg);
msg->post();
}
int64_t getNormalPlayTimeUs() {
int64_t maxTimeUs = 0;
for (size_t i = 0; i < mTracks.size(); ++i) {
int64_t timeUs = mTracks.editItemAt(i).mPacketSource
->getNormalPlayTimeUs();
if (i == 0 || timeUs > maxTimeUs) {
maxTimeUs = timeUs;
}
}
return maxTimeUs;
}
static void addRR(const sp<ABuffer> &buf) {
uint8_t *ptr = buf->data() + buf->size();
ptr[0] = 0x80 | 0;
@ -619,7 +611,9 @@ struct MyHandler : public AHandler {
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
info->mPacketSource->signalEOS(ERROR_END_OF_STREAM);
if (!mFirstAccessUnit) {
postQueueEOS(i, ERROR_END_OF_STREAM);
}
if (!info->mUsingInterleavedTCP) {
mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
@ -690,11 +684,10 @@ struct MyHandler : public AHandler {
case 'quit':
{
if (mDoneMsg != NULL) {
mDoneMsg->setInt32("result", UNKNOWN_ERROR);
mDoneMsg->post();
mDoneMsg = NULL;
}
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatDisconnected);
msg->setInt32("result", UNKNOWN_ERROR);
msg->post();
break;
}
@ -795,17 +788,12 @@ struct MyHandler : public AHandler {
case 'seek':
{
sp<AMessage> doneMsg;
CHECK(msg->findMessage("doneMsg", &doneMsg));
if (mSeekPending) {
doneMsg->post();
break;
}
if (!mSeekable) {
LOGW("This is a live stream, ignoring seek request.");
doneMsg->post();
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatSeekDone);
msg->post();
break;
}
@ -831,7 +819,6 @@ struct MyHandler : public AHandler {
sp<AMessage> reply = new AMessage('see1', id());
reply->setInt64("time", timeUs);
reply->setMessage("doneMsg", doneMsg);
mConn->sendRequest(request.c_str(), reply);
break;
}
@ -842,7 +829,8 @@ struct MyHandler : public AHandler {
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
info->mPacketSource->flushQueue();
postQueueSeekDiscontinuity(i);
info->mRTPAnchor = 0;
info->mNTPAnchorUs = -1;
}
@ -866,11 +854,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
sp<AMessage> doneMsg;
CHECK(msg->findMessage("doneMsg", &doneMsg));
sp<AMessage> reply = new AMessage('see2', id());
reply->setMessage("doneMsg", doneMsg);
mConn->sendRequest(request.c_str(), reply);
break;
}
@ -915,10 +899,9 @@ struct MyHandler : public AHandler {
mSeekPending = false;
sp<AMessage> doneMsg;
CHECK(msg->findMessage("doneMsg", &doneMsg));
doneMsg->post();
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatSeekDone);
msg->post();
break;
}
@ -1056,8 +1039,14 @@ struct MyHandler : public AHandler {
LOGV("track #%d: rtpTime=%u <=> npt=%.2f", n, rtpTime, npt1);
info->mPacketSource->setNormalPlayTimeMapping(
rtpTime, (int64_t)(npt1 * 1E6));
info->mNormalPlayTimeRTP = rtpTime;
info->mNormalPlayTimeUs = (int64_t)(npt1 * 1E6);
if (!mFirstAccessUnit) {
postNormalPlayTimeMapping(
trackIndex,
info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
}
++n;
}
@ -1065,11 +1054,15 @@ struct MyHandler : public AHandler {
mSeekable = true;
}
sp<APacketSource> getPacketSource(size_t index) {
sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale) {
CHECK_GE(index, 0u);
CHECK_LT(index, mTracks.size());
return mTracks.editItemAt(index).mPacketSource;
const TrackInfo &info = mTracks.itemAt(index);
*timeScale = info.mTimeScale;
return info.mPacketSource->getFormat();
}
size_t countTracks() const {
@ -1089,6 +1082,9 @@ private:
int64_t mNTPAnchorUs;
int32_t mTimeScale;
uint32_t mNormalPlayTimeRTP;
int64_t mNormalPlayTimeUs;
sp<APacketSource> mPacketSource;
// Stores packets temporarily while no notion of time
@ -1096,9 +1092,9 @@ private:
List<sp<ABuffer> > mPackets;
};
sp<AMessage> mNotify;
bool mUIDValid;
uid_t mUID;
sp<ALooper> mLooper;
sp<ALooper> mNetLooper;
sp<ARTSPConnection> mConn;
sp<ARTPConnection> mRTPConn;
@ -1127,8 +1123,6 @@ private:
Vector<TrackInfo> mTracks;
sp<AMessage> mDoneMsg;
void setupTrack(size_t index) {
sp<APacketSource> source =
new APacketSource(mSessionDesc, index);
@ -1158,6 +1152,8 @@ private:
info->mNewSegment = true;
info->mRTPAnchor = 0;
info->mNTPAnchorUs = -1;
info->mNormalPlayTimeRTP = 0;
info->mNormalPlayTimeUs = 0ll;
unsigned long PT;
AString formatDesc;
@ -1283,9 +1279,17 @@ private:
LOGV("onAccessUnitComplete track %d", trackIndex);
if (mFirstAccessUnit) {
mDoneMsg->setInt32("result", OK);
mDoneMsg->post();
mDoneMsg = NULL;
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatConnected);
msg->post();
for (size_t i = 0; i < mTracks.size(); ++i) {
TrackInfo *info = &mTracks.editItemAt(i);
postNormalPlayTimeMapping(
i,
info->mNormalPlayTimeRTP, info->mNormalPlayTimeUs);
}
mFirstAccessUnit = false;
}
@ -1303,12 +1307,12 @@ private:
track->mPackets.erase(track->mPackets.begin());
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
track->mPacketSource->queueAccessUnit(accessUnit);
postQueueAccessUnit(trackIndex, accessUnit);
}
}
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
track->mPacketSource->queueAccessUnit(accessUnit);
postQueueAccessUnit(trackIndex, accessUnit);
}
}
@ -1344,6 +1348,39 @@ private:
return true;
}
void postQueueAccessUnit(
size_t trackIndex, const sp<ABuffer> &accessUnit) {
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatAccessUnit);
msg->setSize("trackIndex", trackIndex);
msg->setObject("accessUnit", accessUnit);
msg->post();
}
void postQueueEOS(size_t trackIndex, status_t finalResult) {
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatEOS);
msg->setSize("trackIndex", trackIndex);
msg->setInt32("finalResult", finalResult);
msg->post();
}
void postQueueSeekDiscontinuity(size_t trackIndex) {
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatSeekDiscontinuity);
msg->setSize("trackIndex", trackIndex);
msg->post();
}
void postNormalPlayTimeMapping(
size_t trackIndex, uint32_t rtpTime, int64_t nptUs) {
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatNormalPlayTimeMapping);
msg->setSize("trackIndex", trackIndex);
msg->setInt32("rtpTime", rtpTime);
msg->setInt64("nptUs", nptUs);
msg->post();
}
DISALLOW_EVIL_CONSTRUCTORS(MyHandler);
};
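The MyHandler changes replace the old caller-supplied doneMsg completions with a single mNotify template: each event dup()s that template, tags it with a 'what' code (kWhatConnected, kWhatEOS, and so on), and posts it, which is what RTSPSource::onMessageReceived() dispatches on. A tiny standalone sketch of that dup-and-tag pattern; ToyMessage is an illustrative stand-in for AMessage, not the real foundation class:

#include <cstdio>
#include <map>
#include <string>

// Toy stand-in for AMessage: a bag of integer fields.
struct ToyMessage {
    std::map<std::string, long long> fields;
    ToyMessage dup() const { return *this; }                  // like AMessage::dup()
    void setInt(const std::string &k, long long v) { fields[k] = v; }
    void post() const {                                       // delivered synchronously in this sketch
        printf("what=%lld trackIndex=%lld\n",
               fields.at("what"),
               fields.count("trackIndex") ? fields.at("trackIndex") : -1LL);
    }
};

enum { kWhatConnected = 'conn', kWhatEOS = 'eos!' };

int main() {
    ToyMessage notify;                  // template handed to the handler once

    ToyMessage connected = notify.dup();
    connected.setInt("what", kWhatConnected);
    connected.post();

    ToyMessage eos = notify.dup();      // each event re-dups the same template
    eos.setInt("what", kWhatEOS);
    eos.setInt("trackIndex", 0);
    eos.post();
    return 0;
}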