am d6a85a21: Merge "Support user-supplied timescales for authoring" into gingerbread

Merge commit 'd6a85a21418338a7797bff731fa7a551f62a9e86' into gingerbread-plus-aosp

* commit 'd6a85a21418338a7797bff731fa7a551f62a9e86':
  Support user-supplied timescales for authoring
James Dong authored on 2010-07-15 11:18:53 -07:00; committed by Android Git Automerger
6 changed files with 152 additions and 53 deletions

View File

@ -52,6 +52,7 @@ public:
void endBox();
uint32_t interleaveDuration() const { return mInterleaveDurationUs; }
status_t setInterleaveDuration(uint32_t duration);
int32_t getTimeScale() const { return mTimeScale; }
protected:
virtual ~MPEG4Writer();
@ -72,6 +73,7 @@ private:
bool mStreamableFile;
off_t mEstimatedMoovBoxSize;
uint32_t mInterleaveDurationUs;
int32_t mTimeScale;
int64_t mStartTimestampUs;
Mutex mLock;

View File

@ -68,6 +68,7 @@ enum {
kKeyDiscNumber = 'dnum', // cstring
kKeyDate = 'date', // cstring
kKeyWriter = 'writ', // cstring
kKeyTimeScale = 'tmsl', // int32_t
// video profile and level
kKeyVideoProfile = 'vprf', // int32_t
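
For illustration, kKeyTimeScale behaves like the other int32_t metadata keys and is read and written through MetaData::setInt32()/findInt32(). A minimal sketch (the value 30000 is an arbitrary example, and the fallback mirrors the writer-side default shown further down):

    sp<MetaData> meta = new MetaData;
    meta->setInt32(kKeyTimeScale, 30000);      // e.g. a 30000-ticks-per-second clock
    int32_t timeScale;
    if (!meta->findInt32(kKeyTimeScale, &timeScale)) {
        timeScale = 1000;                      // default used by MPEG4Writer
    }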

View File

@ -381,12 +381,12 @@ status_t StagefrightRecorder::setParamInterleaveDuration(int32_t durationUs) {
return OK;
}
// If interval < 0, only the first frame is I frame, and rest are all P frames
// If interval == 0, all frames are encoded as I frames. No P frames
// If interval > 0, it is the time spacing (seconds) between 2 neighboring I frames
status_t StagefrightRecorder::setParamVideoIFramesInterval(int32_t interval) {
LOGV("setParamVideoIFramesInterval: %d seconds", interval);
mIFramesInterval = interval;
// If seconds < 0, only the first frame is I frame, and rest are all P frames
// If seconds == 0, all frames are encoded as I frames. No P frames
// If seconds > 0, it is the time spacing (seconds) between 2 neighboring I frames
status_t StagefrightRecorder::setParamVideoIFramesInterval(int32_t seconds) {
LOGV("setParamVideoIFramesInterval: %d seconds", seconds);
mIFramesIntervalSec = seconds;
return OK;
}
@ -444,6 +444,44 @@ status_t StagefrightRecorder::setParamVideoEncoderLevel(int32_t level) {
return OK;
}
status_t StagefrightRecorder::setParamMovieTimeScale(int32_t timeScale) {
LOGV("setParamMovieTimeScale: %d", timeScale);
// The range is set to be the same as the audio's time scale range
// since audio's time scale has a wider range.
if (timeScale < 600 || timeScale > 96000) {
LOGE("Time scale (%d) for movie is out of range [600, 96000]", timeScale);
return BAD_VALUE;
}
mMovieTimeScale = timeScale;
return OK;
}
status_t StagefrightRecorder::setParamVideoTimeScale(int32_t timeScale) {
LOGV("setParamVideoTimeScale: %d", timeScale);
// 60000 is chosen so that each frame of a 60-fps video spans exactly
// 1000 ticks.
if (timeScale < 600 || timeScale > 60000) {
LOGE("Time scale (%d) for video is out of range [600, 60000]", timeScale);
return BAD_VALUE;
}
mVideoTimeScale = timeScale;
return OK;
}
status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) {
LOGV("setParamAudioTimeScale: %d", timeScale);
// 96000 Hz is the highest sampling rate supported by AAC.
if (timeScale < 600 || timeScale > 96000) {
LOGE("Time scale (%d) for audio is out of range [600, 96000]", timeScale);
return BAD_VALUE;
}
mAudioTimeScale = timeScale;
return OK;
}
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@ -462,6 +500,11 @@ status_t StagefrightRecorder::setParameter(
if (safe_strtoi32(value.string(), &durationUs)) {
return setParamInterleaveDuration(durationUs);
}
} else if (key == "param-movie-time-scale") {
int32_t timeScale;
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamMovieTimeScale(timeScale);
}
} else if (key == "param-use-64bit-offset") {
int32_t use64BitOffset;
if (safe_strtoi32(value.string(), &use64BitOffset)) {
@ -492,15 +535,20 @@ status_t StagefrightRecorder::setParameter(
if (safe_strtoi32(value.string(), &audio_bitrate)) {
return setParamAudioEncodingBitRate(audio_bitrate);
}
} else if (key == "audio-param-time-scale") {
int32_t timeScale;
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamAudioTimeScale(timeScale);
}
} else if (key == "video-param-encoding-bitrate") {
int32_t video_bitrate;
if (safe_strtoi32(value.string(), &video_bitrate)) {
return setParamVideoEncodingBitRate(video_bitrate);
}
} else if (key == "video-param-i-frames-interval") {
int32_t interval;
if (safe_strtoi32(value.string(), &interval)) {
return setParamVideoIFramesInterval(interval);
int32_t seconds;
if (safe_strtoi32(value.string(), &seconds)) {
return setParamVideoIFramesInterval(seconds);
}
} else if (key == "video-param-encoder-profile") {
int32_t profile;
@ -517,6 +565,11 @@ status_t StagefrightRecorder::setParameter(
if (safe_strtoi32(value.string(), &cameraId)) {
return setParamVideoCameraId(cameraId);
}
} else if (key == "video-param-time-scale") {
int32_t timeScale;
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamVideoTimeScale(timeScale);
}
} else {
LOGE("setParameter: failed to find key %s", key.string());
}
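
For orientation, the new keys travel through the same string-based setParameter() path as the existing recorder parameters: values arrive as decimal strings, are parsed with safe_strtoi32(), and are then range-checked by the setters above, which return BAD_VALUE on out-of-range input. A caller-side sketch (the direct instantiation is purely illustrative):

    StagefrightRecorder *recorder = new StagefrightRecorder();
    recorder->setParameter(String8("param-movie-time-scale"), String8("1000"));
    recorder->setParameter(String8("video-param-time-scale"), String8("30000"));
    recorder->setParameter(String8("audio-param-time-scale"), String8("48000"));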
@ -637,6 +690,7 @@ sp<MediaSource> StagefrightRecorder::createAudioSource() {
encMeta->setInt32(kKeyChannelCount, mAudioChannels);
encMeta->setInt32(kKeySampleRate, mSampleRate);
encMeta->setInt32(kKeyBitRate, mAudioBitRate);
encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
OMXClient client;
CHECK_EQ(client.connect(), OK);
@ -877,10 +931,11 @@ status_t StagefrightRecorder::setupVideoEncoder(const sp<MediaWriter>& writer) {
enc_meta->setInt32(kKeyWidth, width);
enc_meta->setInt32(kKeyHeight, height);
enc_meta->setInt32(kKeyIFramesInterval, mIFramesInterval);
enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
enc_meta->setInt32(kKeyStride, stride);
enc_meta->setInt32(kKeySliceHeight, sliceHeight);
enc_meta->setInt32(kKeyColorFormat, colorFormat);
enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
if (mVideoEncoderProfile != -1) {
enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
}
@ -918,6 +973,7 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
if (audioEncoder == NULL) {
return UNKNOWN_ERROR;
}
writer->addSource(audioEncoder);
return OK;
}
@ -954,6 +1010,7 @@ status_t StagefrightRecorder::startMPEG4Recording() {
meta->setInt32(kKeyFileType, mOutputFormat);
meta->setInt32(kKeyBitRate, totalBitRate);
meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
meta->setInt32(kKeyTimeScale, mMovieTimeScale);
if (mTrackEveryNumberOfFrames > 0) {
meta->setInt32(kKeyTrackFrameStatus, mTrackEveryNumberOfFrames);
}
@ -1024,9 +1081,12 @@ status_t StagefrightRecorder::reset() {
mAudioChannels = 1;
mAudioBitRate = 12200;
mInterleaveDurationUs = 0;
mIFramesInterval = 1;
mIFramesIntervalSec = 1;
mAudioSourceNode = 0;
mUse64BitFileOffset = false;
mMovieTimeScale = 1000;
mAudioTimeScale = 1000;
mVideoTimeScale = 1000;
mCameraId = 0;
mVideoEncoderProfile = -1;
mVideoEncoderLevel = -1;
@ -1108,7 +1168,7 @@ status_t StagefrightRecorder::dump(int fd, const Vector<String16>& args) const {
result.append(buffer);
snprintf(buffer, SIZE, " Encoder level: %d\n", mVideoEncoderLevel);
result.append(buffer);
snprintf(buffer, SIZE, " I frames interval (s): %d\n", mIFramesInterval);
snprintf(buffer, SIZE, " I frames interval (s): %d\n", mIFramesIntervalSec);
result.append(buffer);
snprintf(buffer, SIZE, " Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
result.append(buffer);

View File

@ -81,10 +81,13 @@ private:
int32_t mAudioChannels;
int32_t mSampleRate;
int32_t mInterleaveDurationUs;
int32_t mIFramesInterval;
int32_t mIFramesIntervalSec;
int32_t mCameraId;
int32_t mVideoEncoderProfile;
int32_t mVideoEncoderLevel;
int32_t mMovieTimeScale;
int32_t mVideoTimeScale;
int32_t mAudioTimeScale;
int64_t mMaxFileSizeBytes;
int64_t mMaxFileDurationUs;
int32_t mTrackEveryNumberOfFrames;
@ -109,17 +112,20 @@ private:
status_t setParamAudioEncodingBitRate(int32_t bitRate);
status_t setParamAudioNumberOfChannels(int32_t channels);
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamAudioTimeScale(int32_t timeScale);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t interval);
status_t setParamVideoIFramesInterval(int32_t seconds);
status_t setParamVideoEncoderProfile(int32_t profile);
status_t setParamVideoEncoderLevel(int32_t level);
status_t setParamVideoCameraId(int32_t cameraId);
status_t setParamVideoTimeScale(int32_t timeScale);
status_t setParamTrackTimeStatus(int64_t timeDurationUs);
status_t setParamTrackFrameStatus(int32_t nFrames);
status_t setParamInterleaveDuration(int32_t durationUs);
status_t setParam64BitFileOffset(bool use64BitFileOffset);
status_t setParamMaxFileDurationUs(int64_t timeUs);
status_t setParamMaxFileSizeBytes(int64_t bytes);
status_t setParamMovieTimeScale(int32_t timeScale);
void clipVideoBitRate();
void clipVideoFrameRate();
void clipVideoFrameWidth();

View File

@ -41,6 +41,7 @@ namespace android {
class MPEG4Writer::Track {
public:
Track(MPEG4Writer *owner, const sp<MediaSource> &source);
~Track();
status_t start(MetaData *params);
@ -61,12 +62,13 @@ private:
volatile bool mResumed;
int64_t mMaxTimeStampUs;
int64_t mEstimatedTrackSizeBytes;
int32_t mTimeScale;
pthread_t mThread;
struct SampleInfo {
size_t size;
int64_t timestamp;
int64_t timestampUs;
};
List<SampleInfo> mSampleInfos;
bool mSamplesHaveSameSize;
@ -92,11 +94,11 @@ private:
struct SttsTableEntry {
SttsTableEntry(uint32_t count, uint32_t duration)
: sampleCount(count), sampleDuration(duration) {}
SttsTableEntry(uint32_t count, uint32_t durationUs)
: sampleCount(count), sampleDurationUs(durationUs) {}
uint32_t sampleCount;
uint32_t sampleDuration;
uint32_t sampleDurationUs;
};
List<SttsTableEntry> mSttsTableEntries;
@ -270,6 +272,13 @@ status_t MPEG4Writer::start(MetaData *param) {
return OK;
}
if (!param ||
!param->findInt32(kKeyTimeScale, &mTimeScale)) {
mTimeScale = 1000;
}
CHECK(mTimeScale > 0);
LOGV("movie time scale: %d", mTimeScale);
mStreamableFile = true;
mWriteMoovBoxToMemory = false;
mMoovBoxBuffer = NULL;
@ -336,14 +345,14 @@ void MPEG4Writer::stop() {
return;
}
int64_t max_duration = 0;
int64_t maxDurationUs = 0;
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
(*it)->stop();
int64_t duration = (*it)->getDurationUs();
if (duration > max_duration) {
max_duration = duration;
int64_t durationUs = (*it)->getDurationUs();
if (durationUs > maxDurationUs) {
maxDurationUs = durationUs;
}
}
@ -367,8 +376,7 @@ void MPEG4Writer::stop() {
mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
mMoovBoxBufferOffset = 0;
CHECK(mMoovBoxBuffer != NULL);
int32_t timeScale = 1000;
int32_t duration = max_duration / timeScale;
int32_t duration = (maxDurationUs * mTimeScale) / 1E6;
beginBox("moov");
@ -376,7 +384,7 @@ void MPEG4Writer::stop() {
writeInt32(0); // version=0, flags=0
writeInt32(now); // creation time
writeInt32(now); // modification time
writeInt32(timeScale); // timescale
writeInt32(mTimeScale); // mvhd timescale
writeInt32(duration);
writeInt32(0x10000); // rate: 1.0
writeInt16(0x100); // volume
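
To make the unit conversion concrete, here is a small illustrative helper (not part of the patch; the name usToTicks is hypothetical) that mirrors the microsecond-to-tick arithmetic: the mvhd computation above divides without a rounding term, while the tkhd/mdhd/stts computations below add 5E5 first so the result rounds to the nearest tick.

    // Convert a duration in microseconds into ticks of the given timescale.
    static int64_t usToTicks(int64_t durationUs, int32_t timeScale) {
        return (durationUs * timeScale + 500000) / 1000000;   // round to nearest
    }
    // Examples: 5,200,000 us at timescale 1000  ->  5,200 ticks
    //           5,200,000 us at timescale 600   ->  3,120 ticks
    //           33,333 us (one 30-fps frame) at timescale 30000  ->  1,000 ticks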
@ -655,7 +663,6 @@ void MPEG4Writer::setStartTimestampUs(int64_t timeUs) {
}
int64_t MPEG4Writer::getStartTimestampUs() {
LOGI("getStartTimestampUs: %lld", mStartTimestampUs);
Mutex::Autolock autoLock(mLock);
return mStartTimestampUs;
}
@ -683,6 +690,11 @@ MPEG4Writer::Track::Track(
mGotAllCodecSpecificData(false),
mReachedEOS(false) {
getCodecSpecificDataFromInputFormatIfPossible();
if (!mMeta->findInt32(kKeyTimeScale, &mTimeScale)) {
mTimeScale = 1000;
}
CHECK(mTimeScale > 0);
}
void MPEG4Writer::Track::getCodecSpecificDataFromInputFormatIfPossible() {
@ -927,9 +939,9 @@ void MPEG4Writer::Track::threadEntry() {
int64_t chunkTimestampUs = 0;
int32_t nChunks = 0;
int32_t nZeroLengthFrames = 0;
int64_t lastTimestamp = 0; // Timestamp of the previous sample
int64_t lastDuration = 0; // Time spacing between the previous two samples
int32_t sampleCount = 1; // Sample count in the current stts table entry
int64_t lastTimestampUs = 0; // Timestamp of the previous sample, in us
int64_t lastDurationUs = 0; // Time spacing between the previous two samples, in us
int32_t sampleCount = 1; // Sample count in the current stts table entry
uint32_t previousSampleSize = 0; // Size of the previous sample
int64_t previousPausedDurationUs = 0;
sp<MetaData> meta_data;
@ -1113,7 +1125,7 @@ void MPEG4Writer::Track::threadEntry() {
}
if (mResumed) {
previousPausedDurationUs += (timestampUs - mMaxTimeStampUs - 1000 * lastDuration);
previousPausedDurationUs += (timestampUs - mMaxTimeStampUs - lastDurationUs);
mResumed = false;
}
@ -1124,12 +1136,11 @@ void MPEG4Writer::Track::threadEntry() {
mMaxTimeStampUs = timestampUs;
}
// Our timestamp is in ms.
info.timestamp = (timestampUs + 500) / 1000;
info.timestampUs = timestampUs;
mSampleInfos.push_back(info);
if (mSampleInfos.size() > 2) {
if (lastDuration != info.timestamp - lastTimestamp) {
SttsTableEntry sttsEntry(sampleCount, lastDuration);
if (lastDurationUs != info.timestampUs - lastTimestampUs) {
SttsTableEntry sttsEntry(sampleCount, lastDurationUs);
mSttsTableEntries.push_back(sttsEntry);
sampleCount = 1;
} else {
@ -1142,8 +1153,8 @@ void MPEG4Writer::Track::threadEntry() {
}
previousSampleSize = info.size;
}
lastDuration = info.timestamp - lastTimestamp;
lastTimestamp = info.timestamp;
lastDurationUs = info.timestampUs - lastTimestampUs;
lastTimestampUs = info.timestampUs;
if (isSync != 0) {
mStssTableEntries.push_back(mSampleInfos.size());
@ -1213,11 +1224,11 @@ void MPEG4Writer::Track::threadEntry() {
// there is no frame time after it, just repeat the previous
// frame's duration.
if (mSampleInfos.size() == 1) {
lastDuration = 0; // A single sample's duration
lastDurationUs = 0; // A single sample's duration
} else {
++sampleCount; // Count for the last sample
}
SttsTableEntry sttsEntry(sampleCount, lastDuration);
SttsTableEntry sttsEntry(sampleCount, lastDurationUs);
mSttsTableEntries.push_back(sttsEntry);
mReachedEOS = true;
LOGI("Received total/0-length (%d/%d) buffers and encoded %d frames - %s",
@ -1249,12 +1260,13 @@ void MPEG4Writer::Track::trackProgressStatus(int32_t nFrames, int64_t timeUs) {
void MPEG4Writer::Track::findMinAvgMaxSampleDurationMs(
int32_t *min, int32_t *avg, int32_t *max) {
CHECK(!mSampleInfos.empty());
int32_t avgSampleDurationMs = mMaxTimeStampUs / 1000/ mSampleInfos.size();
int32_t avgSampleDurationMs = mMaxTimeStampUs / 1000 / mSampleInfos.size();
int32_t minSampleDurationMs = 0x7FFFFFFF;
int32_t maxSampleDurationMs = 0;
for (List<SttsTableEntry>::iterator it = mSttsTableEntries.begin();
it != mSttsTableEntries.end(); ++it) {
int32_t sampleDurationMs = static_cast<int32_t>(it->sampleDuration);
int32_t sampleDurationMs =
(static_cast<int32_t>(it->sampleDurationUs) + 500) / 1000;
if (sampleDurationMs > maxSampleDurationMs) {
maxSampleDurationMs = sampleDurationMs;
} else if (sampleDurationMs < minSampleDurationMs) {
@ -1370,10 +1382,13 @@ void MPEG4Writer::Track::writeTrackHeader(
CHECK(success);
bool is_audio = !strncasecmp(mime, "audio/", 6);
int32_t timeScale = 1000;
int32_t duration = getDurationUs() / timeScale;
LOGV("%s track time scale: %d",
is_audio? "Audio": "Video", mTimeScale);
time_t now = time(NULL);
int32_t mvhdTimeScale = mOwner->getTimeScale();
int64_t trakDurationUs = getDurationUs();
mOwner->beginBox("trak");
@ -1385,7 +1400,9 @@ void MPEG4Writer::Track::writeTrackHeader(
mOwner->writeInt32(now); // modification time
mOwner->writeInt32(trackID);
mOwner->writeInt32(0); // reserved
mOwner->writeInt32(duration);
int32_t tkhdDuration =
(trakDurationUs * mvhdTimeScale + 5E5) / 1E6;
mOwner->writeInt32(tkhdDuration); // in mvhd timescale
mOwner->writeInt32(0); // reserved
mOwner->writeInt32(0); // reserved
mOwner->writeInt16(0); // layer
@ -1423,12 +1440,17 @@ void MPEG4Writer::Track::writeTrackHeader(
mOwner->beginBox("elst");
mOwner->writeInt32(0); // version=0, flags=0: 32-bit time
mOwner->writeInt32(2); // never ends with an empty list
int64_t durationMs =
(mStartTimestampUs - moovStartTimeUs) / 1000;
mOwner->writeInt32(durationMs); // edit duration
mOwner->writeInt32(-1); // empty edit box to signal starting time offset
mOwner->writeInt32(1 << 16); // x1 rate
mOwner->writeInt32(duration);
// First elst entry: specify the starting time offset
int64_t offsetUs = mStartTimestampUs - moovStartTimeUs;
int32_t seg = (offsetUs * mvhdTimeScale + 5E5) / 1E6;
mOwner->writeInt32(seg); // in mvhd timescale
mOwner->writeInt32(-1); // starting time offset
mOwner->writeInt32(1 << 16); // rate = 1.0
// Second elst entry: specify the track duration
seg = (trakDurationUs * mvhdTimeScale + 5E5) / 1E6;
mOwner->writeInt32(seg); // in mvhd timescale
mOwner->writeInt32(0);
mOwner->writeInt32(1 << 16);
mOwner->endBox();
@ -1441,8 +1463,9 @@ void MPEG4Writer::Track::writeTrackHeader(
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt32(now); // creation time
mOwner->writeInt32(now); // modification time
mOwner->writeInt32(timeScale); // timescale
mOwner->writeInt32(duration); // duration
mOwner->writeInt32(mTimeScale); // media timescale
int32_t mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6;
mOwner->writeInt32(mdhdDuration); // use media timescale
// Language follows the three letter standard ISO-639-2/T
// 'e', 'n', 'g' for "English", for instance.
// Each character is packed as the difference between its ASCII value and 0x60.
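
Note the two clocks in play in this header: the tkhd and elst durations above are expressed in the movie (mvhd) timescale obtained from mOwner->getTimeScale(), while the mdhd duration uses the track's own media timescale (mTimeScale). With illustrative numbers, trakDurationUs = 10,000,000, an mvhd timescale of 1000 and a video media timescale of 30000:

    tkhd/elst: (10,000,000 * 1000  + 5E5) / 1E6 = 10,000 ticks    (mvhd timescale)
    mdhd:      (10,000,000 * 30000 + 5E5) / 1E6 = 300,000 ticks   (media timescale)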
@ -1664,7 +1687,8 @@ void MPEG4Writer::Track::writeTrackHeader(
for (List<SttsTableEntry>::iterator it = mSttsTableEntries.begin();
it != mSttsTableEntries.end(); ++it) {
mOwner->writeInt32(it->sampleCount);
mOwner->writeInt32(it->sampleDuration);
int32_t dur = (it->sampleDurationUs * mTimeScale + 5E5) / 1E6;
mOwner->writeInt32(dur);
}
mOwner->endBox(); // stts
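
Because sample durations are now carried in microseconds (sampleDurationUs) until the box is written out, this is the single point where they are converted into the track's media timescale. As an illustrative example, a steady 30-fps video track accumulates entries with sampleDurationUs = 33,333; with mTimeScale = 30000 each entry is written as (33,333 * 30000 + 5E5) / 1E6 = 1,000 ticks.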
@ -1717,7 +1741,7 @@ void MPEG4Writer::Track::writeTrackHeader(
mOwner->writeInt64((*it));
}
}
mOwner->endBox(); // co64
mOwner->endBox(); // stco or co64
mOwner->endBox(); // stbl
mOwner->endBox(); // minf

View File

@ -3212,6 +3212,12 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
mOutputFormat = new MetaData;
mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
if (mIsEncoder) {
int32_t timeScale;
if (inputFormat->findInt32(kKeyTimeScale, &timeScale)) {
mOutputFormat->setInt32(kKeyTimeScale, timeScale);
}
}
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
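
Taken together, the user-supplied timescales flow end to end roughly as follows (a summary of the paths already shown in this change):

    // StagefrightRecorder::createAudioSource():
    //     encMeta->setInt32(kKeyTimeScale, mAudioTimeScale)
    // StagefrightRecorder::setupVideoEncoder():
    //     enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale)
    // StagefrightRecorder::startMPEG4Recording():
    //     meta->setInt32(kKeyTimeScale, mMovieTimeScale)  ->  passed to MPEG4Writer::start()
    // OMXCodec::initOutputFormat() (encoder case only, above):
    //     mOutputFormat->setInt32(kKeyTimeScale, timeScale)
    // MPEG4Writer::start() (movie-level mvhd timescale):
    //     param->findInt32(kKeyTimeScale, &mTimeScale), defaulting to 1000
    // MPEG4Writer::Track::Track() (per-track media timescale):
    //     mMeta->findInt32(kKeyTimeScale, &mTimeScale), defaulting to 1000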