Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit bcc30e06 authored by Wonsik Kim, committed by Android (Google) Code Review
Browse files

Merge changes Iafb91645,I56a0ec7b into nyc-dr1-dev

* changes:
  mediarecorder: fix timestamp adjustment for early pause/resume
  Revert "media: Mediarecorder drops the mediabuffer with negative adjusted timestamp."
parents 3f41b466 a0c11a3e
Loading
Loading
Loading
Loading
+5 −0
Original line number Diff line number Diff line
@@ -50,6 +50,7 @@ struct MediaCodecSource : public MediaSource,
    bool isVideo() const { return mIsVideo; }
    sp<IGraphicBufferProducer> getGraphicBufferProducer();
    void setInputBufferTimeOffset(int64_t timeOffsetUs);
    int64_t getFirstSampleSystemTimeUs();

    // MediaSource
    virtual status_t start(MetaData *params = NULL);
@@ -79,6 +80,7 @@ private:
        kWhatStop,
        kWhatPause,
        kWhatSetInputBufferTimeOffset,
        kWhatGetFirstSampleSystemTimeUs,
        kWhatStopStalled,
    };

@@ -90,6 +92,7 @@ private:
            uint32_t flags = 0);

    status_t onStart(MetaData *params);
    void onPause();
    status_t init();
    status_t initEncoder();
    void releaseEncoder();
@@ -123,6 +126,8 @@ private:
    List<size_t> mAvailEncoderInputIndices;
    List<int64_t> mDecodingTimeQueue; // decoding time (us) for video
    int64_t mInputBufferTimeOffsetUs;
    int64_t mFirstSampleSystemTimeUs;
    bool mPausePending;

    // audio drift time
    int64_t mFirstSampleTimeUs;
+28 −9
Original line number Diff line number Diff line
@@ -1807,19 +1807,38 @@ status_t StagefrightRecorder::resume() {
        return OK;
    }

    int64_t bufferStartTimeUs = 0;
    bool allSourcesStarted = true;
    for (const auto &source : { mAudioEncoderSource, mVideoEncoderSource }) {
        if (source == nullptr) {
            continue;
        }
        int64_t timeUs = source->getFirstSampleSystemTimeUs();
        if (timeUs < 0) {
            allSourcesStarted = false;
        }
        if (bufferStartTimeUs < timeUs) {
            bufferStartTimeUs = timeUs;
        }
    }

    if (allSourcesStarted) {
        if (mPauseStartTimeUs < bufferStartTimeUs) {
            mPauseStartTimeUs = bufferStartTimeUs;
        }
        // 30 ms buffer to avoid timestamp overlap
        mTotalPausedDurationUs += (systemTime() / 1000) - mPauseStartTimeUs - 30000;
    }
    double timeOffset = -mTotalPausedDurationUs;
    if (mCaptureFpsEnable) {
        timeOffset *= mCaptureFps / mFrameRate;
    }
    if (mAudioEncoderSource != NULL) {
        mAudioEncoderSource->setInputBufferTimeOffset((int64_t)timeOffset);
        mAudioEncoderSource->start();
    for (const auto &source : { mAudioEncoderSource, mVideoEncoderSource }) {
        if (source == nullptr) {
            continue;
        }
    if (mVideoEncoderSource != NULL) {
        mVideoEncoderSource->setInputBufferTimeOffset((int64_t)timeOffset);
        mVideoEncoderSource->start();
        source->setInputBufferTimeOffset((int64_t)timeOffset);
        source->start();
    }
    mPauseStartTimeUs = 0;

+64 −26
Original line number Diff line number Diff line
@@ -342,6 +342,17 @@ void MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
    postSynchronouslyAndReturnError(msg);
}

// Returns the system time (us) at which this source produced its first
// sample, or -1 if no sample has been seen yet. The value lives on the
// looper thread, so we fetch it with a synchronous message round-trip
// (kWhatGetFirstSampleSystemTimeUs) rather than touching the member
// directly from the caller's thread.
int64_t MediaCodecSource::getFirstSampleSystemTimeUs() {
    sp<AMessage> query = new AMessage(kWhatGetFirstSampleSystemTimeUs, mReflector);
    sp<AMessage> response;
    query->postAndAwaitResponse(&response);

    int64_t firstSampleUs;
    // "time-us" is absent only if the handler never populated it; treat
    // that the same as "no sample yet".
    return response->findInt64("time-us", &firstSampleUs) ? firstSampleUs : -1ll;
}

status_t MediaCodecSource::start(MetaData* params) {
    sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
    msg->setObject("meta", params);
@@ -408,6 +419,8 @@ MediaCodecSource::MediaCodecSource(
      mEncoderDataSpace(0),
      mGraphicBufferConsumer(consumer),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleSystemTimeUs(-1ll),
      mPausePending(false),
      mFirstSampleTimeUs(-1ll),
      mGeneration(0) {
    CHECK(mLooper != NULL);
@@ -646,16 +659,18 @@ status_t MediaCodecSource::feedEncoderInputBuffers() {

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
            timeUs += mInputBufferTimeOffsetUs;

            // Due to the extra delay adjustment at the beginning of start/resume,
            // the adjusted timeUs may be negative if MediaCodecSource goes into pause
            // state before feeding any buffers to the encoder. Drop the buffer in this
            // case.
            if (timeUs < 0) {
            if (mFirstSampleSystemTimeUs < 0ll) {
                mFirstSampleSystemTimeUs = systemTime() / 1000;
                if (mPausePending) {
                    mPausePending = false;
                    onPause();
                    mbuf->release();
                    mAvailEncoderInputIndices.push_back(bufferIndex);
                    return OK;
                }
            }

            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
@@ -665,7 +680,6 @@ status_t MediaCodecSource::feedEncoderInputBuffers() {
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }

                int64_t driftTimeUs = 0;
                if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
@@ -717,6 +731,10 @@ status_t MediaCodecSource::onStart(MetaData *params) {

    if (mStarted) {
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mPausePending) {
            mPausePending = false;
            return OK;
        }
        if (mIsVideo) {
            mEncoder->requestIDRFrame();
        }
@@ -763,6 +781,15 @@ status_t MediaCodecSource::onStart(MetaData *params) {
    return OK;
}

// Pause the input side of the source. With a surface input the encoder's
// buffer producer is suspended; otherwise the puller that feeds input
// buffers is paused. Runs on the looper thread (see kWhatPause handling).
void MediaCodecSource::onPause() {
    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        // Non-surface path: input comes from the puller, which must exist.
        CHECK(mPuller != NULL);
        mPuller->pause();
    } else {
        suspend();
    }
}

void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
    case kWhatPullerNotify:
@@ -832,25 +859,27 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
            }

            MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
            mbuf->add_ref();

            if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                if (mIsVideo) {
                    int64_t decodingTimeUs;
                    if (mFlags & FLAG_USE_SURFACE_INPUT) {
                        if (mFirstSampleSystemTimeUs < 0ll) {
                            mFirstSampleSystemTimeUs = systemTime() / 1000;
                            if (mPausePending) {
                                mPausePending = false;
                                onPause();
                                mbuf->release();
                                break;
                            }
                        }
                        // Time offset is not applied at
                        // feedEncoderInputBuffer() in surface input case.
                        timeUs += mInputBufferTimeOffsetUs;

                        // Due to the extra delay adjustment at the beginning of
                        // start/resume, the adjusted timeUs may be negative if
                        // MediaCodecSource goes into pause state before feeding
                        // any buffers to the encoder. Drop the buffer in this case.
                        if (timeUs < 0) {
                            mEncoder->releaseOutputBuffer(index);
                            break;
                        }

                        // GraphicBufferSource is supposed to discard samples
                        // queued before start, and offset timeUs by start time
                        CHECK_GE(timeUs, 0ll);
                        // TODO:
                        // Decoding time for surface source is unavailable,
                        // use presentation time for now. May need to move
@@ -883,8 +912,8 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
            if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
            }
            memcpy(mbuf->data(), outbuf->data(), outbuf->size());
            mbuf->setObserver(this);
            mbuf->add_ref();

            {
                Mutexed<Output>::Locked output(mOutput);
@@ -975,11 +1004,10 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {

    case kWhatPause:
    {
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            suspend();
        if (mFirstSampleSystemTimeUs < 0) {
            mPausePending = true;
        } else {
            CHECK(mPuller != NULL);
            mPuller->pause();
            onPause();
        }
        break;
    }
@@ -994,6 +1022,16 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
        response->postReply(replyID);
        break;
    }
    case kWhatGetFirstSampleSystemTimeUs:
    {
        sp<AReplyToken> replyID;
        CHECK(msg->senderAwaitsResponse(&replyID));

        sp<AMessage> response = new AMessage;
        response->setInt64("time-us", mFirstSampleSystemTimeUs);
        response->postReply(replyID);
        break;
    }
    default:
        TRESPASS();
    }