Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit d0c3b930 authored by Wei Jia; committed by Android (Google) Code Review
Browse files

Merge "NuPlayer: handle audio output format change in a clean way."

parents ede2b25b 9a3101b2
Loading
Loading
Loading
Loading
+16 −9
Original line number Diff line number Diff line
@@ -203,6 +203,18 @@ void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
            break;
        }

        case kWhatAudioOutputFormatChanged:
        {
            if (!isStaleReply(msg)) {
                status_t err;
                if (msg->findInt32("err", &err) && err != OK) {
                    ALOGE("Renderer reported 0x%x when changing audio output format", err);
                    handleError(err);
                }
            }
            break;
        }

        case kWhatSetVideoSurface:
        {
            sp<AReplyToken> replyID;
@@ -699,15 +711,10 @@ void NuPlayer::Decoder::handleOutputFormatChange(const sp<AMessage> &format) {
            flags = AUDIO_OUTPUT_FLAG_NONE;
        }

        // TODO: This is a temporary fix to flush audio buffers in renderer. The real
        // fix should be to wait for all buffers rendered normally, then open a new
        // AudioSink.
        mRenderer->flush(true /* audio */, false /* notifyComplete */);
        status_t err = mRenderer->openAudioSink(
                format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaed */);
        if (err != OK) {
            handleError(err);
        }
        sp<AMessage> reply = new AMessage(kWhatAudioOutputFormatChanged, this);
        reply->setInt32("generation", mBufferGeneration);
        mRenderer->changeAudioFormat(
                format, false /* offloadOnly */, hasVideo, flags, reply);
    }
}

+2 −1
Original line number Diff line number Diff line
@@ -56,7 +56,8 @@ private:
    enum {
        kWhatCodecNotify         = 'cdcN',
        kWhatRenderBuffer        = 'rndr',
        kWhatSetVideoSurface     = 'sSur'
        kWhatSetVideoSurface     = 'sSur',
        kWhatAudioOutputFormatChanged = 'aofc'
    };

    enum {
+107 −18
Original line number Diff line number Diff line
@@ -144,9 +144,10 @@ NuPlayer::Renderer::~Renderer() {

    // Try to avoid racing condition in case callback is still on.
    Mutex::Autolock autoLock(mLock);
    mUseAudioCallback = false;
    if (mUseAudioCallback) {
        flushQueue(&mAudioQueue);
        flushQueue(&mVideoQueue);
    }
    mWakeLock.clear();
    mMediaClock.clear();
    mVideoScheduler.clear();
@@ -297,7 +298,7 @@ void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
            ++mVideoDrainGeneration;
        }

        clearAnchorTime_l();
        mMediaClock->clearAnchor();
        mVideoLateByUs = 0;
        mSyncQueues = false;
    }
@@ -374,7 +375,8 @@ void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
    }
}

void NuPlayer::Renderer::clearAnchorTime_l() {
// Called on renderer looper.
void NuPlayer::Renderer::clearAnchorTime() {
    mMediaClock->clearAnchor();
    mAnchorTimeMediaUs = -1;
    mAnchorNumFramesWritten = -1;
@@ -423,6 +425,25 @@ void NuPlayer::Renderer::closeAudioSink() {
    msg->postAndAwaitResponse(&response);
}

// Requests that the audio sink be re-opened with |format| once all audio
// buffers queued ahead of this request have been drained.
//
// The sink-open parameters are packed into a "meta" message and posted to
// the renderer looper as kWhatChangeAudioFormat, tagged with the current
// audio queue generation.  The handler applies the change immediately when
// the generation is stale or the audio queue is empty; otherwise it appends
// a marker entry behind the pending audio buffers so the change happens in
// order.  |notify| is posted back when the change has been processed, with
// an "err" int32 attached on failure.
void NuPlayer::Renderer::changeAudioFormat(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        const sp<AMessage> &notify) {
    sp<AMessage> meta = new AMessage;
    meta->setMessage("format", format);
    meta->setInt32("offload-only", offloadOnly);
    meta->setInt32("has-video", hasVideo);
    meta->setInt32("flags", flags);

    sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
    msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
    msg->setMessage("notify", notify);
    msg->setMessage("meta", meta);
    msg->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
@@ -470,6 +491,41 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
            break;
        }

        case kWhatChangeAudioFormat:
        {
            int32_t queueGeneration;
            CHECK(msg->findInt32("queueGeneration", &queueGeneration));

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));

            if (offloadingAudio()) {
                ALOGW("changeAudioFormat should NOT be called in offload mode");
                notify->setInt32("err", INVALID_OPERATION);
                notify->post();
                break;
            }

            sp<AMessage> meta;
            CHECK(msg->findMessage("meta", &meta));

            if (queueGeneration != getQueueGeneration(true /* audio */)
                    || mAudioQueue.empty()) {
                onChangeAudioFormat(meta, notify);
                break;
            }

            QueueEntry entry;
            entry.mNotifyConsumed = notify;
            entry.mMeta = meta;

            Mutex::Autolock autoLock(mLock);
            mAudioQueue.push_back(entry);
            postDrainAudioQueue_l();

            break;
        }

        case kWhatDrainAudioQueue:
        {
            mDrainAudioQueuePending = false;
@@ -869,7 +925,7 @@ void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
    while (it != mAudioQueue.end()) {
        int32_t eos;
        QueueEntry *entry = &*it++;
        if (entry->mBuffer == NULL
        if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
                || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
            itEOS = it;
            foundEOS = true;
@@ -879,9 +935,14 @@ void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
    if (foundEOS) {
        // post all replies before EOS and drop the samples
        for (it = mAudioQueue.begin(); it != itEOS; it++) {
            if (it->mBuffer == NULL) {
            if (it->mBuffer == nullptr) {
                if (it->mNotifyConsumed == nullptr) {
                    // delay doesn't matter as we don't even have an AudioTrack
                    notifyEOS(true /* audio */, it->mFinalResult);
                } else {
                    // TAG for re-opening audio sink.
                    onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
                }
            } else {
                it->mNotifyConsumed->post();
            }
@@ -933,9 +994,14 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
    while (!mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            if (entry->mNotifyConsumed != nullptr) {
                // TAG for re-open audio sink.
                onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
                mAudioQueue.erase(mAudioQueue.begin());
                continue;
            }

            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
@@ -956,6 +1022,8 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
            return false;
        }

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        // ignore 0-sized buffer which could be EOS marker with no data
        if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
            int64_t mediaTimeUs;
@@ -1291,8 +1359,7 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
        if (!mVideoSampleReceived && !mHasAudio) {
            // This will ensure that the first frame after a flush won't be used as anchor
            // when renderer is in paused state, because resume can happen any time after seek.
            Mutex::Autolock autoLock(mLock);
            clearAnchorTime_l();
            clearAnchorTime();
        }
    }

@@ -1502,8 +1569,8 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
        // Therefore we'll stop syncing the queues if at least one of them
        // is flushed.
        syncQueuesDone_l();
        clearAnchorTime_l();
    }
    clearAnchorTime();

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
@@ -1574,6 +1641,9 @@ void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        } else if (entry->mNotifyConsumed != nullptr) {
            // Is it needed to open audio sink now?
            onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
        }

        queue->erase(queue->begin());
@@ -1611,10 +1681,7 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    {
        Mutex::Autolock autoLock(mLock);
    mAnchorNumFramesWritten = -1;
    }
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
@@ -1962,5 +2029,27 @@ void NuPlayer::Renderer::onCloseAudioSink() {
    mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
}

// Applies a pending audio format change on the renderer looper: unpacks the
// parameters that changeAudioFormat() stored in |meta|, re-opens the audio
// sink via onOpenAudioSink(), and posts |notify| back to the requester.
// On failure, the error code is attached to |notify| as an "err" int32
// before posting.
void NuPlayer::Renderer::onChangeAudioFormat(
        const sp<AMessage> &meta, const sp<AMessage> &notify) {
    sp<AMessage> format;
    CHECK(meta->findMessage("format", &format));

    int32_t offloadOnly;
    CHECK(meta->findInt32("offload-only", &offloadOnly));

    int32_t hasVideo;
    CHECK(meta->findInt32("has-video", &hasVideo));

    uint32_t flags;
    // The flags were stored as an int32 (AMessage has no unsigned setter);
    // read them back through a pointer cast into the uint32_t.
    CHECK(meta->findInt32("flags", (int32_t *)&flags));

    status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

}  // namespace android
+15 −3
Original line number Diff line number Diff line
@@ -60,8 +60,6 @@ struct NuPlayer::Renderer : public AHandler {

    void signalTimeDiscontinuity();

    void signalAudioSinkChanged();

    void signalDisableOffloadAudio();
    void signalEnableOffloadAudio();

@@ -81,6 +79,14 @@ struct NuPlayer::Renderer : public AHandler {
            bool *isOffloaded);
    void closeAudioSink();

    // re-open audio sink after all pending audio buffers played.
    void changeAudioFormat(
            const sp<AMessage> &format,
            bool offloadOnly,
            bool hasVideo,
            uint32_t flags,
            const sp<AMessage> &notify);

    enum {
        kWhatEOS                      = 'eos ',
        kWhatFlushComplete            = 'fluC',
@@ -118,14 +124,19 @@ private:
        kWhatResume              = 'resm',
        kWhatOpenAudioSink       = 'opnA',
        kWhatCloseAudioSink      = 'clsA',
        kWhatChangeAudioFormat   = 'chgA',
        kWhatStopAudioSink       = 'stpA',
        kWhatDisableOffloadAudio = 'noOA',
        kWhatEnableOffloadAudio  = 'enOA',
        kWhatSetVideoFrameRate   = 'sVFR',
    };

    // if mBuffer != nullptr, it's a buffer containing real data.
    // else if mNotifyConsumed == nullptr, it's EOS.
    // else it's a tag for re-opening audio sink in different format.
    struct QueueEntry {
        sp<MediaCodecBuffer> mBuffer;
        sp<AMessage> mMeta;
        sp<AMessage> mNotifyConsumed;
        size_t mOffset;
        status_t mFinalResult;
@@ -220,7 +231,7 @@ private:
    int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs);
    void postDrainAudioQueue_l(int64_t delayUs = 0);

    void clearAnchorTime_l();
    void clearAnchorTime();
    void clearAudioFirstAnchorTime_l();
    void setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs);
    void setVideoLateByUs(int64_t lateUs);
@@ -258,6 +269,7 @@ private:
            bool hasVideo,
            uint32_t flags);
    void onCloseAudioSink();
    void onChangeAudioFormat(const sp<AMessage> &meta, const sp<AMessage> &notify);

    void notifyEOS(bool audio, status_t finalResult, int64_t delayUs = 0);
    void notifyFlushComplete(bool audio);