Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 69cd79b7 authored by Yixiao Luo's avatar Yixiao Luo Committed by Automerger Merge Worker
Browse files

Merge "Use LinearBlock buffer for thumbnail extraction" into main am: 5d729cd7 am: 0d2d31d0

parents e5006d7f 0d2d31d0
Loading
Loading
Loading
Loading
+7 −0
Original line number Original line Diff line number Diff line
@@ -133,3 +133,10 @@ flag {
  description: "Feature flag to track teamfood population"
  description: "Feature flag to track teamfood population"
  bug: "328770262"
  bug: "328770262"
}
}

flag {
  name: "thumbnail_block_model"
  namespace: "codec_fwk"
  description: "Feature flag for using block model decoder in thumbnail generation"
  bug: "329521645"
}
+314 −35
Original line number Original line Diff line number Diff line
@@ -18,19 +18,14 @@
#define LOG_TAG "FrameDecoder"
#define LOG_TAG "FrameDecoder"
#define ATRACE_TAG  ATRACE_TAG_VIDEO
#define ATRACE_TAG  ATRACE_TAG_VIDEO
#include "include/FrameDecoder.h"
#include "include/FrameDecoder.h"
#include "include/FrameCaptureLayer.h"
#include <android_media_codec.h>
#include "include/HevcUtils.h"
#include <binder/MemoryBase.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/Surface.h>
#include <gui/Surface.h>
#include <inttypes.h>
#include <inttypes.h>
#include <mediadrm/ICrypto.h>
#include <media/IMediaSource.h>
#include <media/IMediaSource.h>
#include <media/MediaCodecBuffer.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ColorUtils.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/FrameCaptureProcessor.h>
#include <media/stagefright/FrameCaptureProcessor.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBuffer.h>
@@ -39,13 +34,24 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ColorUtils.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <mediadrm/ICrypto.h>
#include <private/media/VideoFrame.h>
#include <private/media/VideoFrame.h>
#include <utils/Log.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Trace.h>
#include "include/FrameCaptureLayer.h"
#include "include/HevcUtils.h"

#include <C2Buffer.h>
#include <Codec2BufferUtils.h>


namespace android {
namespace android {


static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
static const int64_t kAsyncBufferTimeOutUs = 2000000LL; // 2000 msec
static const size_t kRetryCount = 100; // must be >0
static const size_t kRetryCount = 100; // must be >0
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
// For codec, 0 is the highest importance; higher the number lesser important.
// For codec, 0 is the highest importance; higher the number lesser important.
@@ -232,6 +238,104 @@ bool getDstColorFormat(
    return false;
    return false;
}
}


// Handler that receives MediaCodec async callbacks and forwards them to the
// FrameDecoder. Holds only a weak reference so the handler cannot keep the
// decoder alive past its owner.
AsyncCodecHandler::AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder)
    : mFrameDecoder(frameDecoder) {}

// Dispatches MediaCodec async callbacks (delivered as kWhatCallbackNotify
// messages) to the owning FrameDecoder. Runs on the AsyncCodecHandler's
// looper thread. If the FrameDecoder has been destroyed (weak promote
// fails), the callback is silently dropped.
void AsyncCodecHandler::onMessageReceived(const sp<AMessage>& msg) {
    switch (msg->what()) {
        case FrameDecoder::kWhatCallbackNotify:
            int32_t callbackId;
            if (!msg->findInt32("callbackID", &callbackId)) {
                ALOGE("kWhatCallbackNotify: callbackID is expected.");
                break;
            }
            switch (callbackId) {
                // Codec has a free input buffer; hand its index to the decoder
                // so extractInternalUsingBlockModel() can queue input into it.
                case MediaCodec::CB_INPUT_AVAILABLE: {
                    int32_t index;
                    if (!msg->findInt32("index", &index)) {
                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                        break;
                    }
                    ALOGD("CB_INPUT_AVAILABLE received, index is %d", index);
                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
                    if (frameDecoder != nullptr) {
                        frameDecoder->handleInputBufferAsync(index);
                    }
                    break;
                }
                // Decoded output is ready; index/timeUs are mandatory here
                // (CHECK) because MediaCodec always sets them for this event.
                case MediaCodec::CB_OUTPUT_AVAILABLE: {
                    int32_t index;
                    int64_t timeUs;
                    CHECK(msg->findInt32("index", &index));
                    CHECK(msg->findInt64("timeUs", &timeUs));
                    ALOGD("CB_OUTPUT_AVAILABLE received, index is %d", index);
                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
                    if (frameDecoder != nullptr) {
                        frameDecoder->handleOutputBufferAsync(index, timeUs);
                    }
                    break;
                }
                // Output format change: forward the codec's format message so
                // the decoder can snapshot it (it dup()s its own copy).
                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                    ALOGD("CB_OUTPUT_FORMAT_CHANGED received");
                    sp<AMessage> format;
                    if (!msg->findMessage("format", &format) || format == nullptr) {
                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                        break;
                    }
                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
                    if (frameDecoder != nullptr) {
                        frameDecoder->handleOutputFormatChangeAsync(format);
                    }
                    break;
                }
                // Codec error: log only. The extraction path times out on its
                // own (kAsyncBufferTimeOutUs) rather than being failed here.
                case MediaCodec::CB_ERROR: {
                    status_t err;
                    int32_t actionCode;
                    AString detail;
                    if (!msg->findInt32("err", &err)) {
                        ALOGE("CB_ERROR: err is expected.");
                        break;
                    }
                    if (!msg->findInt32("actionCode", &actionCode)) {
                        ALOGE("CB_ERROR: actionCode is expected.");
                        break;
                    }
                    // "detail" is optional; ignore the lookup result.
                    msg->findString("detail", &detail);
                    ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
                          StrMediaError(err).c_str(), actionCode, detail.c_str());
                    break;
                }
                default:
                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
                    break;
            }
            break;
        default:
            ALOGE("unexpected message received: %s", msg->debugString().c_str());
            break;
    }
}

// Publishes a free input-buffer index from the codec callback thread.
// Thread-safe; wakes one thread blocked in dequeue().
void InputBufferIndexQueue::enqueue(int32_t index) {
    {
        std::scoped_lock<std::mutex> lock(mMutex);
        mQueue.push(index);
    }
    // Notify after releasing the lock: waking a waiter while still holding
    // mMutex makes it wake up only to block again on the mutex.
    mCondition.notify_one();
}

// Waits up to timeOutUs microseconds for an input-buffer index to become
// available. On success stores the index into *index and returns true;
// returns false if the timeout expires with the queue still empty.
bool InputBufferIndexQueue::dequeue(int32_t* index, int32_t timeOutUs) {
    std::unique_lock<std::mutex> lock(mMutex);
    const bool available = mCondition.wait_for(
            lock, std::chrono::microseconds(timeOutUs),
            [this] { return !mQueue.empty(); });
    if (!available) {
        return false;
    }
    *index = mQueue.front();
    mQueue.pop();
    return true;
}

//static
//static
sp<IMemory> FrameDecoder::getMetadataOnly(
sp<IMemory> FrameDecoder::getMetadataOnly(
        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
@@ -281,6 +385,7 @@ FrameDecoder::FrameDecoder(
        const sp<MetaData> &trackMeta,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
        const sp<IMediaSource> &source)
    : mComponentName(componentName),
    : mComponentName(componentName),
      mUseBlockModel(false),
      mTrackMeta(trackMeta),
      mTrackMeta(trackMeta),
      mSource(source),
      mSource(source),
      mDstFormat(OMX_COLOR_Format16bitRGB565),
      mDstFormat(OMX_COLOR_Format16bitRGB565),
@@ -290,6 +395,10 @@ FrameDecoder::FrameDecoder(
}
}


FrameDecoder::~FrameDecoder() {
FrameDecoder::~FrameDecoder() {
    if (mHandler != NULL) {
        mAsyncLooper->stop();
        mAsyncLooper->unregisterHandler(mHandler->id());
    }
    if (mDecoder != NULL) {
    if (mDecoder != NULL) {
        mDecoder->release();
        mDecoder->release();
        mSource->stop();
        mSource->stop();
@@ -333,8 +442,18 @@ status_t FrameDecoder::init(
        return (decoder.get() == NULL) ? NO_MEMORY : err;
        return (decoder.get() == NULL) ? NO_MEMORY : err;
    }
    }


    if (mUseBlockModel) {
        mAsyncLooper = new ALooper;
        mAsyncLooper->start();
        mHandler = new AsyncCodecHandler(wp<FrameDecoder>(this));
        mAsyncLooper->registerHandler(mHandler);
        sp<AMessage> callbackMsg = new AMessage(kWhatCallbackNotify, mHandler);
        decoder->setCallback(callbackMsg);
    }

    err = decoder->configure(
    err = decoder->configure(
            videoFormat, mSurface, NULL /* crypto */, 0 /* flags */);
            videoFormat, mSurface, NULL /* crypto */,
            mUseBlockModel ? MediaCodec::CONFIGURE_FLAG_USE_BLOCK_MODEL : 0 /* flags */);
    if (err != OK) {
    if (err != OK) {
        ALOGW("configure returned error %d (%s)", err, asString(err));
        ALOGW("configure returned error %d (%s)", err, asString(err));
        decoder->release();
        decoder->release();
@@ -362,10 +481,18 @@ status_t FrameDecoder::init(
sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
    ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
    ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
    status_t err = onExtractRect(rect);
    status_t err = onExtractRect(rect);
    if (err == OK) {
    if (err != OK) {
        ALOGE("onExtractRect error %d", err);
        return NULL;
    }

    if (!mUseBlockModel) {
        err = extractInternal();
        err = extractInternal();
    } else {
        err = extractInternalUsingBlockModel();
    }
    }
    if (err != OK) {
    if (err != OK) {
        ALOGE("extractInternal error %d", err);
        return NULL;
        return NULL;
    }
    }


@@ -380,6 +507,7 @@ status_t FrameDecoder::extractInternal() {
        ALOGE("decoder is not initialized");
        ALOGE("decoder is not initialized");
        return NO_INIT;
        return NO_INIT;
    }
    }

    do {
    do {
        size_t index;
        size_t index;
        int64_t ptsUs = 0LL;
        int64_t ptsUs = 0LL;
@@ -433,7 +561,8 @@ status_t FrameDecoder::extractInternal() {
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());
                        mediaBuffer->range_length());


                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
                onInputReceived(codecBuffer->data(), codecBuffer->size(), mediaBuffer->meta_data(),
                                mFirstSample, &flags);
                mFirstSample = false;
                mFirstSample = false;
            }
            }


@@ -487,11 +616,14 @@ status_t FrameDecoder::extractInternal() {
                        ALOGE("failed to get output buffer %zu", index);
                        ALOGE("failed to get output buffer %zu", index);
                        break;
                        break;
                    }
                    }
                    uint8_t* frameData = videoFrameBuffer->data();
                    sp<ABuffer> imageData;
                    videoFrameBuffer->meta()->findBuffer("image-data", &imageData);
                    if (mSurface != nullptr) {
                    if (mSurface != nullptr) {
                        mDecoder->renderOutputBufferAndRelease(index);
                        mDecoder->renderOutputBufferAndRelease(index);
                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                    } else {
                    } else {
                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                        mDecoder->releaseOutputBuffer(index);
                        mDecoder->releaseOutputBuffer(index);
                    }
                    }
                } else {
                } else {
@@ -510,6 +642,73 @@ status_t FrameDecoder::extractInternal() {
    return err;
    return err;
}
}


// Block-model (Codec2 LinearBlock) extraction path, used when mUseBlockModel
// is set. Reads exactly one access unit from the source, copies it into a
// fetched C2LinearBlock, queues it with EOS set, then blocks until
// handleOutputBufferAsync() signals that one output frame was processed or
// kAsyncBufferTimeOutUs elapses.
// Returns OK on success, TIMED_OUT if no input index / output frame arrived
// in time, or a read/allocation error code.
status_t FrameDecoder::extractInternalUsingBlockModel() {
    status_t err = OK;
    MediaBufferBase* mediaBuffer = NULL;
    int64_t ptsUs = 0LL;
    uint32_t flags = 0;
    int32_t index;
    mHandleOutputBufferAsyncDone = false;

    err = mSource->read(&mediaBuffer, &mReadOptions);
    mReadOptions.clearSeekTo();
    if (err != OK) {
        ALOGW("Input Error: err=%d", err);
        // A failed read may leave mediaBuffer unset; guard before releasing.
        if (mediaBuffer != NULL) {
            mediaBuffer->release();
        }
        return err;
    }

    size_t inputSize = mediaBuffer->range_length();
    std::shared_ptr<C2LinearBlock> block =
            MediaCodec::FetchLinearBlock(inputSize, {std::string{mComponentName.c_str()}});
    if (block == nullptr) {
        // Allocation can fail; do not dereference a null block below.
        ALOGE("Fatal error: failed to allocate a block");
        mediaBuffer->release();
        return NO_MEMORY;
    }
    C2WriteView view{block->map().get()};
    if (view.error() != C2_OK) {
        ALOGE("Fatal error: failed to allocate and map a block");
        mediaBuffer->release();
        return NO_MEMORY;
    }
    if (inputSize > view.capacity()) {
        ALOGE("Fatal error: allocated block is too small "
              "(input size %zu; block cap %u)",
              inputSize, view.capacity());
        mediaBuffer->release();
        return BAD_VALUE;
    }
    // Every sample must carry a timestamp; a missing kKeyTime is a hard bug.
    CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
    memcpy(view.base(), (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
           inputSize);
    std::shared_ptr<C2Buffer> c2Buffer =
            C2Buffer::CreateLinearBuffer(block->share(0, inputSize, C2Fence{}));
    // Let the subclass inspect the sample (e.g. IDR detection) and set flags.
    onInputReceived(view.base(), inputSize, mediaBuffer->meta_data(), true /* firstSample */,
                    &flags);
    // Only one frame is ever decoded on this path, so always signal EOS.
    flags |= MediaCodec::BUFFER_FLAG_EOS;
    mediaBuffer->release();

    std::vector<AccessUnitInfo> infoVec;
    infoVec.emplace_back(flags, inputSize, ptsUs);
    sp<BufferInfosWrapper> infos = new BufferInfosWrapper{std::move(infoVec)};

    // The codec advertises free input slots via CB_INPUT_AVAILABLE; wait for one.
    if (!mInputBufferIndexQueue.dequeue(&index, kAsyncBufferTimeOutUs)) {
        ALOGE("No available input buffer index for async mode.");
        return TIMED_OUT;
    }

    AString errorDetailMsg;
    ALOGD("QueueLinearBlock: index=%d size=%zu ts=%" PRId64 " us flags=%x",
            index, inputSize, ptsUs, flags);
    err = mDecoder->queueBuffer(index, c2Buffer, infos, nullptr, &errorDetailMsg);
    if (err != OK) {
        ALOGE("failed to queueBuffer (err %d): %s", err, errorDetailMsg.c_str());
        return err;
    }

    // wait for handleOutputBufferAsync() to finish
    std::unique_lock _lk(mMutex);
    mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
                                 [this] { return mHandleOutputBufferAsyncDone; });
    return mHandleOutputBufferAsyncDone ? OK : TIMED_OUT;
}

//////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////


VideoFrameDecoder::VideoFrameDecoder(
VideoFrameDecoder::VideoFrameDecoder(
@@ -525,6 +724,81 @@ VideoFrameDecoder::VideoFrameDecoder(
      mDefaultSampleDurationUs(0) {
      mDefaultSampleDurationUs(0) {
}
}


// Async-mode callback: snapshot the codec's new output format.
status_t FrameDecoder::handleOutputFormatChangeAsync(sp<AMessage> format) {
    // MediaCodec passes its internal copy of the output format; duplicate it
    // so client-side modifications never touch the codec's own message.
    sp<AMessage> formatCopy = format->dup();
    mOutputFormat = formatCopy;
    ALOGD("receive output format in async mode: %s", mOutputFormat->debugString().c_str());
    return OK;
}

// Async-mode callback for CB_INPUT_AVAILABLE: queue the free input buffer
// index so extractInternalUsingBlockModel() can dequeue it. Always OK.
status_t FrameDecoder::handleInputBufferAsync(int32_t index) {
    mInputBufferIndexQueue.enqueue(index);
    return OK;
}

// Async-mode callback for CB_OUTPUT_AVAILABLE: fetch the decoded buffer,
// hand frame data (and image-data layout, when available) to
// onOutputReceived(), release the buffer back to the codec, and signal
// extractInternalUsingBlockModel() once one frame is fully processed.
// Runs on the AsyncCodecHandler looper thread.
status_t FrameDecoder::handleOutputBufferAsync(int32_t index, int64_t timeUs) {
    // NOTE(review): read without mMutex — appears benign since only this
    // callback path sets the flag, but confirm if more writers are added.
    if (mHandleOutputBufferAsyncDone) {
        // we have already processed an output buffer, skip others
        return OK;
    }

    sp<MediaCodecBuffer> videoFrameBuffer;
    status_t err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
    if (err != OK || videoFrameBuffer == nullptr) {
        ALOGE("failed to get output buffer %d", index);
        return err;
    }

    bool onOutputReceivedDone = false;
    if (mSurface != nullptr) {
        mDecoder->renderOutputBufferAndRelease(index);
        // frameData and imgObj will be fetched by captureSurface() inside onOutputReceived()
        // explicitly pass null here
        err = onOutputReceived(nullptr, nullptr, mOutputFormat, timeUs, &onOutputReceivedDone);
    } else {
        // get stride and frame data for block model buffer
        std::shared_ptr<C2Buffer> c2buffer = videoFrameBuffer->asC2Buffer();
        if (!c2buffer
                || c2buffer->data().type() != C2BufferData::GRAPHIC
                || c2buffer->data().graphicBlocks().size() == 0u) {
            ALOGE("C2Buffer precond fail");
            // Return the buffer to the codec; leaking output slots on the
            // error path would eventually stall the decoder.
            mDecoder->releaseOutputBuffer(index);
            return ERROR_MALFORMED;
        }

        std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
            c2buffer->data().graphicBlocks()[0].map().get()));
        GraphicView2MediaImageConverter converter(*view, mOutputFormat, false /* copy */);
        // Cache initCheck() instead of calling it once for the test and once
        // for the log.
        status_t initErr = converter.initCheck();
        if (initErr != OK) {
            ALOGE("Converter init failed: %d", initErr);
            mDecoder->releaseOutputBuffer(index);
            return NO_INIT;
        }

        uint8_t* frameData = converter.wrap()->data();
        sp<ABuffer> imageData = converter.imageData();
        if (imageData != nullptr) {
            mOutputFormat->setBuffer("image-data", imageData);
            MediaImage2 *img = (MediaImage2*) imageData->data();
            if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
                // Row increment of plane 0 is the effective stride for the
                // color converter downstream.
                int32_t stride = img->mPlane[0].mRowInc;
                mOutputFormat->setInt32(KEY_STRIDE, stride);
                ALOGD("updating stride = %d", stride);
            }
        }

        err = onOutputReceived(frameData, imageData, mOutputFormat, timeUs, &onOutputReceivedDone);
        mDecoder->releaseOutputBuffer(index);
    }

    if (err == OK && onOutputReceivedDone) {
        // Wake the waiter in extractInternalUsingBlockModel().
        std::lock_guard _lm(mMutex);
        mHandleOutputBufferAsyncDone = true;
        mOutputFramePending.notify_one();
    }
    return err;
}

sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, int seekMode,
        int64_t frameTimeUs, int seekMode,
        MediaSource::ReadOptions *options,
        MediaSource::ReadOptions *options,
@@ -575,9 +849,14 @@ sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    if (!isSeekingClosest) {
    if (!isSeekingClosest) {
        if (mComponentName.startsWithIgnoreCase("c2.")) {
            mUseBlockModel = android::media::codec::provider_->thumbnail_block_model();
        } else {
            // OMX Codec
            videoFormat->setInt32("android._num-input-buffers", 1);
            videoFormat->setInt32("android._num-input-buffers", 1);
            videoFormat->setInt32("android._num-output-buffers", 1);
            videoFormat->setInt32("android._num-output-buffers", 1);
        }
        }
    }


    if (isHDR(videoFormat)) {
    if (isHDR(videoFormat)) {
        *window = initSurface();
        *window = initSurface();
@@ -601,9 +880,8 @@ sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
    return videoFormat;
    return videoFormat;
}
}


status_t VideoFrameDecoder::onInputReceived(
status_t VideoFrameDecoder::onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
        const sp<MediaCodecBuffer> &codecBuffer,
                                            bool firstSample, uint32_t* flags) {
        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);


@@ -612,10 +890,7 @@ status_t VideoFrameDecoder::onInputReceived(
        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
    }
    }


    if (!isSeekingClosest
    if (!isSeekingClosest && ((mIsAvc && IsIDR(data, size)) || (mIsHevc && IsIDR(data, size)))) {
            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
            || (mIsHevc && IsIDR(
            codecBuffer->data(), codecBuffer->size())))) {
        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
        // option, in which case we need to actually decode to targetTimeUs.
        // option, in which case we need to actually decode to targetTimeUs.
        *flags |= MediaCodec::BUFFER_FLAG_EOS;
        *flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -630,7 +905,8 @@ status_t VideoFrameDecoder::onInputReceived(
}
}


status_t VideoFrameDecoder::onOutputReceived(
status_t VideoFrameDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        uint8_t* frameData,
        sp<ABuffer> imgObj,
        const sp<AMessage> &outputFormat,
        const sp<AMessage> &outputFormat,
        int64_t timeUs, bool *done) {
        int64_t timeUs, bool *done) {
    int64_t durationUs = mDefaultSampleDurationUs;
    int64_t durationUs = mDefaultSampleDurationUs;
@@ -703,7 +979,6 @@ status_t VideoFrameDecoder::onOutputReceived(
        }
        }


        mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
        mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());

        setFrame(frameMem);
        setFrame(frameMem);
    }
    }


@@ -712,7 +987,7 @@ status_t VideoFrameDecoder::onOutputReceived(
    if (mCaptureLayer != nullptr) {
    if (mCaptureLayer != nullptr) {
        return captureSurface();
        return captureSurface();
    }
    }
    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
    ColorConverter colorConverter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());


    uint32_t standard, range, transfer;
    uint32_t standard, range, transfer;
    if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
    if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
@@ -724,22 +999,25 @@ status_t VideoFrameDecoder::onOutputReceived(
    if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
    if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
        transfer = 0;
        transfer = 0;
    }
    }
    sp<ABuffer> imgObj;

    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
    if (imgObj != nullptr) {
        MediaImage2 *imageData = nullptr;
        MediaImage2 *imageData = nullptr;
        imageData = (MediaImage2 *)(imgObj.get()->data());
        imageData = (MediaImage2 *)(imgObj.get()->data());
        if (imageData != nullptr) {
        if (imageData != nullptr) {
            converter.setSrcMediaImage2(*imageData);
            colorConverter.setSrcMediaImage2(*imageData);
        }
        }
    }
    }
    if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
    if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
        return ERROR_UNSUPPORTED;
        return ERROR_UNSUPPORTED;
    }
    }
    converter.setSrcColorSpace(standard, range, transfer);
    colorConverter.setSrcColorSpace(standard, range, transfer);
    if (converter.isValid()) {
    if (colorConverter.isValid()) {
        ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
        ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
        converter.convert(
        if (frameData == nullptr) {
                (const uint8_t *)videoFrameBuffer->data(),
            ALOGD("frameData is null for ColorConverter");
        }
        colorConverter.convert(
                (const uint8_t *)frameData,
                width, height, stride,
                width, height, stride,
                crop_left, crop_top, crop_right, crop_bottom,
                crop_left, crop_top, crop_right, crop_bottom,
                mFrame->getFlattenedData(),
                mFrame->getFlattenedData(),
@@ -955,7 +1233,8 @@ status_t MediaImageDecoder::onExtractRect(FrameRect *rect) {
}
}


status_t MediaImageDecoder::onOutputReceived(
status_t MediaImageDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        uint8_t* frameData,
        sp<ABuffer> imgObj,
        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
    if (outputFormat == NULL) {
    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
        return ERROR_MALFORMED;
@@ -1008,8 +1287,8 @@ status_t MediaImageDecoder::onOutputReceived(
    if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
    if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
        transfer = 0;
        transfer = 0;
    }
    }
    sp<ABuffer> imgObj;

    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
    if (imgObj != nullptr) {
        MediaImage2 *imageData = nullptr;
        MediaImage2 *imageData = nullptr;
        imageData = (MediaImage2 *)(imgObj.get()->data());
        imageData = (MediaImage2 *)(imgObj.get()->data());
        if (imageData != nullptr) {
        if (imageData != nullptr) {
@@ -1058,7 +1337,7 @@ status_t MediaImageDecoder::onOutputReceived(


    if (converter.isValid()) {
    if (converter.isValid()) {
        converter.convert(
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                (const uint8_t *)frameData,
                width, height, stride,
                width, height, stride,
                crop_left, crop_top, crop_right, crop_bottom,
                crop_left, crop_top, crop_right, crop_bottom,
                mFrame->getFlattenedData(),
                mFrame->getFlattenedData(),