Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 2edda09a authored by Lajos Molnar's avatar Lajos Molnar
Browse files

stagefright: fix surface input handling of software encoders

- added SoftVideoEncoder for common color conversion and
  extension handling logic
- fix YUV420SemiPlanar handling, which should be treated as NV12, not NV21

Bug: 17935149
Change-Id: I9b8d05678b1862dd37bf349ea83d67bdf1bb5560
parent 512e9792
Loading
Loading
Loading
Loading
+19 −97
Original line number Diff line number Diff line
@@ -111,36 +111,6 @@ static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    return BAD_VALUE;
}

// Converts a YUV420SemiPlanar frame (NV12: full Y plane followed by an
// interleaved CbCr plane) into YUV420Planar (Y plane, then Cb plane, then
// Cr plane) as required by the encoder core.
//
// The chroma plane is read four bytes (two Cb/Cr pairs) at a time and
// de-interleaved, so `width` must be a multiple of 4 and `height` a multiple
// of 2; little-endian byte order is assumed for the 32/16-bit accesses.
//
// NOTE: OMX_COLOR_FormatYUV420SemiPlanar is NV12 (Cb first in each chroma
// pair). The previous version wrote the first byte of each pair into the Cr
// plane — i.e. it treated the input as NV21 — which swapped the U and V
// planes of the converted frame.
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy =  (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V de-interleaving */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            // Bytes 0 and 2 of each 4-byte group are Cb samples.
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            // Bytes 1 and 3 of each 4-byte group are Cr samples.
            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // NV12: Cb precedes Cr in each interleaved pair.
            *outcb++ = tempU;
            *outcr++ = tempV;
        }
    }
}

static void* MallocWrapper(
        void * /* userData */, int32_t size, int32_t /* attrs */) {
    void *ptr = malloc(size);
@@ -178,7 +148,7 @@ SoftAVCEncoder::SoftAVCEncoder(
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
@@ -260,9 +230,10 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
            || mStoreMetaDataInBuffers) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        free(mInputFrameData);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData != NULL);
@@ -348,10 +319,10 @@ OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    delete mInputFrameData;
    free(mInputFrameData);
    mInputFrameData = NULL;

    delete mSliceGroup;
    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
@@ -713,11 +684,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
                    mStoreMetaDataInBuffers ? " true" : "false");

            if (mStoreMetaDataInBuffers) {
                mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
                if (mInputFrameData == NULL) {
                    mInputFrameData =
                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
                }
                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
            }

            return OMX_ErrorNone;
@@ -801,8 +768,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
            }
        }

        buffer_handle_t srcBuffer = NULL; // for MetaDataMode only

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
@@ -823,7 +788,7 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
                videoInput.height = ((mVideoHeight  + 15) >> 4) << 4;
                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                uint8_t *inputData = NULL;
                const uint8_t *inputData = NULL;
                if (mStoreMetaDataInBuffers) {
                    if (inHeader->nFilledLen != 8) {
                        ALOGE("MetaData buffer is wrong size! "
@@ -833,8 +798,10 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
                        return;
                    }
                    inputData =
                            extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
                                    &srcBuffer);
                        extractGraphicBuffer(
                                mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
                                inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
                                mVideoWidth, mVideoHeight);
                    if (inputData == NULL) {
                        ALOGE("Unable to extract gralloc buffer in metadata mode");
                        mSignalledError = true;
@@ -843,16 +810,16 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
                    }
                    // TODO: Verify/convert pixel format enum
                } else {
                    inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                }

                    inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
                        ConvertYUV420SemiPlanarToYUV420Planar(
                            inputData, mInputFrameData, mVideoWidth, mVideoHeight);
                        inputData = mInputFrameData;
                    }
                }

                CHECK(inputData != NULL);
                videoInput.YCbCr[0] = inputData;
                videoInput.YCbCr[0] = (uint8_t *)inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
@@ -869,14 +836,12 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        releaseGrallocData(srcBuffer);
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        releaseGrallocData(srcBuffer);
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
@@ -916,7 +881,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                releaseGrallocData(srcBuffer);
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
@@ -926,7 +890,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        releaseGrallocData(srcBuffer);
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
@@ -974,47 +937,6 @@ void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer);
}

// Maps the storeMetaDataInBuffers vendor-extension name to its component
// extension index; every other extension name is rejected.
OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {
    if (strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") != 0) {
        return OMX_ErrorUndefined;
    }
    *(int32_t*)index = kStoreMetaDataExtensionIndex;
    return OMX_ErrorNone;
}

// Interprets a metadata-mode input buffer as a gralloc source descriptor and
// locks the referenced graphic buffer for CPU access.
//
// `data` must begin with a 4-byte kMetadataBufferTypeGrallocSource tag,
// followed by the buffer_handle_t. On success the handle is stored into
// *buffer (so the caller can later pass it to releaseGrallocData()) and a
// CPU-mapped pointer to the pixel data is returned; on any failure NULL is
// returned and *buffer is left untouched.
uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
    OMX_U32 type = *(OMX_U32*)data;
    status_t res;
    if (type != kMetadataBufferTypeGrallocSource) {
        ALOGE("Data passed in with metadata mode does not have type "
                "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
                kMetadataBufferTypeGrallocSource, type);
        return NULL;
    }
    // Handle is read at a fixed 4-byte offset past the OMX_U32 tag.
    // NOTE(review): assumes a packed 32-bit layout; on LP64 the handle would
    // normally sit at offset 8 due to pointer alignment — confirm against
    // the metadata struct actually passed by the producer.
    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);

    // Lock the full frame rectangle for video-encoder usage to obtain a
    // CPU-visible mapping of the buffer.
    const Rect rect(mVideoWidth, mVideoHeight);
    uint8_t *img;
    res = GraphicBufferMapper::get().lock(imgBuffer,
            GRALLOC_USAGE_HW_VIDEO_ENCODER,
            rect, (void**)&img);
    if (res != OK) {
        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
                imgBuffer);
        return NULL;
    }

    *buffer = imgBuffer;
    return img;
}

// Unlocks a gralloc buffer previously locked by extractGrallocData().
// Outside metadata mode no buffer was ever locked, so this is a no-op.
void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
    if (!mStoreMetaDataInBuffers) {
        return;
    }
    GraphicBufferMapper::get().unlock(buffer);
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
+2 −10
Original line number Diff line number Diff line
@@ -22,14 +22,14 @@
#include <utils/Vector.h>

#include "avcenc_api.h"
#include "SimpleSoftOMXComponent.h"
#include "SoftVideoEncoderOMXComponent.h"

namespace android {

struct MediaBuffer;

struct SoftAVCEncoder : public MediaBufferObserver,
                        public SimpleSoftOMXComponent {
                        public SoftVideoEncoderOMXComponent {
    SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
@@ -45,11 +45,6 @@ struct SoftAVCEncoder : public MediaBufferObserver,

    virtual void onQueueFilled(OMX_U32 portIndex);

    // Override SoftOMXComponent methods

    virtual OMX_ERRORTYPE getExtensionIndex(
            const char *name, OMX_INDEXTYPE *index);

    // Implement MediaBufferObserver
    virtual void signalBufferReturned(MediaBuffer *buffer);

@@ -105,9 +100,6 @@ private:
    OMX_ERRORTYPE releaseEncoder();
    void releaseOutputBuffers();

    uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
    void releaseGrallocData(buffer_handle_t buffer);

    DISALLOW_EVIL_CONSTRUCTORS(SoftAVCEncoder);
};

+29 −93
Original line number Diff line number Diff line
@@ -46,42 +46,12 @@ static void InitOMXParams(T *params) {
    params->nVersion.s.nStep = 0;
}

// Converts a YUV420SemiPlanar frame (NV12: full Y plane followed by an
// interleaved CbCr plane) into YUV420Planar (Y plane, then Cb plane, then
// Cr plane) as required by the encoder core.
//
// The chroma plane is read four bytes (two Cb/Cr pairs) at a time and
// de-interleaved, so `width` must be a multiple of 4 and `height` a multiple
// of 2; little-endian byte order is assumed for the 32/16-bit accesses.
//
// NOTE: OMX_COLOR_FormatYUV420SemiPlanar is NV12 (Cb first in each chroma
// pair). The previous version wrote the first byte of each pair into the Cr
// plane — i.e. it treated the input as NV21 — which swapped the U and V
// planes of the converted frame.
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy =  (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V de-interleaving */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            // Bytes 0 and 2 of each 4-byte group are Cb samples.
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            // Bytes 1 and 3 of each 4-byte group are Cr samples.
            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // NV12: Cb precedes Cr in each interleaved pair.
            *outcb++ = tempU;
            *outcr++ = tempV;
        }
    }
}

SoftMPEG4Encoder::SoftMPEG4Encoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
    : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
      mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
      mVideoWidth(176),
      mVideoHeight(144),
@@ -149,9 +119,10 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
    mEncParams->quantType[0] = 0;
    mEncParams->noFrameSkipped = PV_OFF;

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
    if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
            || mStoreMetaDataInBuffers) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        free(mInputFrameData);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
        CHECK(mInputFrameData != NULL);
@@ -216,7 +187,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::releaseEncoder() {

    PVCleanUpVideoEncoder(mHandle);

    delete mInputFrameData;
    free(mInputFrameData);
    mInputFrameData = NULL;

    delete mEncParams;
@@ -486,6 +457,17 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
                mVideoHeight = def->format.video.nFrameHeight;
                mVideoFrameRate = def->format.video.xFramerate >> 16;
                mVideoColorFormat = def->format.video.eColorFormat;

                OMX_PARAM_PORTDEFINITIONTYPE *portDef =
                    &editPortInfo(0)->mDef;
                portDef->format.video.nFrameWidth = mVideoWidth;
                portDef->format.video.nFrameHeight = mVideoHeight;
                portDef->format.video.xFramerate = def->format.video.xFramerate;
                portDef->format.video.eColorFormat =
                    (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
                portDef = &editPortInfo(1)->mDef;
                portDef->format.video.nFrameWidth = mVideoWidth;
                portDef->format.video.nFrameHeight = mVideoHeight;
            } else {
                mVideoBitRate = def->format.video.nBitrate;
            }
@@ -607,11 +589,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
                    mStoreMetaDataInBuffers ? " true" : "false");

            if (mStoreMetaDataInBuffers) {
                mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
                if (mInputFrameData == NULL) {
                    mInputFrameData =
                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
                }
                mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
            }

            return OMX_ErrorNone;
@@ -679,9 +657,8 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
            mSawInputEOS = true;
        }

        buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
        if (inHeader->nFilledLen > 0) {
            uint8_t *inputData = NULL;
            const uint8_t *inputData = NULL;
            if (mStoreMetaDataInBuffers) {
                if (inHeader->nFilledLen != 8) {
                    ALOGE("MetaData buffer is wrong size! "
@@ -691,24 +668,25 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
                    return;
                }
                inputData =
                        extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
                                &srcBuffer);
                    extractGraphicBuffer(
                            mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
                            inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
                            mVideoWidth, mVideoHeight);
                if (inputData == NULL) {
                    ALOGE("Unable to extract gralloc buffer in metadata mode");
                    mSignalledError = true;
                    notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                }
                // TODO: Verify/convert pixel format enum
            } else {
                inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
            }

                inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
                    ConvertYUV420SemiPlanarToYUV420Planar(
                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
                    inputData = mInputFrameData;
                }
            }

            CHECK(inputData != NULL);

            VideoEncFrameIO vin, vout;
@@ -717,7 +695,7 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
            vin.height = ((mVideoHeight  + 15) >> 4) << 4;
            vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
            vin.timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
            vin.yChan = inputData;
            vin.yChan = (uint8_t *)inputData;
            vin.uChan = vin.yChan + vin.height * vin.pitch;
            vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);

@@ -744,7 +722,6 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        releaseGrallocData(srcBuffer);
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
@@ -759,47 +736,6 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
    }
}

// Maps the storeMetaDataInBuffers vendor-extension name to its component
// extension index; every other extension name is rejected.
OMX_ERRORTYPE SoftMPEG4Encoder::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {
    if (strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") != 0) {
        return OMX_ErrorUndefined;
    }
    *(int32_t*)index = kStoreMetaDataExtensionIndex;
    return OMX_ErrorNone;
}

// Interprets a metadata-mode input buffer as a gralloc source descriptor and
// locks the referenced graphic buffer for CPU access.
//
// `data` must begin with a 4-byte kMetadataBufferTypeGrallocSource tag,
// followed by the buffer_handle_t. On success the handle is stored into
// *buffer (so the caller can later pass it to releaseGrallocData()) and a
// CPU-mapped pointer to the pixel data is returned; on any failure NULL is
// returned and *buffer is left untouched.
uint8_t *SoftMPEG4Encoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
    OMX_U32 type = *(OMX_U32*)data;
    status_t res;
    if (type != kMetadataBufferTypeGrallocSource) {
        ALOGE("Data passed in with metadata mode does not have type "
                "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
                kMetadataBufferTypeGrallocSource, type);
        return NULL;
    }
    // Handle is read at a fixed 4-byte offset past the OMX_U32 tag.
    // NOTE(review): assumes a packed 32-bit layout; on LP64 the handle would
    // normally sit at offset 8 due to pointer alignment — confirm against
    // the metadata struct actually passed by the producer.
    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);

    // Lock the full frame rectangle for video-encoder usage to obtain a
    // CPU-visible mapping of the buffer.
    const Rect rect(mVideoWidth, mVideoHeight);
    uint8_t *img;
    res = GraphicBufferMapper::get().lock(imgBuffer,
            GRALLOC_USAGE_HW_VIDEO_ENCODER,
            rect, (void**)&img);
    if (res != OK) {
        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
                imgBuffer);
        return NULL;
    }

    *buffer = imgBuffer;
    return img;
}

// Unlocks a gralloc buffer previously locked by extractGrallocData().
// Outside metadata mode no buffer was ever locked, so this is a no-op.
void SoftMPEG4Encoder::releaseGrallocData(buffer_handle_t buffer) {
    if (!mStoreMetaDataInBuffers) {
        return;
    }
    GraphicBufferMapper::get().unlock(buffer);
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
+2 −10
Original line number Diff line number Diff line
@@ -19,7 +19,7 @@

#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/foundation/ABase.h>
#include "SimpleSoftOMXComponent.h"
#include "SoftVideoEncoderOMXComponent.h"
#include "mp4enc_api.h"


@@ -27,7 +27,7 @@ namespace android {

struct MediaBuffer;

struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {
struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
    SoftMPEG4Encoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
@@ -43,11 +43,6 @@ struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {

    virtual void onQueueFilled(OMX_U32 portIndex);

    // Override SoftOMXComponent methods

    virtual OMX_ERRORTYPE getExtensionIndex(
            const char *name, OMX_INDEXTYPE *index);

protected:
    virtual ~SoftMPEG4Encoder();

@@ -86,9 +81,6 @@ private:
    OMX_ERRORTYPE initEncoder();
    OMX_ERRORTYPE releaseEncoder();

    uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
    void releaseGrallocData(buffer_handle_t buffer);

    DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4Encoder);
};

+0 −4
Original line number Diff line number Diff line
@@ -12,10 +12,6 @@ LOCAL_C_INCLUDES := \
        frameworks/av/media/libstagefright/include \
        frameworks/native/include/media/openmax \

ifeq ($(TARGET_DEVICE), manta)
    LOCAL_CFLAGS += -DSURFACE_IS_BGR32
endif

LOCAL_STATIC_LIBRARIES := \
        libvpx

Loading