
Commit b7acdfb8 authored by James Dong, committed by Android (Google) Code Review

Merge "Fix green or corrupted video frames in the exported movies" into jb-dev

parents 28ed2f93 3e1e78cc
Changed file 1 of 4: +3 −0
@@ -273,6 +273,9 @@
 /**
  * Output file must be 3GPP or MP3 */
 #define M4VSS3GPP_ERR_OUTPUT_FILE_TYPE_ERROR         M4OSA_ERR_CREATE( M4_ERR, M4VSS3GPP, 0x0117)
+/**
+ * Can not find a valid video frame */
+#define M4VSS3GPP_ERR_NO_VALID_VID_FRAME         M4OSA_ERR_CREATE( M4_ERR, M4VSS3GPP, 0x0118)
 
 
 #endif /* __M4VSS3GPP_ErrorCodes_H__ */
Changed file 2 of 4: +59 −26
@@ -20,6 +20,8 @@
  * @note
  ******************************************************************************
  */
+#undef M4OSA_TRACE_LEVEL
+#define M4OSA_TRACE_LEVEL 1
 
 /****************/
 /*** Includes ***/
@@ -491,7 +493,6 @@ M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
                     }
 
 #endif                                   //M4VSS_SUPPORT_OMX_CODECS
-
                 }
                 else if( M4NO_ERROR != err ) /**< ...or an encoder error */
                 {
@@ -655,7 +656,6 @@ M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
                     }
 
 #endif //M4VSS_SUPPORT_OMX_CODECS
-
                 }
                 else if( M4NO_ERROR != err ) /**< ...or an encoder error */
                 {
@@ -1198,7 +1198,7 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
         if ((pC->pC1->isRenderDup == M4OSA_TRUE) ||
              (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
             pTmp = pC->yuv1;
-            if (pC->pC1->lastDecodedPlane != M4NO_ERROR) {
+            if (pC->pC1->lastDecodedPlane != M4OSA_NULL) {
                 /* Copy last decoded plane to output plane */
                 memcpy((void *)pTmp[0].pac_data,
                     (void *)pC->pC1->lastDecodedPlane[0].pac_data,
@@ -1209,6 +1209,12 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
                 memcpy((void *)pTmp[2].pac_data,
                     (void *)pC->pC1->lastDecodedPlane[2].pac_data,
                     (pTmp[2].u_height * pTmp[2].u_width));
+            } else {
+                err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
+                M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
+                   err, __FILE__, __LINE__);
+                pC->ewc.VppError = err;
+                return M4NO_ERROR;
             }
             pC->pC1->lastDecodedPlane = pTmp;
         }
@@ -1238,7 +1244,7 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
         if ((pC->pC2->isRenderDup == M4OSA_TRUE) ||
              (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
             pTmp = pC->yuv2;
-            if (pC->pC2->lastDecodedPlane != M4NO_ERROR) {
+            if (pC->pC2->lastDecodedPlane != M4OSA_NULL) {
                 /* Copy last decoded plane to output plane */
                 memcpy((void *)pTmp[0].pac_data,
                     (void *)pC->pC2->lastDecodedPlane[0].pac_data,
@@ -1249,6 +1255,12 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
                 memcpy((void *)pTmp[2].pac_data,
                     (void *)pC->pC2->lastDecodedPlane[2].pac_data,
                     (pTmp[2].u_height * pTmp[2].u_width));
+            } else {
+                err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
+                M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
+                   err, __FILE__, __LINE__);
+                pC->ewc.VppError = err;
+                return M4NO_ERROR;
             }
             pC->pC2->lastDecodedPlane = pTmp;
         }
@@ -1505,17 +1517,29 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
             if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
                 /**
                 * Copy last decoded plane to output plane */
+                if (pC->pC1->lastDecodedPlane != M4OSA_NULL) {
+
                     memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data,
                         (void *)pC->pC1->lastDecodedPlane[0].pac_data,
-                 (pC->pC1->m_pPreResizeFrame[0].u_height * pC->pC1->m_pPreResizeFrame[0].u_width));
+                        (pC->pC1->m_pPreResizeFrame[0].u_height * \
+                         pC->pC1->m_pPreResizeFrame[0].u_width));
 
                     memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data,
                         (void *)pC->pC1->lastDecodedPlane[1].pac_data,
-                 (pC->pC1->m_pPreResizeFrame[1].u_height * pC->pC1->m_pPreResizeFrame[1].u_width));
+                        (pC->pC1->m_pPreResizeFrame[1].u_height * \
+                         pC->pC1->m_pPreResizeFrame[1].u_width));
 
                     memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data,
                         (void *)pC->pC1->lastDecodedPlane[2].pac_data,
-                 (pC->pC1->m_pPreResizeFrame[2].u_height * pC->pC1->m_pPreResizeFrame[2].u_width));
+                        (pC->pC1->m_pPreResizeFrame[2].u_height * \
+                         pC->pC1->m_pPreResizeFrame[2].u_width));
+                } else {
+                    err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
+                    M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
+                        err, __FILE__, __LINE__);
+                    pC->ewc.VppError = err;
+                    return M4NO_ERROR;
+                }
 
                 if(pC->nbActiveEffects > 0) {
                     /**
@@ -1587,6 +1611,8 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
                 }
                 /**
                  * Copy last decoded plane to output plane */
+                if (pC->pC1->lastDecodedPlane != M4OSA_NULL &&
+                    pLastDecodedFrame != M4OSA_NULL) {
                     memcpy((void *)pLastDecodedFrame[0].pac_data,
                         (void *)pC->pC1->lastDecodedPlane[0].pac_data,
                         (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width));
@@ -1598,6 +1624,13 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
                     memcpy((void *)pLastDecodedFrame[2].pac_data,
                         (void *)pC->pC1->lastDecodedPlane[2].pac_data,
                         (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width));
+                } else {
+                    err = M4VSS3GPP_ERR_NO_VALID_VID_FRAME;
+                    M4OSA_TRACE1_3("Can not find an input frame. Set error 0x%x in %s (%d)",
+                        err, __FILE__, __LINE__);
+                    pC->ewc.VppError = err;
+                    return M4NO_ERROR;
+                }
 
                 pTmp = pPlaneOut;
                 /**
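
The hunks above all apply the same guard before re-emitting a frame: lastDecodedPlane is now compared against M4OSA_NULL (it was previously compared against the unrelated constant M4NO_ERROR), and when no decoded plane is available the code records M4VSS3GPP_ERR_NO_VALID_VID_FRAME in pC->ewc.VppError and returns M4NO_ERROR instead of attempting the copy with no valid source frame. Below is a minimal, self-contained sketch of that pattern; the Plane and VppContext types, the error values, and the duplicateLastFrame helper are hypothetical stand-ins for the real M4VSS3GPP structures, not the actual implementation.

// Minimal sketch of the null-plane guard added above (hypothetical types).
#include <cstdint>
#include <cstdio>
#include <cstring>

struct Plane {
    uint8_t* pac_data;   // pixel data for one YUV420 plane
    uint32_t u_width;    // width of this plane
    uint32_t u_height;   // height of this plane
};

typedef uint32_t Err;
static const Err kNoError         = 0;       // plays the role of M4NO_ERROR
static const Err kErrNoValidFrame = 0x0118;  // plays the role of M4VSS3GPP_ERR_NO_VALID_VID_FRAME

struct VppContext {
    Plane* lastDecodedPlane;  // null until the first frame has been decoded
    Err    vppError;          // like pC->ewc.VppError: the failure is stashed here
};

// Re-emit the last decoded frame into 'out' (3 planes).  If there is no
// decoded frame yet, record the error and return "no error" so the edit
// loop keeps running and the failure is reported at a higher level.
static Err duplicateLastFrame(VppContext* c, Plane out[3]) {
    if (c->lastDecodedPlane != nullptr) {
        for (int i = 0; i < 3; ++i) {
            std::memcpy(out[i].pac_data,
                        c->lastDecodedPlane[i].pac_data,
                        out[i].u_height * out[i].u_width);
        }
        c->lastDecodedPlane = out;  // mirrors pC->pC1->lastDecodedPlane = pTmp
    } else {
        c->vppError = kErrNoValidFrame;
        std::fprintf(stderr, "Can not find an input frame, error 0x%x\n",
                     (unsigned)c->vppError);
    }
    return kNoError;
}
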
Changed file 3 of 4: +3 −0
@@ -115,6 +115,9 @@ typedef struct {
     ARect                   mCropRect;  // These are obtained from kKeyCropRect.
     I420ColorConverter*     mI420ColorConverter;
 
+    // Time interval between two consequtive/neighboring video frames.
+    M4_MediaTime            mFrameIntervalMs;
+
 } VideoEditorVideoDecoder_Context;
 
 } //namespace android
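
The new mFrameIntervalMs field holds the nominal spacing between neighboring frames in milliseconds; the decoder source in the next file derives it as 1000 divided by the stream's average frame rate. A quick worked example, assuming a hypothetical clip reported at 30 fps:

#include <cstdio>

int main() {
    // Hypothetical value; the real code reads m_averageFrameRate from the
    // video stream handler and CHECKs that it is positive.
    double averageFrameRate = 30.0;
    double frameIntervalMs  = 1000.0 / averageFrameRate;
    std::printf("frame interval = %.2f ms\n", frameIntervalMs);  // prints 33.33 ms
    return 0;
}
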
Changed file 4 of 4: +24 −10
@@ -982,6 +982,11 @@ M4OSA_ERR VideoEditorVideoDecoder_create(M4OSA_Context *pContext,
     pDecShellContext->mLastOutputCts     = -1;
     pDecShellContext->m_pDecBufferPool   = M4OSA_NULL;
 
+    // Calculate the interval between two video frames.
+    CHECK(pDecShellContext->m_pVideoStreamhandler->m_averageFrameRate > 0);
+    pDecShellContext->mFrameIntervalMs =
+            1000.0 / pDecShellContext->m_pVideoStreamhandler->m_averageFrameRate;
+
     /**
      * StageFright graph building
      */
@@ -1423,8 +1428,25 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context,
         ALOGV("VideoEditorVideoDecoder_decode,decoded frametime = %lf,size = %d",
             (M4_MediaTime)lFrameTime, pDecoderBuffer->size() );
 
-        // If bJump is false, we need to save every decoded buffer
-        if (!bJump) {
+        /*
+         * We need to save a buffer if bJump == false to a queue. These
+         * buffers have a timestamp >= the target time, *pTime (for instance,
+         * the transition between two videos, or a trimming postion inside
+         * one video), since they are part of the transition clip or the
+         * trimmed video.
+         *
+         * If *pTime does not have the same value as any of the existing
+         * video frames, we would like to get the buffer right before *pTime
+         * and in the transcoding phrase, this video frame will be encoded
+         * as a key frame and becomes the first video frame for the transition or the
+         * trimmed video to be generated. This buffer must also be queued.
+         *
+         */
+        int64_t targetTimeMs =
+                pDecShellContext->m_lastDecodedCTS +
+                pDecShellContext->mFrameIntervalMs +
+                tolerance;
+        if (!bJump || targetTimeMs > *pTime) {
             lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer);
             if (lerr != M4NO_ERROR) {
                 goto VIDEOEDITOR_VideoDecode_cleanUP;
@@ -1432,14 +1454,6 @@ M4OSA_ERR VideoEditorVideoDecoder_decode(M4OSA_Context context,
         }
     }
 
-    // If bJump is true, we only need to copy the last buffer
-    if (bJump) {
-        lerr = copyBufferToQueue(pDecShellContext, pDecoderBuffer);
-        if (lerr != M4NO_ERROR) {
-            goto VIDEOEDITOR_VideoDecode_cleanUP;
-        }
-    }
-
     pDecShellContext->mNbOutputFrames++;
     if ( 0 > pDecShellContext->mFirstOutputCts ) {
         pDecShellContext->mFirstOutputCts = *pTime;
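
The last two hunks replace the old rule (queue every decoded buffer when bJump is false, and only the final buffer when bJump is true) with a single condition: a buffer is queued when not jumping, or when the time of the next expected frame, m_lastDecodedCTS + mFrameIntervalMs + tolerance, already passes the requested time *pTime. That keeps the frame just before the target, which the new comment block says will be re-encoded as the key frame that starts the transition or trimmed clip. Below is a compact sketch of that decision only, using hypothetical simplified names (DecoderState, shouldQueue) rather than the real VideoEditorVideoDecoder_decode internals.

#include <cstdio>

struct DecoderState {
    double lastDecodedCTS;   // CTS tracked by the decoder, in ms
    double frameIntervalMs;  // 1000.0 / average frame rate
};

// Keep a decoded frame when not jumping, or when its successor would already
// pass the requested time, i.e. this is the frame right before the target.
static bool shouldQueue(const DecoderState& s, bool bJump,
                        double requestedTimeMs, double toleranceMs) {
    double nextExpectedCts = s.lastDecodedCTS + s.frameIntervalMs + toleranceMs;
    return !bJump || nextExpectedCts > requestedTimeMs;
}

int main() {
    DecoderState s{0.0, 1000.0 / 30.0};  // hypothetical 30 fps clip
    double requested = 500.0;            // seek/trim target at 500 ms
    for (double cts = 0.0; ; cts += s.frameIntervalMs) {
        s.lastDecodedCTS = cts;
        if (shouldQueue(s, /*bJump=*/true, requested, /*toleranceMs=*/0.0)) {
            std::printf("queue frame at %.2f ms\n", cts);
        }
        if (cts >= requested) {
            break;  // the real decode loop stops once the target frame is reached
        }
    }
    return 0;
}

In this example only the frames at roughly 466.67 ms and 500.00 ms are queued; with bJump == false the condition short-circuits and every frame is queued, matching the behavior the old code implemented with two separate blocks.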