Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e100b813 authored by Lajos Molnar; committed by Android (Google) Code Review
Browse files

Merge "StageFrightRecorder: do not use meta-data mode for software encoders" into klp-dev

parents a7291156 9c7f8457
Loading
Loading
Loading
Loading
+4 −2
Original line number | Diff line number | Diff line content
@@ -41,7 +41,8 @@ public:
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs);
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        bool storeMetaDataInVideoBuffers = true);

    virtual ~CameraSourceTimeLapse();

@@ -116,7 +117,8 @@ private:
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs);
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        bool storeMetaDataInVideoBuffers = true);

    // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
    // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
+25 −6
Original line number | Diff line number | Diff line content
@@ -70,8 +70,9 @@ StagefrightRecorder::StagefrightRecorder()
      mOutputFd(-1),
      mAudioSource(AUDIO_SOURCE_CNT),
      mVideoSource(VIDEO_SOURCE_LIST_END),
      mStarted(false), mSurfaceMediaSource(NULL),
      mCaptureTimeLapse(false) {
      mCaptureTimeLapse(false),
      mStarted(false),
      mSurfaceMediaSource(NULL) {

    ALOGV("Constructor");
    reset();
@@ -1089,7 +1090,22 @@ void StagefrightRecorder::clipVideoFrameWidth() {
    }
}

status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
status_t StagefrightRecorder::checkVideoEncoderCapabilities(
        bool *supportsCameraSourceMetaDataMode) {
    /* hardware codecs must support camera source meta data mode */
    Vector<CodecCapabilities> codecs;
    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);
    QueryCodecs(
            client.interface(),
            (mVideoEncoder == VIDEO_ENCODER_H263 ? MEDIA_MIMETYPE_VIDEO_H263 :
             mVideoEncoder == VIDEO_ENCODER_MPEG_4_SP ? MEDIA_MIMETYPE_VIDEO_MPEG4 :
             mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),
            false /* decoder */, true /* hwCodec */, &codecs);
    *supportsCameraSourceMetaDataMode = codecs.size() > 0;
    ALOGV("encoder %s camera source meta-data mode",
            *supportsCameraSourceMetaDataMode ? "supports" : "DOES NOT SUPPORT");

    if (!mCaptureTimeLapse) {
        // Dont clip for time lapse capture as encoder will have enough
        // time to encode because of slow capture rate of time lapse.
@@ -1307,7 +1323,9 @@ status_t StagefrightRecorder::setupSurfaceMediaSource() {
status_t StagefrightRecorder::setupCameraSource(
        sp<CameraSource> *cameraSource) {
    status_t err = OK;
    if ((err = checkVideoEncoderCapabilities()) != OK) {
    bool encoderSupportsCameraSourceMetaDataMode;
    if ((err = checkVideoEncoderCapabilities(
                &encoderSupportsCameraSourceMetaDataMode)) != OK) {
        return err;
    }
    Size videoSize;
@@ -1323,13 +1341,14 @@ status_t StagefrightRecorder::setupCameraSource(
        mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
                videoSize, mFrameRate, mPreviewSurface,
                mTimeBetweenTimeLapseFrameCaptureUs);
                mTimeBetweenTimeLapseFrameCaptureUs,
                encoderSupportsCameraSourceMetaDataMode);
        *cameraSource = mCameraSourceTimeLapse;
    } else {
        *cameraSource = CameraSource::CreateFromCamera(
                mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,
                videoSize, mFrameRate,
                mPreviewSurface, true /*storeMetaDataInVideoBuffers*/);
                mPreviewSurface, encoderSupportsCameraSourceMetaDataMode);
    }
    mCamera.clear();
    mCameraProxy.clear();
+2 −1
Original line number | Diff line number | Diff line content
@@ -139,7 +139,8 @@ private:
    status_t startRTPRecording();
    status_t startMPEG2TSRecording();
    sp<MediaSource> createAudioSource();
    status_t checkVideoEncoderCapabilities();
    status_t checkVideoEncoderCapabilities(
            bool *supportsCameraSourceMetaDataMode);
    status_t checkAudioEncoderCapabilities();
    // Generic MediaSource set-up. Returns the appropriate
    // source (CameraSource or SurfaceMediaSource)
+8 −4
Original line number | Diff line number | Diff line content
@@ -41,13 +41,15 @@ CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs) {
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs);
                timeBetweenFrameCaptureUs,
                storeMetaDataInVideoBuffers);

    if (source != NULL) {
        if (source->initCheck() != OK) {
@@ -67,9 +69,11 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs)
        int64_t timeBetweenFrameCaptureUs,
        bool storeMetaDataInVideoBuffers)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid,
                videoSize, videoFrameRate, surface, true),
                videoSize, videoFrameRate, surface,
                storeMetaDataInVideoBuffers),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {