Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit d26920ad authored by Nipun Kwatra's avatar Nipun Kwatra
Browse files

Adding time lapse capture from the video camera.

Current implementation looks at the timestamps of all incoming frames in
CameraSource::dataCallbackTimestamp().
It drops all frames until enough time has elapsed to get the next time lapse frame.
When enough time has passed to capture the next time lapse frame, the frame is no longer dropped
and the timestamp of this frame is modified to be one frame time (1/framerate) ahead of the
last encoded frame's time stamp.

Change-Id: I82b9d5e96113dffa6901aac3b8a8ef999ffc1d0b
parent 3bf8430e
Loading
Loading
Loading
Loading
+12 −0
Original line number Diff line number Diff line
@@ -35,6 +35,10 @@ public:
    static CameraSource *Create();
    static CameraSource *CreateFromCamera(const sp<Camera> &camera);

    void enableTimeLapseMode(
            int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate);
    void disableTimeLapseMode();

    virtual ~CameraSource();

    virtual status_t start(MetaData *params = NULL);
@@ -71,6 +75,14 @@ private:
    bool mCollectStats;
    bool mStarted;

    // Time between capture of two frames during time lapse recording
    // Negative value indicates that timelapse is disabled.
    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
    // Time between two frames in final video (1/frameRate)
    int64_t mTimeBetweenTimeLapseVideoFramesUs;
    // Real timestamp of the last encoded time lapse frame
    int64_t mLastTimeLapseFrameRealTimestampUs;

    CameraSource(const sp<Camera> &camera);

    void dataCallbackTimestamp(
+5 −1
Original line number Diff line number Diff line
@@ -815,6 +815,9 @@ status_t StagefrightRecorder::setupVideoEncoder(const sp<MediaWriter>& writer) {

    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
    CHECK(cameraSource != NULL);
    if(mCaptureTimeLapse) {
        cameraSource->enableTimeLapseMode(1E6, mFrameRate);
    }

    sp<MetaData> enc_meta = new MetaData;
    enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
@@ -892,7 +895,7 @@ status_t StagefrightRecorder::startMPEG4Recording() {
    sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));

    // Add audio source first if it exists
    if (mAudioSource != AUDIO_SOURCE_LIST_END) {
    if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) {
        err = setupAudioEncoder(writer);
        if (err != OK) return err;
        totalBitRate += mAudioBitRate;
@@ -994,6 +997,7 @@ status_t StagefrightRecorder::reset() {
    mCameraId        = 0;
    mTrackEveryNumberOfFrames = 0;
    mTrackEveryTimeDurationUs = 0;
    mCaptureTimeLapse = false;
    mEncoderProfiles = MediaProfiles::getInstance();

    mOutputFd = -1;
+2 −0
Original line number Diff line number Diff line
@@ -87,6 +87,8 @@ private:
    int32_t mTrackEveryNumberOfFrames;
    int64_t mTrackEveryTimeDurationUs;

    bool mCaptureTimeLapse;

    String8 mParams;
    int mOutputFd;
    int32_t mFlags;
+46 −1
Original line number Diff line number Diff line
@@ -116,6 +116,19 @@ CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
    return new CameraSource(camera);
}

// Enables time lapse capture. While enabled, dataCallbackTimestamp()
// drops incoming frames until timeBetweenTimeLapseFrameCaptureUs
// microseconds have elapsed since the last kept frame, and re-stamps
// each kept frame 1/videoFrameRate ahead of the previous encoded frame.
//
// timeBetweenTimeLapseFrameCaptureUs: real-time interval, in
//     microseconds, between two captured time lapse frames.
// videoFrameRate: frame rate of the resulting video; must be > 0.
void CameraSource::enableTimeLapseMode(
        int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate) {
    LOGV("starting time lapse mode");
    // A zero (or negative) rate would make the division below a
    // divide-by-zero; fail loudly instead of storing a garbage interval.
    CHECK(videoFrameRate > 0);
    mTimeBetweenTimeLapseFrameCaptureUs = timeBetweenTimeLapseFrameCaptureUs;
    // Integer arithmetic: avoids the float round-trip of 1E6/rate and
    // the undefined int64_t conversion it would produce on bad input.
    mTimeBetweenTimeLapseVideoFramesUs = 1000000LL / videoFrameRate;
}

// Turns time lapse capture off. A negative capture interval is the
// sentinel dataCallbackTimestamp() checks to decide that time lapse
// processing is disabled; the per-frame video interval is cleared
// alongside it.
void CameraSource::disableTimeLapseMode() {
    LOGV("stopping time lapse mode");
    mTimeBetweenTimeLapseVideoFramesUs = 0;
    mTimeBetweenTimeLapseFrameCaptureUs = -1;
}

CameraSource::CameraSource(const sp<Camera> &camera)
    : mCamera(camera),
      mFirstFrameTimeUs(0),
@@ -126,7 +139,10 @@ CameraSource::CameraSource(const sp<Camera> &camera)
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false),
      mStarted(false) {
      mStarted(false),
      mTimeBetweenTimeLapseFrameCaptureUs(-1),
      mTimeBetweenTimeLapseVideoFramesUs(0),
      mLastTimeLapseFrameRealTimestampUs(0) {

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
@@ -316,6 +332,35 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        ++mNumGlitches;
    }

    // time lapse
    if(mTimeBetweenTimeLapseFrameCaptureUs >= 0) {
        if(mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = timestampUs;
        } else if (timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");

            releaseOneRecordingFrame(data);
            return;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = timestampUs;
            timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;