Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit f91f2971 authored by Nipun Kwatra's avatar Nipun Kwatra Committed by Android (Google) Code Review
Browse files

Merge "Adding support for timelapse capture using still camera's takepicture....

Merge "Adding support for timelapse capture using still camera's takepicture. Also moving entire implementation into a new class CameraSourceTimeLapse which inherits from CameraSource."
parents aee71b4e f9b80182
Loading
Loading
Loading
Loading
+25 −25
Original line number Diff line number Diff line
@@ -22,7 +22,6 @@
#include <media/stagefright/MediaSource.h>
#include <utils/List.h>
#include <utils/RefBase.h>
#include <utils/threads.h>

namespace android {

@@ -35,10 +34,6 @@ public:
    static CameraSource *Create();
    static CameraSource *CreateFromCamera(const sp<Camera> &camera);

    void enableTimeLapseMode(
            int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate);
    void disableTimeLapseMode();

    virtual ~CameraSource();

    virtual status_t start(MetaData *params = NULL);
@@ -51,12 +46,34 @@ public:

    virtual void signalBufferReturned(MediaBuffer* buffer);

private:
    friend class CameraSourceListener;

protected:
    sp<Camera> mCamera;
    sp<MetaData> mMeta;

    int64_t mStartTimeUs;
    int32_t mNumFramesReceived;
    int64_t mLastFrameTimestampUs;
    bool mStarted;

    CameraSource(const sp<Camera> &camera);

    virtual void startCameraRecording();
    virtual void stopCameraRecording();
    virtual void releaseRecordingFrame(const sp<IMemory>& frame);

    // Returns true if need to skip the current frame.
    // Called from dataCallbackTimestamp.
    virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}

    // Callback called when still camera raw data is available.
    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}

    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data);

private:
    friend class CameraSourceListener;

    Mutex mLock;
    Condition mFrameAvailableCondition;
    Condition mFrameCompleteCondition;
@@ -64,29 +81,12 @@ private:
    List<sp<IMemory> > mFramesBeingEncoded;
    List<int64_t> mFrameTimes;

    int64_t mStartTimeUs;
    int64_t mFirstFrameTimeUs;
    int64_t mLastFrameTimestampUs;
    int32_t mNumFramesReceived;
    int32_t mNumFramesEncoded;
    int32_t mNumFramesDropped;
    int32_t mNumGlitches;
    int64_t mGlitchDurationThresholdUs;
    bool mCollectStats;
    bool mStarted;

    // Time between capture of two frames during time lapse recording
    // Negative value indicates that timelapse is disabled.
    int64_t mTimeBetweenTimeLapseFrameCaptureUs;
    // Time between two frames in final video (1/frameRate)
    int64_t mTimeBetweenTimeLapseVideoFramesUs;
    // Real timestamp of the last encoded time lapse frame
    int64_t mLastTimeLapseFrameRealTimestampUs;

    CameraSource(const sp<Camera> &camera);

    void dataCallbackTimestamp(
            int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);

    void releaseQueuedFrames();
    void releaseOneRecordingFrame(const sp<IMemory>& frame);
+129 −0
Original line number Diff line number Diff line
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef CAMERA_SOURCE_TIME_LAPSE_H_

#define CAMERA_SOURCE_TIME_LAPSE_H_

#include <pthread.h>

#include <utils/RefBase.h>
#include <utils/threads.h>

namespace android {

class ICamera;
class IMemory;
class ISurface;
class Camera;

// CameraSource subclass that produces time lapse video: frames are captured
// far apart in real time (every mTimeBetweenTimeLapseFrameCaptureUs) but
// timestamped one video-frame interval apart, either by polling the still
// camera's takePicture() from a dedicated thread or by dropping most frames
// of a normal video-camera recording.
class CameraSourceTimeLapse : public CameraSource {
public:
    // Creates a time lapse source with a freshly connected camera.
    // useStillCameraForTimeLapse selects takePicture() vs. video-frame capture.
    static CameraSourceTimeLapse *Create(bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t videoFrameRate);

    // Same as Create(), but wraps an already-connected camera.
    static CameraSourceTimeLapse *CreateFromCamera(const sp<Camera> &camera,
        bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t videoFrameRate);

    virtual ~CameraSourceTimeLapse();

private:
    // If true, will use still camera takePicture() for time lapse frames
    // If false, will use the video camera frames instead.
    bool mUseStillCameraForTimeLapse;

    // Time between capture of two frames during time lapse recording
    // Negative value indicates that timelapse is disabled.
    int64_t mTimeBetweenTimeLapseFrameCaptureUs;

    // Time between two frames in final video (1/frameRate)
    int64_t mTimeBetweenTimeLapseVideoFramesUs;

    // Real timestamp of the last encoded time lapse frame
    int64_t mLastTimeLapseFrameRealTimestampUs;

    // Thread id of thread which takes still picture and sleeps in a loop.
    pthread_t mThreadTimeLapse;

    // Variable set in dataCallbackTimestamp() to help skipCurrentFrame()
    // to know if current frame needs to be skipped.
    bool mSkipCurrentFrame;

    // Private: use Create()/CreateFromCamera() instead.
    CameraSourceTimeLapse(const sp<Camera> &camera,
        bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t videoFrameRate);

    // For still camera case starts a thread which calls camera's takePicture()
    // in a loop. For video camera case, just starts the camera's video recording.
    virtual void startCameraRecording();

    // For still camera case joins the thread created in startCameraRecording().
    // For video camera case, just stops the camera's video recording.
    virtual void stopCameraRecording();

    // For still camera case don't need to do anything as memory is locally
    // allocated with refcounting.
    // For video camera case just tell the camera to release the frame.
    virtual void releaseRecordingFrame(const sp<IMemory>& frame);

    // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
    // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
    virtual bool skipCurrentFrame(int64_t timestampUs);

    // Handles the callback to handle raw frame data from the still camera.
    // Creates a copy of the frame data as the camera can reuse the frame memory
    // once this callback returns. The function also sets a new timestamp corresponding
    // to one frame time ahead of the last encoded frame's time stamp. It then
    // calls dataCallbackTimestamp() of the base class with the copied data and the
    // modified timestamp, which will think that it received the frame from a video
    // camera and proceed as usual.
    virtual void dataCallback(int32_t msgType, const sp<IMemory> &data);

    // In the video camera case calls skipFrameAndModifyTimeStamp() to modify
    // timestamp and set mSkipCurrentFrame.
    // Then it calls the base CameraSource::dataCallbackTimestamp()
    virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
            const sp<IMemory> &data);

    // When video camera is used for time lapse capture, returns true
    // until enough time has passed for the next time lapse frame. When
    // the frame needs to be encoded, it returns false and also modifies
    // the time stamp to be one frame time ahead of the last encoded
    // frame's time stamp.
    bool skipFrameAndModifyTimeStamp(int64_t *timestampUs);

    // Wrapper to enter threadTimeLapseEntry()
    static void *ThreadTimeLapseWrapper(void *me);

    // Runs a loop which sleeps until a still picture is required
    // and then calls mCamera->takePicture() to take the still picture.
    // Used only in the case mUseStillCameraForTimeLapse = true.
    void threadTimeLapseEntry();

    // Creates a copy of source_data into a new memory of final type MemoryBase.
    sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);

    // Copying and assignment are disallowed (declared but never defined).
    CameraSourceTimeLapse(const CameraSourceTimeLapse &);
    CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
};

}  // namespace android

#endif  // CAMERA_SOURCE_TIME_LAPSE_H_
+4 −4
Original line number Diff line number Diff line
@@ -24,6 +24,7 @@
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
@@ -895,11 +896,10 @@ status_t StagefrightRecorder::setupVideoEncoder(const sp<MediaWriter>& writer) {
    status_t err = setupCameraSource();
    if (err != OK) return err;

    sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
    sp<CameraSource> cameraSource = (mCaptureTimeLapse) ?
        CameraSourceTimeLapse::CreateFromCamera(mCamera, true, 3E6, mFrameRate):
        CameraSource::CreateFromCamera(mCamera);
    CHECK(cameraSource != NULL);
    if(mCaptureTimeLapse) {
        cameraSource->enableTimeLapseMode(1E6, mFrameRate);
    }

    sp<MetaData> enc_meta = new MetaData;
    enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
+1 −0
Original line number Diff line number Diff line
@@ -10,6 +10,7 @@ LOCAL_SRC_FILES:= \
        AudioSource.cpp                   \
        AwesomePlayer.cpp                 \
        CameraSource.cpp                  \
        CameraSourceTimeLapse.cpp                  \
        DataSource.cpp                    \
        ESDS.cpp                          \
        FileSource.cpp                    \
+30 −53
Original line number Diff line number Diff line
@@ -65,6 +65,11 @@ void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
// Raw-data callback from the camera. Promotes the listener's weak reference
// and, if the CameraSource is still alive, forwards the frame data to it.
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
    LOGV("postData(%d, ptr:%p, size:%d)",
         msgType, dataPtr->pointer(), dataPtr->size());

    sp<CameraSource> src = mSource.promote();
    if (src.get() == NULL) {
        return;
    }
    src->dataCallback(msgType, dataPtr);
}

void CameraSourceListener::postDataTimestamp(
@@ -116,33 +121,17 @@ CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
    return new CameraSource(camera);
}

// Turns on time lapse capture: only one frame per
// timeBetweenTimeLapseFrameCaptureUs of real time is kept, and kept frames
// are timestamped 1/videoFrameRate seconds apart in the output video.
void CameraSource::enableTimeLapseMode(
        int64_t timeBetweenTimeLapseFrameCaptureUs, int32_t videoFrameRate) {
    LOGV("starting time lapse mode");
    mTimeBetweenTimeLapseVideoFramesUs = (1E6/videoFrameRate);
    mTimeBetweenTimeLapseFrameCaptureUs = timeBetweenTimeLapseFrameCaptureUs;
}

// Turns time lapse capture off; a negative capture interval is the
// "disabled" sentinel checked in dataCallbackTimestamp().
void CameraSource::disableTimeLapseMode() {
    LOGV("stopping time lapse mode");
    mTimeBetweenTimeLapseVideoFramesUs = 0;
    mTimeBetweenTimeLapseFrameCaptureUs = -1;
}

CameraSource::CameraSource(const sp<Camera> &camera)
    : mCamera(camera),
      mFirstFrameTimeUs(0),
      mLastFrameTimestampUs(0),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mFirstFrameTimeUs(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false),
      mStarted(false),
      mTimeBetweenTimeLapseFrameCaptureUs(-1),
      mTimeBetweenTimeLapseVideoFramesUs(0),
      mLastTimeLapseFrameRealTimestampUs(0) {
      mCollectStats(false) {

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
@@ -177,7 +166,6 @@ CameraSource::CameraSource(const sp<Camera> &camera)
    mMeta->setInt32(kKeyHeight, height);
    mMeta->setInt32(kKeyStride, stride);
    mMeta->setInt32(kKeySliceHeight, sliceHeight);

}

CameraSource::~CameraSource() {
@@ -186,6 +174,10 @@ CameraSource::~CameraSource() {
    }
}

void CameraSource::startCameraRecording() {
    CHECK_EQ(OK, mCamera->startRecording());
}

status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);

@@ -203,13 +195,17 @@ status_t CameraSource::start(MetaData *meta) {

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    CHECK_EQ(OK, mCamera->startRecording());
    startCameraRecording();
    IPCThreadState::self()->restoreCallingIdentity(token);

    mStarted = true;
    return OK;
}

// Stops the camera's video recording. Virtual hook: CameraSourceTimeLapse
// overrides this to join its still-picture thread instead.
void CameraSource::stopCameraRecording() {
    mCamera->stopRecording();
}

status_t CameraSource::stop() {
    LOGV("stop");
    Mutex::Autolock autoLock(mLock);
@@ -218,7 +214,7 @@ status_t CameraSource::stop() {

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(NULL);
    mCamera->stopRecording();
    stopCameraRecording();
    releaseQueuedFrames();
    while (!mFramesBeingEncoded.empty()) {
        LOGI("Waiting for outstanding frames being encoded: %d",
@@ -238,11 +234,15 @@ status_t CameraSource::stop() {
    return OK;
}

// Hands one recording frame's memory back to the camera for reuse.
// Virtual hook: the still-camera time lapse subclass makes this a no-op
// because its frame copies are locally allocated and refcounted.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    mCamera->releaseRecordingFrame(frame);
}

// Drops every frame still queued in mFramesReceived: each frame is handed
// back to the camera exactly once (through the virtual releaseRecordingFrame()
// hook so time lapse subclasses can intercept it) and counted as dropped.
//
// Fix: the previous text released each frame twice — once via the direct
// mCamera->releaseRecordingFrame(*it) call and again via the virtual
// wrapper — which double-releases the buffer and bypasses the subclass hook.
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFramesReceived.empty()) {
        it = mFramesReceived.begin();
        releaseRecordingFrame(*it);
        mFramesReceived.erase(it);
        ++mNumFramesDropped;
    }
}
@@ -254,7 +254,7 @@ sp<MetaData> CameraSource::getFormat() {

// Releases a single recording frame back to the camera, clearing the binder
// calling identity around the call so the release is made with this
// process's own identity.
//
// Fix: the previous text released the frame twice — once via the direct
// mCamera->releaseRecordingFrame(frame) call and again via the virtual
// wrapper — which double-releases the buffer and bypasses the subclass hook;
// only the virtual releaseRecordingFrame() call is kept.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

@@ -263,7 +263,6 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {

            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
@@ -332,33 +331,11 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
        ++mNumGlitches;
    }

    // time lapse
    if(mTimeBetweenTimeLapseFrameCaptureUs >= 0) {
        if(mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = timestampUs;
        } else if (timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if(skipCurrentFrame(timestampUs)) {
        releaseOneRecordingFrame(data);
        return;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = timestampUs;
            timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }

    mLastFrameTimestampUs = timestampUs;
Loading