Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit dfb1dd6a authored by James Dong
Browse files

Avoid copying for input recording frames in CameraSource

This is the part one.

- Let CameraSource be a MediaBufferObserver. It releases the recording frame when the ref count of a MediaBuffer containing
  the recording frame drops to 0.

  This reduces the CPU load from 90+% down to 50-60%.

Part two is related to the avoidance of copying the input video frames to the video encoder.
However, we are not able to use OMX_UseBuffer directly; we are still working on the second part.

Change-Id: I906f1d054ae8bdcf82e1617f1fc120152f2eb2eb
parent c0046aab
Loading
Loading
Loading
Loading
+6 −4
Original line number Diff line number Diff line
@@ -19,7 +19,6 @@
#define CAMERA_SOURCE_H_

#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaSource.h>
#include <utils/List.h>
#include <utils/RefBase.h>
@@ -31,7 +30,7 @@ class ICamera;
class IMemory;
class Camera;

class CameraSource : public MediaSource {
class CameraSource : public MediaSource, public MediaBufferObserver {
public:
    static CameraSource *Create();
    static CameraSource *CreateFromCamera(const sp<Camera> &camera);
@@ -46,6 +45,8 @@ public:
    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL);

    virtual void signalBufferReturned(MediaBuffer* buffer);

private:
    friend class CameraSourceListener;

@@ -53,7 +54,9 @@ private:

    Mutex mLock;
    Condition mFrameAvailableCondition;
    List<sp<IMemory> > mFrames;
    Condition mFrameCompleteCondition;
    List<sp<IMemory> > mFramesReceived;
    List<sp<IMemory> > mFramesBeingEncoded;
    List<int64_t> mFrameTimes;

    int mWidth, mHeight;
@@ -62,7 +65,6 @@ private:
    int32_t mNumFramesReceived;
    int32_t mNumFramesEncoded;
    int32_t mNumFramesDropped;
    MediaBufferGroup *mBufferGroup;
    bool mStarted;

    CameraSource(const sp<Camera> &camera);
+36 −25
Original line number Diff line number Diff line
@@ -104,7 +104,6 @@ CameraSource::CameraSource(const sp<Camera> &camera)
      mNumFramesReceived(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mBufferGroup(NULL),
      mStarted(false) {
    String8 s = mCamera->getParameters();
    printf("params: \"%s\"\n", s.string());
@@ -139,8 +138,12 @@ status_t CameraSource::stop() {
    mCamera->stopRecording();

    releaseQueuedFrames();
    delete mBufferGroup;
    mBufferGroup = NULL;

    while (!mFramesBeingEncoded.empty()) {
        LOGI("Number of outstanding frames is being encoded: %d", mFramesBeingEncoded.size());
        mFrameCompleteCondition.wait(mLock);
    }

    LOGI("Frames received/encoded/dropped: %d/%d/%d, timestamp (us) last/first: %lld/%lld",
            mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
            mLastFrameTimestampUs, mFirstFrameTimeUs);
@@ -151,10 +154,10 @@ status_t CameraSource::stop() {

// NOTE(review): this span is a web-rendered diff hunk, not compilable code —
// the pre-change lines (using mFrames) and the post-change lines (using
// mFramesReceived) are interleaved with the +/- markers stripped. The commit
// renames mFrames to mFramesReceived; only one of each duplicated pair
// belongs in the real file.
//
// Purpose: drain every frame still queued for the encoder, hand each one
// back to the camera HAL, and count it as dropped (it was never encoded).
void CameraSource::releaseQueuedFrames() {
    List<sp<IMemory> >::iterator it;
    while (!mFrames.empty()) {          // old line (removed by this commit)
        it = mFrames.begin();           // old line (removed by this commit)
    while (!mFramesReceived.empty()) {  // new line (added by this commit)
        it = mFramesReceived.begin();   // new line (added by this commit)
        // Return the raw recording frame to the camera HAL.
        mCamera->releaseRecordingFrame(*it);
        mFrames.erase(it);              // old line (removed by this commit)
        mFramesReceived.erase(it);      // new line (added by this commit)
        ++mNumFramesDropped;
    }
}
@@ -169,6 +172,23 @@ sp<MetaData> CameraSource::getFormat() {
    return meta;
}

// Called by the MediaBuffer framework when the last reference to a buffer
// handed out by read() is released (see the add_ref() in read()). Looks up
// the IMemory recording frame backing the buffer, returns it to the camera
// HAL, destroys the wrapper MediaBuffer, and wakes stop(), which waits on
// mFrameCompleteCondition until mFramesBeingEncoded drains.
//
// @param buffer  the wrapper MediaBuffer whose data() aliases the pointer()
//                of one of the frames in mFramesBeingEncoded.
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());

    // FIX: read() pushes onto mFramesBeingEncoded and stop() waits on
    // mFrameCompleteCondition under mLock, but this callback (which runs on
    // the encoder's thread) touched the same list and condition with no lock
    // held — a data race and an unsynchronized Condition::signal(). Take the
    // same lock here.
    Mutex::Autolock autoLock(mLock);

    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() == buffer->data()) {
            // The encoder is done with this frame: give it back to the
            // camera HAL so it can be reused for capture.
            mCamera->releaseRecordingFrame(*it);
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            // Detach ourselves before release() so destruction does not
            // re-enter this observer, then drop the reference taken in
            // read(); this frees the wrapper (it does not own the data).
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    // A buffer we never handed out — programming error, abort loudly.
    CHECK_EQ(0, "signalBufferReturned: bogus buffer");
}

// NOTE(review): this span is a web-rendered diff hunk with the +/- markers
// stripped, so removed (old) and added (new) lines appear interleaved and the
// text below is NOT compilable as-is. The commit replaces the old
// copy-into-MediaBufferGroup path with a zero-copy wrapper MediaBuffer that
// aliases the camera frame and is reclaimed via signalBufferReturned().
//
// Contract (per the MediaSource interface): blocks until a recording frame
// is available or the source is stopped; on success *buffer holds one frame
// stamped with kKeyTime.
status_t CameraSource::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    LOGV("read");
@@ -185,33 +205,24 @@ status_t CameraSource::read(

    {
        Mutex::Autolock autoLock(mLock);
        while (mStarted && mFrames.empty()) {          // old line (removed)
        while (mStarted && mFramesReceived.empty()) {  // new line (added)
            mFrameAvailableCondition.wait(mLock);
        }
        if (!mStarted) {
            return OK;
        }
        frame = *mFrames.begin();                      // old line (removed)
        mFrames.erase(mFrames.begin());                // old line (removed)
        frame = *mFramesReceived.begin();              // new line (added)
        mFramesReceived.erase(mFramesReceived.begin()); // new line (added)

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Old lines below (removed): the counter now advances in
        // signalBufferReturned(), and the copy path through mBufferGroup
        // is deleted entirely.
        ++mNumFramesEncoded;
    }
    if (mBufferGroup == NULL) {
        mBufferGroup = new MediaBufferGroup();
        CHECK(mBufferGroup != NULL);
        mBufferGroup->add_buffer(new MediaBuffer(frame->size()));
    }

    mBufferGroup->acquire_buffer(buffer);
    memcpy((*buffer)->data(), frame->pointer(), frame->size());
    (*buffer)->set_range(0, frame->size());
    mCamera->releaseRecordingFrame(frame);

    (*buffer)->meta_data()->clear();
        // New lines below (added): zero-copy path. Track the frame as
        // outstanding, wrap its memory in a MediaBuffer (no copy), and take
        // a reference so the wrapper survives until the encoder releases it
        // back through signalBufferReturned().
        mFramesBeingEncoded.push_back(frame);
        *buffer = new MediaBuffer(frame->pointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);

    }
    return OK;
}

@@ -232,7 +243,7 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
    }
    ++mNumFramesReceived;

    mFrames.push_back(data);
    mFramesReceived.push_back(data);
    mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs);
    mFrameAvailableCondition.signal();
}