Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e4adddbd authored by Shuzhen Wang's avatar Shuzhen Wang
Browse files

Camera: Add PreviewFrameScheduler to address preview jitter

The ideal viewfinder user experience is that frames are presented to the
user at the same cadence as they are output by the camera sensor. However,
the processing latency between frames can vary due to factors such as CPU
load and differences in request settings. This variation in frame
processing latency results in uneven presentation of frames to the user.

Improve the user experience by:

1. Cache preview buffers in PreviewFrameScheduler.
2. For each choreographer callback, queue the oldest preview buffer,
   with the best matching presentation timestamp. Frame N's
   presentation timestamp is the choreographer timeline timestamp closest to
   (Frame N-1's presentation time + capture interval between frame N-1 and N).
3. Maintain at most 2 queue-able buffers. If a 3rd preview buffer becomes
   available, queue it to the buffer queue right away.

Test: Run GoogleCamera video mode and observe smoother viewfinder
Test: Observe surfaceflinger trace when running viewfinder
Test: Camera CTS
Bug: 200306379
Change-Id: I791c841aaded2acd112de8f7e99a131443b21e11
parent 74377e47
Loading
Loading
Loading
Loading
+2 −0
Original line number Diff line number Diff line
@@ -84,6 +84,7 @@ cc_library_shared {
        "device3/Camera3OutputUtils.cpp",
        "device3/Camera3DeviceInjectionMethods.cpp",
        "device3/UHRCropAndMeteringRegionMapper.cpp",
        "device3/PreviewFrameScheduler.cpp",
        "gui/RingBufferConsumer.cpp",
        "hidl/AidlCameraDeviceCallbacks.cpp",
        "hidl/AidlCameraServiceListener.cpp",
@@ -107,6 +108,7 @@ cc_library_shared {
    ],

    shared_libs: [
        "libandroid",
        "libbase",
        "libdl",
        "libexif",
+45 −20
Original line number Diff line number Diff line
@@ -22,6 +22,7 @@
#include <fstream>

#include <android-base/unique_fd.h>
#include <cutils/properties.h>
#include <ui/GraphicBuffer.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -347,33 +348,40 @@ status_t Camera3OutputStream::returnBufferCheckedLocked(
            mTraceFirstBuffer = false;
        }

        if (transform != -1) {
            setTransformLocked(transform);
        // If this is a JPEG output, and image dump mask is set, save image to
        // disk.
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
                mImageDumpMask) {
            dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
        }

        /* Certain consumers (such as AudioSource or HardwareComposer) use
         * MONOTONIC time, causing time misalignment if camera timestamp is
         * in BOOTTIME. Do the conversion if necessary. */
        res = native_window_set_buffers_timestamp(mConsumer.get(),
                mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
        nsecs_t adjustedTs = mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp;
        if (mPreviewFrameScheduler != nullptr) {
            res = mPreviewFrameScheduler->queuePreviewBuffer(adjustedTs, transform,
                    anwBuffer, anwReleaseFence);
            if (res != OK) {
            ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                ALOGE("%s: Stream %d: Error queuing buffer to preview buffer scheduler: %s (%d)",
                        __FUNCTION__, mId, strerror(-res), res);
                return res;
            }
        // If this is a JPEG output, and image dump mask is set, save image to
        // disk.
        if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
                mImageDumpMask) {
            dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
        } else {
            setTransform(transform);
            res = native_window_set_buffers_timestamp(mConsumer.get(), adjustedTs);
            if (res != OK) {
                ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                      __FUNCTION__, mId, strerror(-res), res);
                return res;
            }

            res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
            if (shouldLogError(res, state)) {
                ALOGE("%s: Stream %d: Error queueing buffer to native window:"
                      " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
            }
        }
    }
    mLock.lock();

    // Once a valid buffer has been returned to the queue, can no longer
@@ -412,6 +420,9 @@ status_t Camera3OutputStream::setTransform(int transform) {

status_t Camera3OutputStream::setTransformLocked(int transform) {
    status_t res = OK;

    if (transform == -1) return res;

    if (mState == STATE_ERROR) {
        ALOGE("%s: Stream in error state", __FUNCTION__);
        return INVALID_OPERATION;
@@ -437,7 +448,7 @@ status_t Camera3OutputStream::configureQueueLocked() {
        return res;
    }

    if ((res = configureConsumerQueueLocked()) != OK) {
    if ((res = configureConsumerQueueLocked(true /*allowPreviewScheduler*/)) != OK) {
        return res;
    }

@@ -461,7 +472,7 @@ status_t Camera3OutputStream::configureQueueLocked() {
    return OK;
}

status_t Camera3OutputStream::configureConsumerQueueLocked() {
status_t Camera3OutputStream::configureConsumerQueueLocked(bool allowPreviewScheduler) {
    status_t res;

    mTraceFirstBuffer = true;
@@ -547,6 +558,15 @@ status_t Camera3OutputStream::configureConsumerQueueLocked() {
    }

    mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
    if (allowPreviewScheduler && isConsumedByHWComposer()) {
        // We cannot distinguish between a SurfaceView and an ImageReader of
        // preview buffer format. The PreviewFrameScheduler needs to handle both.
        if (!property_get_bool("camera.disable_preview_scheduler", false)) {
            mPreviewFrameScheduler = std::make_unique<PreviewFrameScheduler>(*this, mConsumer);
            mTotalBufferCount += PreviewFrameScheduler::kQueueDepthWatermark;
        }
    }

    mHandoutTotalBufferCount = 0;
    mFrameCount = 0;
    mLastTimestamp = 0;
@@ -1185,6 +1205,11 @@ void Camera3OutputStream::returnPrefetchedBuffersLocked() {
    }
}

// Thread-safe wrapper around the two-argument shouldLogError(res, state)
// overload: takes mLock to snapshot the current stream state. Used by
// PreviewFrameScheduler, which runs outside the stream lock.
bool Camera3OutputStream::shouldLogError(status_t res) {
    Mutex::Autolock l(mLock);
    return shouldLogError(res, mState);
}

}; // namespace camera3

}; // namespace android
+5 −1
Original line number Diff line number Diff line
@@ -27,6 +27,7 @@
#include "Camera3IOStreamBase.h"
#include "Camera3OutputStreamInterface.h"
#include "Camera3BufferManager.h"
#include "PreviewFrameScheduler.h"

namespace android {

@@ -229,6 +230,7 @@ class Camera3OutputStream :
    static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);

    void setImageDumpMask(int mask) { mImageDumpMask = mask; }
    bool shouldLogError(status_t res);

  protected:
    Camera3OutputStream(int id, camera_stream_type_t type,
@@ -255,7 +257,7 @@ class Camera3OutputStream :

    status_t getEndpointUsageForSurface(uint64_t *usage,
            const sp<Surface>& surface) const;
    status_t configureConsumerQueueLocked();
    status_t configureConsumerQueueLocked(bool allowPreviewScheduler);

    // Consumer as the output of camera HAL
    sp<Surface> mConsumer;
@@ -370,6 +372,8 @@ class Camera3OutputStream :

    int mImageDumpMask = 0;

    // The preview stream scheduler for re-timing frames
    std::unique_ptr<PreviewFrameScheduler> mPreviewFrameScheduler;
}; // class Camera3OutputStream

} // namespace camera3
+1 −1
Original line number Diff line number Diff line
@@ -247,7 +247,7 @@ status_t Camera3SharedOutputStream::configureQueueLocked() {
        return res;
    }

    res = configureConsumerQueueLocked();
    res = configureConsumerQueueLocked(false/*allowPreviewScheduler*/);
    if (res != OK) {
        ALOGE("Failed to configureConsumerQueueLocked: %s(%d)", strerror(-res), res);
        return res;
+210 −0
Original line number Diff line number Diff line
/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-PreviewFrameScheduler"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include <android/looper.h>
#include "PreviewFrameScheduler.h"
#include "Camera3OutputStream.h"

namespace android {

namespace camera3 {

/**
 * Internal Choreographer thread implementation for polling and handling callbacks
 */

// Callback function for Choreographer
static void frameCallback(const AChoreographerFrameCallbackData* callbackData, void* data) {
    PreviewFrameScheduler* parent = static_cast<PreviewFrameScheduler*>(data);
    if (parent == nullptr) {
        ALOGE("%s: Invalid data for Choreographer callback!", __FUNCTION__);
        return;
    }

    size_t length = AChoreographerFrameCallbackData_getFrameTimelinesLength(callbackData);
    std::vector<nsecs_t> timeline(length);
    for (size_t i = 0; i < length; i++) {
        nsecs_t timestamp = AChoreographerFrameCallbackData_getFrameTimelineExpectedPresentTime(
                callbackData, i);
        timeline[i] = timestamp;
    }

    parent->onNewPresentationTime(timeline);

    AChoreographer_postExtendedFrameCallback(AChoreographer_getInstance(), frameCallback, data);
}

// Dedicated thread that prepares an ALooper, registers the choreographer
// frame callback, and polls the looper so the callbacks are dispatched on
// this thread.
struct ChoreographerThread : public Thread {
    ChoreographerThread();
    // Records the scheduler to notify from the frame callback, then launches
    // the thread.
    status_t start(PreviewFrameScheduler* parent);
    virtual status_t readyToRun() override;
    virtual bool threadLoop() override;

protected:
    virtual ~ChoreographerThread() {}

private:
    // Non-assignable: the thread holds a raw back-pointer to its parent.
    ChoreographerThread &operator=(const ChoreographerThread &);

    // This only impacts the shutdown time. It won't impact the choreographer
    // callback frequency.
    static constexpr nsecs_t kPollingTimeoutMs = 5;
    // Owning PreviewFrameScheduler; set by start(). Not owned by this thread.
    PreviewFrameScheduler* mParent = nullptr;
};

// Constructed as a non-Java thread (canCallJava = false).
ChoreographerThread::ChoreographerThread() : Thread(false /*canCallJava*/) {
}

// Remembers the parent scheduler (read by the choreographer frame callback)
// and starts the thread. Returns the status of Thread::run().
status_t ChoreographerThread::start(PreviewFrameScheduler* parent) {
    mParent = parent;
    return run("PreviewChoreographer");
}

// One-time thread setup: prepare a looper for this thread and post the first
// choreographer frame callback. Returns NO_INIT if no AChoreographer
// instance is available after preparing the looper.
status_t ChoreographerThread::readyToRun() {
    ALooper_prepare(ALOOPER_PREPARE_ALLOW_NON_CALLBACKS);
    // Use nullptr (not NULL) per modern C++ convention, and log the failure
    // instead of silently returning so start-up problems are diagnosable.
    if (AChoreographer_getInstance() == nullptr) {
        ALOGE("%s: Failed to get AChoreographer instance", __FUNCTION__);
        return NO_INIT;
    }

    AChoreographer_postExtendedFrameCallback(
            AChoreographer_getInstance(), frameCallback, mParent);
    return OK;
}

// One loop iteration: stop once an exit has been requested; otherwise poll
// the looper (bounded by kPollingTimeoutMs) so pending choreographer
// callbacks run. Returning true keeps the thread looping.
bool ChoreographerThread::threadLoop() {
    const bool keepRunning = !exitPending();
    if (keepRunning) {
        ALooper_pollOnce(kPollingTimeoutMs, nullptr, nullptr, nullptr);
    }
    return keepRunning;
}

/**
 * PreviewFrameScheduler implementation
 */

// Retimes preview buffers for `parent` and queues them to `consumer`. The
// choreographer thread is created here but only started lazily by the first
// queuePreviewBuffer() call.
PreviewFrameScheduler::PreviewFrameScheduler(Camera3OutputStream& parent, sp<Surface> consumer) :
        mParent(parent),
        mConsumer(consumer),
        mChoreographerThread(new ChoreographerThread()) {
}

PreviewFrameScheduler::~PreviewFrameScheduler() {
    {
        // Request exit under mLock so it isn't set concurrently with a
        // queuePreviewBuffer()/onNewPresentationTime() call in flight.
        Mutex::Autolock l(mLock);
        mChoreographerThread->requestExit();
    }
    // Join outside the lock: the choreographer callback path
    // (onNewPresentationTime) also acquires mLock, so joining while holding
    // it could deadlock.
    mChoreographerThread->join();
}

// Enqueues a preview buffer for choreographer-paced presentation.
//
// @param timestamp     capture timestamp (already offset-adjusted by caller)
// @param transform     surface transform to apply when the buffer is queued
// @param anwBuffer     buffer to present
// @param releaseFence  release fence fd; ownership passes to this scheduler
// @return OK on success, or the error from thread start / direct queueing.
//
// Buffers normally wait in mPendingBuffers until the next choreographer
// callback. If more than kQueueDepthWatermark buffers are pending, the
// oldest is queued to the consumer immediately, capping the latency the
// scheduler can add.
status_t PreviewFrameScheduler::queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
        ANativeWindowBuffer* anwBuffer, int releaseFence) {
    // Start choreographer thread if it's not already running.
    if (!mChoreographerThread->isRunning()) {
        status_t res = mChoreographerThread->start(this);
        if (res != OK) {
            ALOGE("%s: Failed to init choreographer thread!", __FUNCTION__);
            return res;
        }
    }

    {
        Mutex::Autolock l(mLock);
        mPendingBuffers.emplace(timestamp, transform, anwBuffer, releaseFence);

        // Queue buffer to client right away if pending buffers are more than
        // the queue depth watermark.
        if (mPendingBuffers.size() > kQueueDepthWatermark) {
            auto oldBuffer = mPendingBuffers.front();
            mPendingBuffers.pop();

            status_t res = queueBufferToClientLocked(oldBuffer, oldBuffer.timestamp);
            if (res != OK) {
                return res;
            }

            // Reset the last capture and presentation time so pacing restarts
            // cleanly on the next choreographer callback.
            mLastCameraCaptureTime = 0;
            mLastCameraPresentTime = 0;
        }
        // Update the trace counter on both paths. Previously only the
        // non-overflow path traced it, leaving a stale pending-buffer count
        // after a direct queue.
        ATRACE_INT(kPendingBufferTraceName, mPendingBuffers.size());
    }
    return OK;
}

void PreviewFrameScheduler::onNewPresentationTime(const std::vector<nsecs_t>& timeline) {
    ATRACE_CALL();
    Mutex::Autolock l(mLock);
    if (mPendingBuffers.size() > 0) {
        auto nextBuffer = mPendingBuffers.front();
        mPendingBuffers.pop();

        // Find the best presentation time by finding the element in the
        // choreographer timeline that's closest to the ideal presentation time.
        // The ideal presentation time is the last presentation time + frame
        // interval.
        nsecs_t cameraInterval = nextBuffer.timestamp - mLastCameraCaptureTime;
        nsecs_t idealPresentTime = (cameraInterval < kSpacingResetIntervalNs) ?
                (mLastCameraPresentTime + cameraInterval) : nextBuffer.timestamp;
        nsecs_t presentTime = *std::min_element(timeline.begin(), timeline.end(),
                [idealPresentTime](nsecs_t p1, nsecs_t p2) {
                        return std::abs(p1 - idealPresentTime) < std::abs(p2 - idealPresentTime);
                });

        status_t res = queueBufferToClientLocked(nextBuffer, presentTime);
        ATRACE_INT(kPendingBufferTraceName, mPendingBuffers.size());

        if (mParent.shouldLogError(res)) {
            ALOGE("%s: Preview Stream: Error queueing buffer to native window:"
                    " %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        mLastCameraCaptureTime = nextBuffer.timestamp;
        mLastCameraPresentTime = presentTime;
    }
}

// Applies the buffer's transform to the parent stream, stamps `timestamp`
// as the presentation time on the consumer, and queues the buffer.
// On queueBuffer failure the release fence fd is closed here, since the
// consumer did not take ownership of it.
// NOTE(review): the early return when setting the timestamp fails leaves
// bufferHolder.releaseFence open — confirm the caller handles that fd.
status_t PreviewFrameScheduler::queueBufferToClientLocked(
        const BufferHolder& bufferHolder, nsecs_t timestamp) {
    mParent.setTransform(bufferHolder.transform);

    status_t res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
    if (res != OK) {
        ALOGE("%s: Preview Stream: Error setting timestamp: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    res = mConsumer->queueBuffer(mConsumer.get(), bufferHolder.anwBuffer.get(),
            bufferHolder.releaseFence);
    if (res != OK) {
        close(bufferHolder.releaseFence);
    }

    return res;
}

}; // namespace camera3

}; // namespace android
Loading