Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit f0d962a6 authored by Zhijun He's avatar Zhijun He
Browse files

Camera3: fix ZSL processor3 issues

- Return the input buffer in the capture result. Per the HAL3.2 spec, the
input buffer should be returned in process_capture_result rather than
immediately after the capture request is processed.
- Make the depths of mZslQueue and mFrameList the same. It doesn't make sense
for the mFrameList depth to be larger than the mZslQueue depth.
- Set the depths of mZslQueue and mFrameList based on pipelineMaxDepth.
- Clear result queue while clearing zsl buffer queue.
- Hook up camera3 buffer listener with ZslProcessor3, make sure that adding the
same listener multiple times has no effect.
- Remove the flush call in pushToReprocess; it would be a guaranteed deadlock
once the camera3 buffer listener is hooked up.

Change-Id: I285155ab4241e827145855d628f8e98b881c01d5
parent 954d248e
Loading
Loading
Loading
Loading
+61 −17
Original line number Diff line number Diff line
@@ -52,8 +52,31 @@ ZslProcessor3::ZslProcessor3(
        mFrameListHead(0),
        mZslQueueHead(0),
        mZslQueueTail(0) {
    mZslQueue.insertAt(0, kZslBufferDepth);
    mFrameList.insertAt(0, kFrameListDepth);
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
                        " use default pipeline max depth %zu", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }
        }
    }

    ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%d)",
          __FUNCTION__, pipelineMaxDepth);
    mBufferQueueDepth = pipelineMaxDepth + 1;
    mFrameListDepth = pipelineMaxDepth + 1;

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.insertAt(0, mFrameListDepth);
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
@@ -70,13 +93,25 @@ void ZslProcessor3::onResultAvailable(const CaptureResult &result) {
    camera_metadata_ro_entry_t entry;
    entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    nsecs_t timestamp = entry.data.i64[0];
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have timestamp, skip this result");
        return;
    }
    (void)timestamp;
    ALOGVV("Got preview metadata for timestamp %" PRId64, timestamp);

    entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have frame number, skip this result");
        return;
    }
    int32_t frameNumber = entry.data.i32[0];

    ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);

    if (mState != RUNNING) return;

    mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
    mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
    mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
}

status_t ZslProcessor3::updateStream(const Parameters &params) {
@@ -136,7 +171,7 @@ status_t ZslProcessor3::updateStream(const Parameters &params) {
        // Note that format specified internally in Camera3ZslStream
        res = device->createZslStream(
                params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
                kZslBufferDepth,
                mBufferQueueDepth,
                &mZslStreamId,
                &mZslStream);
        if (res != OK) {
@@ -145,7 +180,11 @@ status_t ZslProcessor3::updateStream(const Parameters &params) {
                    strerror(-res), res);
            return res;
        }

        // Only add the camera3 buffer listener when the stream is created.
        mZslStream->addBufferListener(this);
    }

    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
@@ -277,15 +316,6 @@ status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
            return INVALID_OPERATION;
        }

        // Flush device to clear out all in-flight requests pending in HAL.
        res = client->getCameraDevice()->flush();
        if (res != OK) {
            ALOGE("%s: Camera %d: Failed to flush device: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return res;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
@@ -323,11 +353,19 @@ status_t ZslProcessor3::clearZslQueue() {

status_t ZslProcessor3::clearZslQueueLocked() {
    // Nothing to do until the ZSL stream has been created.
    if (mZslStream == 0) {
        return OK;
    }
    // Drop the buffered result metadata first, then empty the input
    // ring buffer backing the ZSL stream.
    clearZslResultQueueLocked();
    return mZslStream->clearInputRingBuffer();
}

void ZslProcessor3::clearZslResultQueueLocked() {
    // Reset the result-metadata ring buffer: drop every stored entry,
    // repopulate it with mFrameListDepth empty slots, and rewind the
    // head index to the start.
    mFrameList.clear();
    mFrameList.insertAt(0, mFrameListDepth);
    mFrameListHead = 0;
}

void ZslProcessor3::dump(int fd, const Vector<String16>& /*args*/) const {
    Mutex::Autolock l(mInputMutex);
    if (!mLatestCapturedRequest.isEmpty()) {
@@ -481,11 +519,17 @@ void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) {
    // We need to guarantee that if we do two back-to-back captures,
    // the second won't use a buffer that's older/the same as the first, which
    // is theoretically possible if we don't clear out the queue and the
    // selection criteria is something like 'newest'. Clearing out the queue
    // on a completed capture ensures we'll only use new data.
    // selection criteria is something like 'newest'. Clearing out the result
    // metadata queue on a completed capture ensures we'll only use new timestamp.
    // Calling clearZslQueueLocked is a guaranteed deadlock because this callback
    // holds the Camera3Stream internal lock (mLock), and clearZslQueueLocked requires
    // to hold the same lock.
    // TODO: need figure out a way to clear the Zsl buffer queue properly. Right now
    // it is safe not to do so, as back to back ZSL capture requires stop and start
    // preview, which will flush ZSL queue automatically.
    ALOGV("%s: Memory optimization, clearing ZSL queue",
          __FUNCTION__);
    clearZslQueueLocked();
    clearZslResultQueueLocked();

    // Required so we accept more ZSL requests
    mState = RUNNING;
+5 −2
Original line number Diff line number Diff line
@@ -107,8 +107,9 @@ class ZslProcessor3 :
        CameraMetadata frame;
    };

    static const size_t kZslBufferDepth = 4;
    static const size_t kFrameListDepth = kZslBufferDepth * 2;
    static const int32_t kDefaultMaxPipelineDepth = 4;
    size_t mBufferQueueDepth;
    size_t mFrameListDepth;
    Vector<CameraMetadata> mFrameList;
    size_t mFrameListHead;

@@ -124,6 +125,8 @@ class ZslProcessor3 :

    status_t clearZslQueueLocked();

    void clearZslResultQueueLocked();

    void dumpZslQueue(int id) const;

    nsecs_t getCandidateTimestampLocked(size_t* metadataIdx) const;
+20 −18
Original line number Diff line number Diff line
@@ -1720,7 +1720,8 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
    status_t res;

    uint32_t frameNumber = result->frame_number;
    if (result->result == NULL && result->num_output_buffers == 0) {
    if (result->result == NULL && result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d",
                frameNumber);
        return;
@@ -1805,7 +1806,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
        }

        request.numBuffersLeft -= result->num_output_buffers;

        request.numBuffersLeft -= (result->input_buffer != NULL) ? 1 : 0;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
@@ -1906,6 +1907,19 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
        }
    }

    if (result->input_buffer != NULL) {
        Camera3Stream *stream =
            Camera3Stream::cast(result->input_buffer->stream);
        res = stream->returnInputBuffer(*(result->input_buffer));
        // Note: stream may be deallocated at this point, if this buffer was the
        // last reference to it.
        if (res != OK) {
            ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                    "  its stream:%s (%d)",  __FUNCTION__,
                    frameNumber, strerror(-res), res);
        }
    }

    // Finally, signal any waiters for new frames

    if (gotResult) {
@@ -2318,6 +2332,7 @@ bool Camera3Device::RequestThread::threadLoop() {
    }

    camera3_stream_buffer_t inputBuffer;
    uint32_t totalNumBuffers = 0;

    // Fill in buffers

@@ -2330,6 +2345,7 @@ bool Camera3Device::RequestThread::threadLoop() {
            cleanUpFailedRequest(request, nextRequest, outputBuffers);
            return true;
        }
        totalNumBuffers += 1;
    } else {
        request.input_buffer = NULL;
    }
@@ -2348,6 +2364,7 @@ bool Camera3Device::RequestThread::threadLoop() {
        }
        request.num_output_buffers++;
    }
    totalNumBuffers += request.num_output_buffers;

    // Log request in the in-flight queue
    sp<Camera3Device> parent = mParent.promote();
@@ -2358,7 +2375,7 @@ bool Camera3Device::RequestThread::threadLoop() {
    }

    res = parent->registerInFlight(request.frame_number,
            request.num_output_buffers, nextRequest->mResultExtras);
            totalNumBuffers, nextRequest->mResultExtras);
    ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
           ", burstId = %" PRId32 ".",
            __FUNCTION__,
@@ -2414,21 +2431,6 @@ bool Camera3Device::RequestThread::threadLoop() {
    }
    mPrevTriggers = triggerCount;

    // Return input buffer back to framework
    if (request.input_buffer != NULL) {
        Camera3Stream *stream =
            Camera3Stream::cast(request.input_buffer->stream);
        res = stream->returnInputBuffer(*(request.input_buffer));
        // Note: stream may be deallocated at this point, if this buffer was the
        // last reference to it.
        if (res != OK) {
            ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                    "  its stream:%s (%d)",  __FUNCTION__,
                    request.frame_number, strerror(-res), res);
            // TODO: Report error upstream
        }
    }

    return true;
}

+1 −1
Original line number Diff line number Diff line
@@ -501,7 +501,7 @@ class Camera3Device :
        // Set by process_capture_result call with valid metadata
        bool    haveResultMetadata;
        // Decremented by calls to process_capture_result with valid output
        // buffers
        // and input buffers
        int     numBuffersLeft;
        CaptureResultExtras resultExtras;

+12 −0
Original line number Diff line number Diff line
@@ -485,6 +485,18 @@ status_t Camera3Stream::returnInputBufferLocked(
void Camera3Stream::addBufferListener(
        wp<Camera3StreamBufferListener> listener) {
    Mutex::Autolock l(mLock);

    // Registering the same listener more than once must have no effect,
    // so scan the current list for a duplicate before appending.
    List<wp<Camera3StreamBufferListener> >::iterator it;
    for (it = mBufferListenerList.begin(); it != mBufferListenerList.end(); ++it) {
        if (*it == listener) {
            ALOGE("%s: Try to add the same listener twice, ignoring...", __FUNCTION__);
            return;
        }
    }

    mBufferListenerList.push_back(listener);
}

Loading