
Commit 6524b7e1 authored by Eino-Ville Talvala, committed by Android (Google) Code Review

Merge "Camera2: Factor out FrameProcessor." into jb-mr1-dev

parents 2f9a4410 a16733ee
Android.mk +2 −1
@@ -12,7 +12,8 @@ LOCAL_SRC_FILES:= \
    Camera2Client.cpp \
    Camera2Device.cpp \
    camera2/CameraMetadata.cpp \
-    camera2/Parameters.cpp
+    camera2/Parameters.cpp \
+    camera2/FrameProcessor.cpp

LOCAL_SHARED_LIBRARIES:= \
    libui \
Camera2Client.cpp +43 −188
@@ -393,8 +393,8 @@ status_t Camera2Client::connect(const sp<ICameraClient>& client) {

    mClientPid = getCallingPid();

-    Mutex::Autolock iccl(mICameraClientLock);
-    mCameraClient = client;
+    mSharedCameraClient = client;

    SharedParameters::Lock l(mParameters);
    l.mParameters.state = Parameters::STOPPED;
@@ -433,10 +433,9 @@ status_t Camera2Client::unlock() {
    // TODO: Check for uninterruptable conditions

    if (mClientPid == getCallingPid()) {
-        Mutex::Autolock iccl(mICameraClientLock);

        mClientPid = 0;
-        mCameraClient.clear();
+        mSharedCameraClient.clear();
        return OK;
    }

@@ -1457,22 +1456,25 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
            case Parameters::FOCUS_MODE_FIXED:
            default:
                if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
                    ALOGE("%s: Unexpected AF state change %d (ID %d) in focus mode %d",
                          __FUNCTION__, newState, triggerId, l.mParameters.focusMode);
                    ALOGE("%s: Unexpected AF state change %d "
                            "(ID %d) in focus mode %d",
                          __FUNCTION__, newState, triggerId,
                            l.mParameters.focusMode);
                }
        }
    }
    if (sendMovingMessage) {
-        Mutex::Autolock iccl(mICameraClientLock);
-        if (mCameraClient != 0) {
-            mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
+        SharedCameraClient::Lock l(mSharedCameraClient);
+        if (l.mCameraClient != 0) {
+            l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
                    afInMotion ? 1 : 0, 0);
        }
    }
    if (sendCompletedMessage) {
-        Mutex::Autolock iccl(mICameraClientLock);
-        if (mCameraClient != 0) {
-            mCameraClient->notifyCallback(CAMERA_MSG_FOCUS, success ? 1 : 0, 0);
+        SharedCameraClient::Lock l(mSharedCameraClient);
+        if (l.mCameraClient != 0) {
+            l.mCameraClient->notifyCallback(CAMERA_MSG_FOCUS,
+                    success ? 1 : 0, 0);
        }
    }
}
@@ -1487,185 +1489,38 @@ void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
            __FUNCTION__, newState, triggerId);
}

-Camera2Client::FrameProcessor::FrameProcessor(wp<Camera2Client> client):
-        Thread(false), mClient(client) {
-}
-
-Camera2Client::FrameProcessor::~FrameProcessor() {
-    ALOGV("%s: Exit", __FUNCTION__);
-}
-
-void Camera2Client::FrameProcessor::dump(int fd, const Vector<String16>& args) {
-    String8 result("    Latest received frame:\n");
-    write(fd, result.string(), result.size());
-    mLastFrame.dump(fd, 2, 6);
-}
-
-bool Camera2Client::FrameProcessor::threadLoop() {
-    status_t res;
-
-    sp<Camera2Device> device;
-    {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        device = client->mDevice;
-    }
-
-    res = device->waitForNextFrame(kWaitDuration);
-    if (res == OK) {
-        sp<Camera2Client> client = mClient.promote();
-        if (client == 0) return false;
-        processNewFrames(client);
-    } else if (res != TIMED_OUT) {
-        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
-                "frames: %s (%d)", strerror(-res), res);
-    }
-
-    return true;
-}
-
-void Camera2Client::FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
-    status_t res;
-    CameraMetadata frame;
-    while ( (res = client->mDevice->getNextFrame(&frame)) == OK) {
-        camera_metadata_entry_t entry;
-        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
-        if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
-                    __FUNCTION__, client->mCameraId, strerror(-res), res);
-            break;
-        }
-
-        res = processFaceDetect(frame, client);
-        if (res != OK) break;
-
-        mLastFrame.acquire(frame);
-    }
-    if (res != NOT_ENOUGH_DATA) {
-        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
-                __FUNCTION__, client->mCameraId, strerror(-res), res);
-        return;
-    }
-
-    return;
+int Camera2Client::getCameraId() {
+    return mCameraId;
}

-status_t Camera2Client::FrameProcessor::processFaceDetect(
-    const CameraMetadata &frame, sp<Camera2Client> &client) {
-    status_t res;
-    camera_metadata_ro_entry_t entry;
-    bool enableFaceDetect;
-    int maxFaces;
-    {
-        SharedParameters::Lock l(client->mParameters);
-        enableFaceDetect = l.mParameters.enableFaceDetect;
-    }
-    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
-
-    // TODO: This should be an error once implementations are compliant
-    if (entry.count == 0) {
-        return OK;
-    }
-
-    uint8_t faceDetectMode = entry.data.u8[0];
-
-    camera_frame_metadata metadata;
-    Vector<camera_face_t> faces;
-    metadata.number_of_faces = 0;
-
-    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
-        SharedParameters::Lock l(client->mParameters);
-        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
-        if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Unable to read face rectangles",
-                    __FUNCTION__, client->mCameraId);
-            return res;
-        }
-        metadata.number_of_faces = entry.count / 4;
-        if (metadata.number_of_faces >
-                l.mParameters.fastInfo.maxFaces) {
-            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
-                    __FUNCTION__, client->mCameraId,
-                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
-            return res;
+const sp<Camera2Device>& Camera2Client::getCameraDevice() {
+    return mDevice;
}
-        const int32_t *faceRects = entry.data.i32;
-
-        entry = frame.find(ANDROID_STATS_FACE_SCORES);
-        if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Unable to read face scores",
-                    __FUNCTION__, client->mCameraId);
-            return res;
+camera2::SharedParameters& Camera2Client::getParameters() {
+    return mParameters;
}
-        const uint8_t *faceScores = entry.data.u8;
-
-        const int32_t *faceLandmarks = NULL;
-        const int32_t *faceIds = NULL;
-
-        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
-            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
-            if (entry.count == 0) {
-                ALOGE("%s: Camera %d: Unable to read face landmarks",
-                        __FUNCTION__, client->mCameraId);
-                return res;
+Camera2Client::SharedCameraClient::Lock::Lock(SharedCameraClient &client):
+        mCameraClient(client.mCameraClient),
+        mSharedClient(client) {
+    mSharedClient.mCameraClientLock.lock();
}
-            faceLandmarks = entry.data.i32;
-
-            entry = frame.find(ANDROID_STATS_FACE_IDS);
-
-            if (entry.count == 0) {
-                ALOGE("%s: Camera %d: Unable to read face IDs",
-                        __FUNCTION__, client->mCameraId);
-                return res;
-            }
-            faceIds = entry.data.i32;
-        }
-
-        faces.setCapacity(metadata.number_of_faces);
-
-        for (int i = 0; i < metadata.number_of_faces; i++) {
-            camera_face_t face;
-
-            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
-            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
-            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
-            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
-
-            face.score = faceScores[i];
-            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
-                face.id = faceIds[i];
-                face.left_eye[0] =
-                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
-                face.left_eye[1] =
-                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
-                face.right_eye[0] =
-                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
-                face.right_eye[1] =
-                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
-                face.mouth[0] =
-                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
-                face.mouth[1] =
-                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
-            } else {
-                face.id = 0;
-                face.left_eye[0] = face.left_eye[1] = -2000;
-                face.right_eye[0] = face.right_eye[1] = -2000;
-                face.mouth[0] = face.mouth[1] = -2000;
-            }
-            faces.push_back(face);
+Camera2Client::SharedCameraClient::Lock::~Lock() {
+    mSharedClient.mCameraClientLock.unlock();
}

-        metadata.faces = faces.editArray();
+Camera2Client::SharedCameraClient& Camera2Client::SharedCameraClient::operator=(
+        const sp<ICameraClient>&client) {
+    Mutex::Autolock l(mCameraClientLock);
+    mCameraClient = client;
+    return *this;
}

-    if (metadata.number_of_faces != 0) {
-        Mutex::Autolock iccl(client->mICameraClientLock);
-        if (client->mCameraClient != NULL) {
-            client->mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
-                    NULL, &metadata);
-        }
-    }
-    return OK;
+void Camera2Client::SharedCameraClient::clear() {
+    Mutex::Autolock l(mCameraClientLock);
+    mCameraClient.clear();
}

void Camera2Client::onCallbackAvailable() {
@@ -1777,11 +1632,11 @@ void Camera2Client::onCallbackAvailable() {

    // Call outside parameter lock to allow re-entrancy from notification
    {
-        Mutex::Autolock iccl(mICameraClientLock);
-        if (mCameraClient != 0) {
+        SharedCameraClient::Lock l(mSharedCameraClient);
+        if (l.mCameraClient != 0) {
            ALOGV("%s: Camera %d: Invoking client data callback",
                    __FUNCTION__, mCameraId);
-            mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
+            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME,
                    callbackHeap->mBuffers[heapIdx], NULL);
        }
    }
@@ -1853,9 +1708,9 @@ void Camera2Client::onCaptureAvailable() {
        captureHeap = mCaptureHeap;
    }
    // Call outside parameter locks to allow re-entrancy from notification
-    Mutex::Autolock iccl(mICameraClientLock);
-    if (mCameraClient != 0) {
-        mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
+    SharedCameraClient::Lock l(mSharedCameraClient);
+    if (l.mCameraClient != 0) {
+        l.mCameraClient->dataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
                captureHeap->mBuffers[0], NULL);
    }
}
@@ -1951,9 +1806,9 @@ void Camera2Client::onRecordingFrameAvailable() {
    }

    // Call outside locked parameters to allow re-entrancy from notification
-    Mutex::Autolock iccl(mICameraClientLock);
-    if (mCameraClient != 0) {
-        mCameraClient->dataCallbackTimestamp(timestamp,
+    SharedCameraClient::Lock l(mSharedCameraClient);
+    if (l.mCameraClient != 0) {
+        l.mCameraClient->dataCallbackTimestamp(timestamp,
                CAMERA_MSG_VIDEO_FRAME,
                recordingHeap->mBuffers[heapIdx]);
    }
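The SharedCameraClient introduced in this file replaces the old pattern of manually taking mICameraClientLock before each use of mCameraClient: the client pointer is now reachable only through an RAII Lock object, so callers cannot forget the mutex. Below is a minimal standalone sketch of the same idiom, using std::mutex and std::shared_ptr in place of Android's Mutex and sp<>; the names Client, SharedClient, and notify are illustrative, not part of this change.

// Sketch of the lock-scoped accessor idiom from the diff above.
// Standard-library stand-ins: std::mutex for Mutex, std::shared_ptr
// for sp<ICameraClient>; Client and SharedClient are hypothetical.
#include <iostream>
#include <memory>
#include <mutex>

struct Client {                           // stand-in for ICameraClient
    void notify(const char* msg) { std::cout << msg << "\n"; }
};

class SharedClient {
  public:
    // RAII guard: holding a Lock is the only way to reach the client
    // pointer, so every access is serialized by mLock by construction.
    class Lock {
      public:
        explicit Lock(SharedClient& shared)
                : mClient(shared.mClient), mShared(shared) {
            mShared.mLock.lock();
        }
        ~Lock() { mShared.mLock.unlock(); }
        std::shared_ptr<Client>& mClient;  // valid only while locked
      private:
        SharedClient& mShared;
    };

    SharedClient& operator=(const std::shared_ptr<Client>& client) {
        std::lock_guard<std::mutex> l(mLock);
        mClient = client;
        return *this;
    }

    void clear() {
        std::lock_guard<std::mutex> l(mLock);
        mClient.reset();
    }

  private:
    std::shared_ptr<Client> mClient;
    mutable std::mutex mLock;
};

int main() {
    SharedClient shared;
    shared = std::make_shared<Client>();
    {
        SharedClient::Lock l(shared);      // lock taken here
        if (l.mClient) l.mClient->notify("focus complete");
    }                                      // lock released here
    shared.clear();
    return 0;
}

Because the real member is private and the public reference lives on the Lock, a forgotten lock becomes a compile error rather than an intermittent race, which is the point of the refactoring in the hunks above.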
Camera2Client.h +28 −28
@@ -20,6 +20,7 @@
#include "Camera2Device.h"
#include "CameraService.h"
#include "camera2/Parameters.h"
#include "camera2/FrameProcessor.h"
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/CpuConsumer.h>
@@ -82,6 +83,32 @@ public:
    virtual void notifyAutoExposure(uint8_t newState, int triggerId);
    virtual void notifyAutoWhitebalance(uint8_t newState, int triggerId);

+    // Interface used by independent components of Camera2Client.
+
+    int getCameraId();
+    const sp<Camera2Device>& getCameraDevice();
+    camera2::SharedParameters& getParameters();
+
+    // Simple class to ensure that access to ICameraClient is serialized by
+    // requiring mCameraClientLock to be locked before access to mCameraClient
+    // is possible.
+    class SharedCameraClient {
+      public:
+        class Lock {
+          public:
+            Lock(SharedCameraClient &client);
+            ~Lock();
+            sp<ICameraClient> &mCameraClient;
+          private:
+            SharedCameraClient &mSharedClient;
+        };
+        SharedCameraClient& operator=(const sp<ICameraClient>& client);
+        void clear();
+      private:
+        sp<ICameraClient> mCameraClient;
+        mutable Mutex mCameraClientLock;
+    } mSharedCameraClient;
+
private:
    /** ICamera interface-related private members */

@@ -91,11 +118,6 @@ private:
    // they're called
    mutable Mutex mICameraLock;

-    // Mutex that must be locked by methods accessing the base Client's
-    // mCameraClient ICameraClient interface member, for sending notifications
-    // up to the camera user
-    mutable Mutex mICameraClientLock;
-
    typedef camera2::Parameters Parameters;
    typedef camera2::CameraMetadata CameraMetadata;

@@ -131,29 +153,7 @@ private:
    // Used with stream IDs
    static const int NO_STREAM = -1;

-    /* Output frame metadata processing thread.  This thread waits for new
-     * frames from the device, and analyzes them as necessary.
-     */
-    class FrameProcessor: public Thread {
-      public:
-        FrameProcessor(wp<Camera2Client> client);
-        ~FrameProcessor();
-
-        void dump(int fd, const Vector<String16>& args);
-      private:
-        static const nsecs_t kWaitDuration = 10000000; // 10 ms
-        wp<Camera2Client> mClient;
-
-        virtual bool threadLoop();
-
-        void processNewFrames(sp<Camera2Client> &client);
-        status_t processFaceDetect(const CameraMetadata &frame,
-                sp<Camera2Client> &client);
-
-        CameraMetadata mLastFrame;
-    };
-
-    sp<FrameProcessor> mFrameProcessor;
+    sp<camera2::FrameProcessor> mFrameProcessor;

    /* Preview related members */

camera2/FrameProcessor.cpp +214 −0 (new file)
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2Client::FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "FrameProcessor.h"
#include "../Camera2Device.h"
#include "../Camera2Client.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<Camera2Client> client):
        Thread(false), mClient(client) {
}

FrameProcessor::~FrameProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
}

void FrameProcessor::dump(int fd, const Vector<String16>& args) {
    String8 result("    Latest received frame:\n");
    write(fd, result.string(), result.size());
    mLastFrame.dump(fd, 2, 6);
}

bool FrameProcessor::threadLoop() {
    status_t res;

    sp<Camera2Device> device;
    {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        device = client->getCameraDevice();
    }

    res = device->waitForNextFrame(kWaitDuration);
    if (res == OK) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        processNewFrames(client);
    } else if (res != TIMED_OUT) {
        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
                "frames: %s (%d)", strerror(-res), res);
    }

    return true;
}

void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
    status_t res;
    CameraMetadata frame;
    while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
        camera_metadata_entry_t entry;
        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
            break;
        }

        res = processFaceDetect(frame, client);
        if (res != OK) break;

        mLastFrame.acquire(frame);
    }
    if (res != NOT_ENOUGH_DATA) {
        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
        return;
    }

    return;
}

status_t FrameProcessor::processFaceDetect(
    const CameraMetadata &frame, sp<Camera2Client> &client) {
    status_t res;
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;
    int maxFaces;
    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face rectangles",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        faces.setCapacity(metadata.number_of_faces);

        for (int i = 0; i < metadata.number_of_faces; i++) {
            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] =
                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
                face.left_eye[1] =
                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
                face.right_eye[0] =
                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
                face.right_eye[1] =
                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
                face.mouth[0] =
                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
                face.mouth[1] =
                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
            } else {
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    if (metadata.number_of_faces != 0) {
        Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
        if (l.mCameraClient != NULL) {
            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                    NULL, &metadata);
        }
    }
    return OK;
}


}; // namespace camera2
}; // namespace android
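FrameProcessor holds only a weak reference (wp<Camera2Client>) to its owner and promotes it at most once per threadLoop() iteration, so the processing thread never keeps the client alive on its own and exits when promotion fails. A standalone sketch of that lifetime pattern with std::weak_ptr in place of wp<>/sp<> follows; Client, FrameLoop, and processFrame are illustrative names, not AOSP API.

// Sketch of the weak-reference thread-loop pattern used above.
#include <chrono>
#include <iostream>
#include <memory>
#include <thread>

struct Client {
    void processFrame() { std::cout << "frame processed\n"; }
};

class FrameLoop {
  public:
    explicit FrameLoop(std::weak_ptr<Client> client)
            : mClient(std::move(client)) {}

    void run() {
        while (loopOnce()) {}
    }

  private:
    bool loopOnce() {
        // Promote the weak reference for this iteration only; if the
        // owner has been destroyed, end the loop (analogous to
        // returning false from Thread::threadLoop()).
        std::shared_ptr<Client> client = mClient.lock();
        if (!client) return false;
        client->processFrame();
        // Stands in for waitForNextFrame(kWaitDuration).
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
        return true;
    }

    std::weak_ptr<Client> mClient;
};

int main() {
    auto client = std::make_shared<Client>();
    FrameLoop loop(client);
    std::thread t(&FrameLoop::run, &loop);
    std::this_thread::sleep_for(std::chrono::milliseconds(35));
    client.reset();   // drop the last strong reference; loop exits
    t.join();
    return 0;
}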
camera2/FrameProcessor.h +57 −0 (new file)
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H
#define ANDROID_SERVERS_CAMERA_CAMERA2_FRAMEPROCESSOR_H

#include <utils/Thread.h>
#include <utils/String16.h>
#include <utils/Vector.h>
#include "CameraMetadata.h"

namespace android {

class Camera2Client;

namespace camera2 {

/* Output frame metadata processing thread.  This thread waits for new
 * frames from the device, and analyzes them as necessary.
 */
class FrameProcessor: public Thread {
  public:
    FrameProcessor(wp<Camera2Client> client);
    ~FrameProcessor();

    void dump(int fd, const Vector<String16>& args);
  private:
    static const nsecs_t kWaitDuration = 10000000; // 10 ms
    wp<Camera2Client> mClient;

    virtual bool threadLoop();

    void processNewFrames(sp<Camera2Client> &client);
    status_t processFaceDetect(const CameraMetadata &frame,
            sp<Camera2Client> &client);

    CameraMetadata mLastFrame;
};


}; //namespace camera2
}; //namespace android

#endif
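For context on the face-detection path in FrameProcessor.cpp: face rectangles and landmarks arrive from the HAL in active-pixel-array coordinates, and the camera1 callback API reports them in the normalized [-1000, 1000] space, which is what arrayXToNormalized()/arrayYToNormalized() produce. The exact formulas live in camera2/Parameters.cpp and are not part of this diff; the linear mapping below is an assumption sketched for illustration.

// Hypothetical sketch of an array-to-normalized coordinate mapping;
// the real implementation is in camera2/Parameters.cpp, not shown here.
#include <cstdio>

int arrayToNormalized(int coord, int arraySize) {
    // Map [0, arraySize - 1] linearly onto [-1000, 1000] (integer math).
    return coord * 2000 / (arraySize - 1) - 1000;
}

int main() {
    const int width = 2592;  // hypothetical active array width
    std::printf("%d\n", arrayToNormalized(0, width));          // -1000
    std::printf("%d\n", arrayToNormalized(width - 1, width));  //  1000
    std::printf("%d\n", arrayToNormalized(width / 2, width));  // near 0
    return 0;
}

The -2000 landmark values written in the simple face-detection branch of processFaceDetect() are deliberately outside this range, marking the landmarks as invalid to the camera client.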