
Commit f0d6a48c authored by Wu-cheng Li

Add frame metadata parameter to camera data_callback.

bug:4460717
Change-Id: Ib47d7d7df20af8155a719f3dabefe030893bfebc
parent 2043b01b
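
This change plumbs a camera_frame_metadata_t pointer through the camera data callback path so that per-frame metadata (currently face-detection results) can travel alongside the image data. Callers that have no metadata for a frame pass NULL, as the stub HAL below now does. As a hedged illustration only (not part of this commit), a callback implementation matching the updated data_callback typedef could look roughly like this; the number_of_faces member is assumed from the camera_frame_metadata_t definition in system/camera.h and is not introduced here:

// Hedged sketch, not from this commit: a data_callback matching the new typedef.
// camera_frame_metadata_t::number_of_faces is an assumed field (system/camera.h).
static void exampleDataCallback(int32_t msgType,
                                const sp<IMemory> &dataPtr,
                                camera_frame_metadata_t *metadata,
                                void *user) {
    if (metadata != NULL) {
        // Metadata now arrives with the same callback that delivers the frame.
        LOGV("msg 0x%x carries %d face(s)", msgType, metadata->number_of_faces);
    }
    // ... handle dataPtr exactly as before ...
}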
+21 −17
@@ -131,7 +131,7 @@ public class Camera {
     private static final int CAMERA_MSG_RAW_IMAGE        = 0x080;
     private static final int CAMERA_MSG_COMPRESSED_IMAGE = 0x100;
     private static final int CAMERA_MSG_RAW_IMAGE_NOTIFY = 0x200;
-    private static final int CAMERA_MSG_FACE             = 0x400;
+    private static final int CAMERA_MSG_METADATA_FACE    = 0x400;
     private static final int CAMERA_MSG_ALL_MSGS         = 0x4FF;
 
     private int mNativeContext; // accessed by native methods
@@ -721,9 +721,9 @@ public class Camera {
                 }
                 return;
 
-            case CAMERA_MSG_FACE:
+            case CAMERA_MSG_METADATA_FACE:
                 if (mFaceListener != null) {
-                    mFaceListener.onFaceDetection((FaceMetadata[])msg.obj, mCamera);
+                    mFaceListener.onFaceDetection((Face[])msg.obj, mCamera);
                 }
                 return;
 
@@ -1078,11 +1078,11 @@ public class Camera {
         /**
          * Notify the listener of the detected faces in the preview frame.
          *
-         * @param faceMetadata the face information. The list is sorted by the
-         *        score. The highest score is the first element.
+         * @param faces the detected faces. The list is sorted by the score.
+         *              The highest score is the first element.
          * @param camera  the Camera service object
          */
-        void onFaceDetection(FaceMetadata[] faceMetadata, Camera camera);
+        void onFaceDetection(Face[] faces, Camera camera);
     }
 
     /**
@@ -1151,20 +1151,24 @@ public class Camera {
     private native final void _stopFaceDetection();
 
     /**
-     * The information of a face.
+     * The information of a face from camera face detection.
      *
      * @hide
      */
-    public static class FaceMetadata {
+    public static class Face {
         /**
          * Bounds of the face. (-1000, -1000) represents the top-left of the
          * camera field of view, and (1000, 1000) represents the bottom-right of
-         * the field of view. This is supported by both hardware and software
-         * face detection.
+         * the field of view. The width and height cannot be 0 or negative. This
+         * is supported by both hardware and software face detection.
+         *
+         * <p>The direction is relative to the sensor orientation, that is, what
+         * the sensor sees. The direction is not affected by the rotation or
+         * mirroring of {@link #setDisplayOrientation(int)}.</p>
          *
          * @see #startFaceDetection(int)
          */
-        Rect face;
+        Rect rect;
 
         /**
          * The confidence level of the face. The range is 1 to 100. 100 is the
@@ -1183,20 +1187,20 @@ public class Camera {
         int id;
 
         /**
-         * The coordinates of the center of the left eye. null if this is not
-         * supported.
+         * The coordinates of the center of the left eye. The range is -1000 to
+         * 1000. null if this is not supported.
          */
         Point leftEye;
 
         /**
-         * The coordinates of the center of the right eye. null if this is not
-         * supported.
+         * The coordinates of the center of the right eye. The range is -1000 to
+         * 1000. null if this is not supported.
          */
         Point rightEye;
 
         /**
-         * The coordinates of the center of the mouth. null if this is not
-         * supported.
+         * The coordinates of the center of the mouth. The range is -1000 to
+         * 1000. null if this is not supported.
          */
         Point mouth;
     }
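
The Face documentation above fixes the coordinate convention: bounds and eye/mouth points live in a (-1000, -1000) to (1000, 1000) space tied to the sensor orientation, and score ranges from 1 to 100. As a hedged sketch only, a HAL-side helper converting a detector's pixel-space rectangle into that space might look as follows; camera_face_t and its rect[4]/score members are assumed from system/camera.h rather than taken from this commit:

// Hedged sketch, not from this commit: map pixel coordinates into the
// documented (-1000,-1000)..(1000,1000) driver coordinate space.
// camera_face_t's rect[4] (left, top, right, bottom) and score are assumed.
static void fillFace(camera_face_t *face,
                     int left, int top, int right, int bottom,
                     int score, int frameWidth, int frameHeight) {
    face->rect[0] = left   * 2000 / frameWidth  - 1000;  // left
    face->rect[1] = top    * 2000 / frameHeight - 1000;  // top
    face->rect[2] = right  * 2000 / frameWidth  - 1000;  // right
    face->rect[3] = bottom * 2000 / frameHeight - 1000;  // bottom
    face->score   = score;  // documented range: 1 (lowest) to 100 (highest)
}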
+3 −3
@@ -38,7 +38,7 @@ struct fields_t {
     jfieldID    surfaceTexture;
     jfieldID    facing;
     jfieldID    orientation;
-    jfieldID    face_rectangle;
+    jfieldID    face_rect;
     jfieldID    face_score;
     jfieldID    rect_left;
     jfieldID    rect_top;
@@ -859,8 +859,8 @@ int register_android_hardware_Camera(JNIEnv *env)
           ANDROID_GRAPHICS_SURFACETEXTURE_JNI_ID, "I", &fields.surfaceTexture },
         { "android/hardware/Camera$CameraInfo", "facing",   "I", &fields.facing },
         { "android/hardware/Camera$CameraInfo", "orientation",   "I", &fields.orientation },
-        { "android/hardware/Camera$FaceMetadata", "face", "Landroid/graphics/Rect;", &fields.face_rectangle },
-        { "android/hardware/Camera$FaceMetadata", "score", "I", &fields.face_score },
+        { "android/hardware/Camera$Face", "rect", "Landroid/graphics/Rect;", &fields.face_rect },
+        { "android/hardware/Camera$Face", "score", "I", &fields.face_score },
         { "android/graphics/Rect", "left", "I", &fields.rect_left },
         { "android/graphics/Rect", "top", "I", &fields.rect_top },
         { "android/graphics/Rect", "right", "I", &fields.rect_right },
+3 −1
@@ -38,6 +38,7 @@ typedef void (*notify_callback)(int32_t msgType,
 
 typedef void (*data_callback)(int32_t msgType,
                             const sp<IMemory> &dataPtr,
+                            camera_frame_metadata_t *metadata,
                             void* user);
 
 typedef void (*data_callback_timestamp)(nsecs_t timestamp,
@@ -442,6 +443,7 @@ private:
 
     static void __data_cb(int32_t msg_type,
                           const camera_memory_t *data, unsigned int index,
+                          camera_frame_metadata_t *metadata,
                           void *user)
     {
         LOGV("%s", __FUNCTION__);
@@ -453,7 +455,7 @@ private:
                  index, mem->mNumBufs);
             return;
         }
-        __this->mDataCb(msg_type, mem->mBuffers[index], __this->mCbUser);
+        __this->mDataCb(msg_type, mem->mBuffers[index], metadata, __this->mCbUser);
     }
 
     static void __data_cb_timestamp(nsecs_t timestamp, int32_t msg_type,
+3 −3
@@ -180,7 +180,7 @@ int CameraHardwareStub::previewThread()
 
         // Notify the client of a new frame.
         if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
-            mDataCb(CAMERA_MSG_PREVIEW_FRAME, buffer, mCallbackCookie);
+            mDataCb(CAMERA_MSG_PREVIEW_FRAME, buffer, NULL, mCallbackCookie);
 
         // Advance the buffer pointer.
         mCurrentPreviewFrame = (mCurrentPreviewFrame + 1) % kBufferCount;
@@ -290,14 +290,14 @@ int CameraHardwareStub::pictureThread()
         sp<MemoryBase> mem = new MemoryBase(mRawHeap, 0, w * h * 3 / 2);
         FakeCamera cam(w, h);
         cam.getNextFrameAsYuv420((uint8_t *)mRawHeap->base());
-        mDataCb(CAMERA_MSG_RAW_IMAGE, mem, mCallbackCookie);
+        mDataCb(CAMERA_MSG_RAW_IMAGE, mem, NULL, mCallbackCookie);
     }
 
     if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) {
         sp<MemoryHeapBase> heap = new MemoryHeapBase(kCannedJpegSize);
         sp<MemoryBase> mem = new MemoryBase(heap, 0, kCannedJpegSize);
         memcpy(heap->base(), kCannedJpeg, kCannedJpegSize);
-        mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mem, mCallbackCookie);
+        mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mem, NULL, mCallbackCookie);
     }
     return NO_ERROR;
 }
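
The stub HAL produces no per-frame metadata, so each mDataCb call simply gains an explicit NULL. A HAL with real face detection would instead fill a camera_frame_metadata_t and hand it to the same callback, roughly as sketched below; the faces/number_of_faces members are assumed from system/camera.h, and how the metadata is routed to the Java-side CAMERA_MSG_METADATA_FACE message is outside this commit:

// Hedged sketch, not from this commit: deliver one detected face with the frame.
camera_face_t face = {};
face.rect[0] = -200; face.rect[1] = -200;   // left, top (driver coordinates)
face.rect[2] =  200; face.rect[3] =  200;   // right, bottom
face.score   = 90;

camera_frame_metadata_t frameMetadata = {};
frameMetadata.number_of_faces = 1;
frameMetadata.faces = &face;

if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
    mDataCb(CAMERA_MSG_PREVIEW_FRAME, buffer, &frameMetadata, mCallbackCookie);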
+1 −1
@@ -988,7 +988,7 @@ void CameraService::Client::notifyCallback(int32_t msgType, int32_t ext1,
 }
 
 void CameraService::Client::dataCallback(int32_t msgType,
-        const sp<IMemory>& dataPtr, void* user) {
+        const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
     LOG2("dataCallback(%d)", msgType);
 
     sp<Client> client = getClientFromCookie(user);