Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 0fdfbcc5 authored by Luca Stefani
Browse files

Merge remote-tracking branch 'aosp/pie-gsi' into HEAD

* aosp/pie-gsi:
  DO NOT MERGE: add color converter for NV12 to RGB
  Copy native_handle in MetaData mode
  Camera: Use physical camera's capability for physical stream check

Change-Id: I209b8314256d35573d94de44052a6bc780f34545
parents d17862bc 21b39a8f
Loading
Loading
Loading
Loading
+19 −2
Original line number Diff line number Diff line
@@ -19,6 +19,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <media/hardware/HardwareAPI.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
@@ -172,8 +173,16 @@ void CameraSourceTimeLapse::signalBufferReturned(MediaBufferBase* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        if (metaDataStoredInVideoBuffers() == kMetadataBufferTypeNativeHandleSource) {
            native_handle_t* handle = (
                (VideoNativeHandleMetadata*)(mLastReadBufferCopy->data()))->pHandle;
            native_handle_close(handle);
            native_handle_delete(handle);
        }
        buffer->setObserver(NULL);
        buffer->release();
        mLastReadBufferCopy = NULL;
        mForceRead = true;
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
@@ -182,7 +191,8 @@ void CameraSourceTimeLapse::signalBufferReturned(MediaBufferBase* buffer) {
void createMediaBufferCopy(
        const MediaBufferBase& sourceBuffer,
        int64_t frameTime,
        MediaBufferBase **newBuffer) {
        MediaBufferBase **newBuffer,
        int32_t videoBufferMode) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
@@ -192,13 +202,20 @@ void createMediaBufferCopy(
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);

    if (videoBufferMode == kMetadataBufferTypeNativeHandleSource) {
        ((VideoNativeHandleMetadata*)((*newBuffer)->data()))->pHandle =
            native_handle_clone(
                ((VideoNativeHandleMetadata*)(sourceBuffer.data()))->pHandle);
    }
}

void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy,
        metaDataStoredInVideoBuffers());
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}
+44 −1
Original line number Diff line number Diff line
@@ -24,6 +24,8 @@
#include <media/stagefright/MediaErrors.h>

#include "libyuv/convert_from.h"
#include "libyuv/convert_argb.h"
#include "libyuv/planar_functions.h"
#include "libyuv/video_common.h"
#include <functional>
#include <sys/time.h>
@@ -70,10 +72,17 @@ bool ColorConverter::isValid() const {

        case OMX_COLOR_FormatCbYCrY:
        case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
        case OMX_COLOR_FormatYUV420SemiPlanar:
        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
            return mDstFormat == OMX_COLOR_Format16bitRGB565;

        case OMX_COLOR_FormatYUV420SemiPlanar:
#ifdef USE_LIBYUV
            return mDstFormat == OMX_COLOR_Format16bitRGB565
                    || mDstFormat == OMX_COLOR_Format32BitRGBA8888;
#else
            return mDstFormat == OMX_COLOR_Format16bitRGB565;
#endif

        default:
            return false;
    }
@@ -200,7 +209,11 @@ status_t ColorConverter::convert(
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
#ifdef USE_LIBYUV
            err = convertYUV420SemiPlanarUseLibYUV(src, dst);
#else
            err = convertYUV420SemiPlanar(src, dst);
#endif
            break;

        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
@@ -313,6 +326,36 @@ status_t ColorConverter::convertYUV420PlanarUseLibYUV(
    return OK;
}

// Convert an NV12 (YUV 4:2:0 semi-planar, interleaved UV) source bitmap into
// the destination bitmap using libyuv's optimized converters.
// Supported destinations: RGB565 and RGBA8888; anything else is rejected
// with ERROR_UNSUPPORTED.
status_t ColorConverter::convertYUV420SemiPlanarUseLibYUV(
        const BitmapParams &src, const BitmapParams &dst) {
    // Destination write pointer, advanced to the top-left of the crop rect.
    uint8_t *dstPtr = (uint8_t *)dst.mBits
        + dst.mCropTop * dst.mStride + dst.mCropLeft * dst.mBpp;

    // Y plane, advanced to the crop origin.
    const uint8_t *srcY =
        (const uint8_t *)src.mBits + src.mCropTop * src.mStride + src.mCropLeft;

    // Interleaved UV plane starts immediately after the full Y plane
    // (mStride * mHeight bytes in).
    // NOTE(review): the UV offset applies mCropTop without halving it, even
    // though the UV plane is vertically subsampled 2:1 — confirm this is
    // intended for callers that use a non-zero vertical crop.
    const uint8_t *srcUV =
        (const uint8_t *)src.mBits + src.mStride * src.mHeight
        + src.mCropTop * src.mStride + src.mCropLeft;

    switch (mDstFormat) {
    case OMX_COLOR_Format16bitRGB565:
        libyuv::NV12ToRGB565(srcY, src.mStride, srcUV, src.mStride,
                (uint8 *)dstPtr, dst.mStride, src.cropWidth(), src.cropHeight());
        return OK;

    case OMX_COLOR_Format32BitRGBA8888:
        libyuv::NV12ToARGB(srcY, src.mStride, srcUV, src.mStride,
                (uint8 *)dstPtr, dst.mStride, src.cropWidth(), src.cropHeight());
        return OK;

    default:
        return ERROR_UNSUPPORTED;
    }
}

std::function<void (void *, void *, void *, size_t,
                    signed *, signed *, signed *, signed *)>
getReadFromSrc(OMX_COLOR_FORMATTYPE srcFormat) {
+3 −0
Original line number Diff line number Diff line
@@ -78,6 +78,9 @@ private:
    status_t convertYUV420PlanarUseLibYUV(
            const BitmapParams &src, const BitmapParams &dst);

    status_t convertYUV420SemiPlanarUseLibYUV(
            const BitmapParams &src, const BitmapParams &dst);

    status_t convertYUV420Planar16(
            const BitmapParams &src, const BitmapParams &dst);

+10 −5
Original line number Diff line number Diff line
@@ -661,7 +661,8 @@ binder::Status CameraDeviceClient::createStream(
        }

        sp<Surface> surface;
        res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer);
        res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                physicalCameraId);

        if (!res.isOk())
            return res;
@@ -889,6 +890,8 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,

    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
            outputConfiguration.getGraphicBufferProducers();
    String8 physicalCameraId(outputConfiguration.getPhysicalCameraId());

    auto producerCount = bufferProducers.size();
    if (producerCount == 0) {
        ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
@@ -942,7 +945,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
        OutputStreamInfo outInfo;
        sp<Surface> surface;
        res = createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false, surface,
                newOutputsMap.valueAt(i));
                newOutputsMap.valueAt(i), physicalCameraId);
        if (!res.isOk())
            return res;

@@ -1021,7 +1024,8 @@ bool CameraDeviceClient::isPublicFormat(int32_t format)

binder::Status CameraDeviceClient::createSurfaceFromGbp(
        OutputStreamInfo& streamInfo, bool isStreamInfoValid,
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp) {
        sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
        const String8& physicalId) {

    // bufferProducer must be non-null
    if (gbp == nullptr) {
@@ -1098,7 +1102,7 @@ binder::Status CameraDeviceClient::createSurfaceFromGbp(
    // Round dimensions to the nearest dimensions available for this format
    if (flexibleConsumer && isPublicFormat(format) &&
            !CameraDeviceClient::roundBufferDimensionNearest(width, height,
            format, dataSpace, mDevice->info(), /*out*/&width, /*out*/&height)) {
            format, dataSpace, mDevice->info(physicalId), /*out*/&width, /*out*/&height)) {
        String8 msg = String8::format("Camera %s: No supported stream configurations with "
                "format %#x defined, failed to create output stream",
                mCameraIdStr.string(), format);
@@ -1468,6 +1472,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId

    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
            outputConfiguration.getGraphicBufferProducers();
    String8 physicalId(outputConfiguration.getPhysicalCameraId());

    if (bufferProducers.size() == 0) {
        ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
@@ -1521,7 +1526,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId

        sp<Surface> surface;
        res = createSurfaceFromGbp(mStreamInfoMap[streamId], true /*isStreamInfoValid*/,
                surface, bufferProducer);
                surface, bufferProducer, physicalId);

        if (!res.isOk())
            return res;
+2 −1
Original line number Diff line number Diff line
@@ -258,7 +258,8 @@ private:
    // Create a Surface from an IGraphicBufferProducer. Returns error if
    // IGraphicBufferProducer's property doesn't match with streamInfo
    binder::Status createSurfaceFromGbp(OutputStreamInfo& streamInfo, bool isStreamInfoValid,
            sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp);
            sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
            const String8& physicalCameraId);


    // Utility method to insert the surface into SurfaceMap
Loading