Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 38df0cac authored by Shuzhen Wang's avatar Shuzhen Wang
Browse files

Camera: Add per-surface mirroring mode

Add support for a per-output-surface mirror mode. Previously, the
mirroring mode was set once per OutputConfiguration, which could
contain multiple shared surfaces.

Flag: com.android.internal.camera.flags.mirror_mode_shared_surfaces
Test: atest MultiViewTest
Bug: 298899993
Change-Id: I3654cb585d243365b62a10131c21a869af754e73
parent 6b349a87
Loading
Loading
Loading
Loading
+52 −12
Original line number Diff line number Diff line
@@ -22,12 +22,14 @@

#include <camera/camera2/OutputConfiguration.h>
#include <camera/StringUtils.h>
#include <com_android_internal_camera_flags.h>
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
#include <system/graphics.h>
#include <utils/String8.h>

namespace flags = com::android::internal::camera::flags;

namespace android {

@@ -95,7 +97,21 @@ int OutputConfiguration::getTimestampBase() const {
    return mTimestampBase;
}

int OutputConfiguration::getMirrorMode() const {
// Returns the mirror mode to apply for |surface|.
//
// When the mirror_mode_shared_surfaces flag is enabled, the per-producer
// mode recorded for the matching entry of mGbps is returned; a surface
// that is not part of this configuration falls back to mMirrorMode (with
// a warning). When the flag is disabled, mMirrorMode is always returned.
int OutputConfiguration::getMirrorMode(sp<IGraphicBufferProducer> surface) const {
    if (flags::mirror_mode_shared_surfaces()) {
        // Use per-producer mirror mode if available.
        const size_t producerCount = mGbps.size();
        for (size_t idx = 0; idx < producerCount; ++idx) {
            if (mGbps[idx] == surface) {
                return mMirrorModeForProducers[idx];
            }
        }
        // For surface that doesn't belong to this output configuration, use
        // mMirrorMode as default.
        ALOGW("%s: Surface doesn't belong to this OutputConfiguration!", __FUNCTION__);
    }
    return mMirrorMode;
}

@@ -251,6 +267,12 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
        return err;
    }

    std::vector<int> mirrorModeForProducers;
    if ((err = parcel->readInt32Vector(&mirrorModeForProducers)) != OK) {
        ALOGE("%s: Failed to read mirroring mode for surfaces from parcel", __FUNCTION__);
        return err;
    }

    int useReadoutTimestamp = 0;
    if ((err = parcel->readInt32(&useReadoutTimestamp)) != OK) {
        ALOGE("%s: Failed to read useReadoutTimestamp flag from parcel", __FUNCTION__);
@@ -286,6 +308,7 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
    mStreamUseCase = streamUseCase;
    mTimestampBase = timestampBase;
    mMirrorMode = mirrorMode;
    mMirrorModeForProducers = std::move(mirrorModeForProducers);
    mUseReadoutTimestamp = useReadoutTimestamp != 0;
    for (auto& surface : surfaceShims) {
        ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
@@ -409,6 +432,9 @@ status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
    err = parcel->writeInt32(mMirrorMode);
    if (err != OK) return err;

    err = parcel->writeInt32Vector(mMirrorModeForProducers);
    if (err != OK) return err;

    err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
    if (err != OK) return err;

@@ -438,6 +464,20 @@ static bool simpleVectorsEqual(T first, T second) {
    return true;
}

// Lexicographic "less than" for simple vector-like containers, ordering by
// size first: a shorter vector always compares less than a longer one, and
// equal-length vectors compare on the first differing element.
//
// Takes the operands by const reference instead of by value: the previous
// by-value signature copied both containers (potentially whole
// std::vector<int> payloads) on every comparison for no benefit.
template <typename T>
static bool simpleVectorsLessThan(const T& first, const T& second) {
    if (first.size() != second.size()) {
        return first.size() < second.size();
    }

    for (size_t i = 0; i < first.size(); i++) {
        if (first[i] != second[i]) {
            return first[i] < second[i];
        }
    }
    // All elements equal: not strictly less.
    return false;
}

bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
    const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
            other.getGraphicBufferProducers();
@@ -449,20 +489,20 @@ bool OutputConfiguration::sensorPixelModesUsedEqual(const OutputConfiguration& o
    return simpleVectorsEqual(othersensorPixelModesUsed, mSensorPixelModesUsed);
}

bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
    const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
bool OutputConfiguration::mirrorModesEqual(const OutputConfiguration& other) const {
    const std::vector<int>& otherMirrorModes = other.getMirrorModes();
    return simpleVectorsEqual(otherMirrorModes, mMirrorModeForProducers);

    if (mSensorPixelModesUsed.size() !=  spms.size()) {
        return mSensorPixelModesUsed.size() < spms.size();
}

    for (size_t i = 0; i < spms.size(); i++) {
        if (mSensorPixelModesUsed[i] != spms[i]) {
            return mSensorPixelModesUsed[i] < spms[i];
        }
bool OutputConfiguration::sensorPixelModesUsedLessThan(const OutputConfiguration& other) const {
    const std::vector<int32_t>& spms = other.getSensorPixelModesUsed();
    return simpleVectorsLessThan(mSensorPixelModesUsed, spms);
}

    return false;
bool OutputConfiguration::mirrorModesLessThan(const OutputConfiguration& other) const {
    const std::vector<int>& otherMirrorModes = other.getMirrorModes();
    return simpleVectorsLessThan(mMirrorModeForProducers, otherMirrorModes);
}

bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
+8 −0
Original line number Diff line number Diff line
@@ -178,3 +178,11 @@ flag {
    description: "Pass the full AttributionSource chain to PermissionChecker"
    bug: "190657833"
}

flag {
    namespace: "camera_platform"
    name: "mirror_mode_shared_surfaces"
    is_exported: true
    description: "Support setting and getting mirror mode for shared surfaces"
    bug: "298899993"
}
+9 −1
Original line number Diff line number Diff line
@@ -72,7 +72,7 @@ public:
    bool                       isMultiResolution() const;
    int64_t                    getStreamUseCase() const;
    int                        getTimestampBase() const;
    int                        getMirrorMode() const;
    int                        getMirrorMode(sp<IGraphicBufferProducer> surface) const;
    bool                       useReadoutTimestamp() const;
    int                        getFormat() const;
    int                        getDataspace() const;
@@ -125,6 +125,7 @@ public:
                mStreamUseCase == other.mStreamUseCase &&
                mTimestampBase == other.mTimestampBase &&
                mMirrorMode == other.mMirrorMode &&
                mirrorModesEqual(other) &&
                mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
                mFormat == other.mFormat &&
                mDataspace == other.mDataspace &&
@@ -180,6 +181,9 @@ public:
        if (mMirrorMode != other.mMirrorMode) {
            return mMirrorMode < other.mMirrorMode;
        }
        if (!mirrorModesEqual(other)) {
            return mirrorModesLessThan(other);
        }
        if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
            return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
        }
@@ -204,6 +208,9 @@ public:
    bool sensorPixelModesUsedLessThan(const OutputConfiguration& other) const;
    bool gbpsLessThan(const OutputConfiguration& other) const;
    void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
    bool mirrorModesEqual(const OutputConfiguration& other) const;
    bool mirrorModesLessThan(const OutputConfiguration& other) const;
    const std::vector<int32_t>& getMirrorModes() const {return mMirrorModeForProducers;}
private:
    std::vector<sp<IGraphicBufferProducer>> mGbps;
    int                        mRotation;
@@ -221,6 +228,7 @@ private:
    int64_t                    mStreamUseCase;
    int                        mTimestampBase;
    int                        mMirrorMode;
    std::vector<int>           mMirrorModeForProducers; // 1:1 mapped with mGbps
    bool                       mUseReadoutTimestamp;
    int                        mFormat;
    int                        mDataspace;
+4 −1
Original line number Diff line number Diff line
@@ -120,7 +120,10 @@ void C2OutputConfigurationFuzzer::invokeC2OutputConfigFuzzer() {
                [&]() { outputConfiguration->getColorSpace(); },
                [&]() { outputConfiguration->getStreamUseCase(); },
                [&]() { outputConfiguration->getTimestampBase(); },
                [&]() { outputConfiguration->getMirrorMode(); },
                [&]() {
                    sp<IGraphicBufferProducer> gbp = createIGraphicBufferProducer();
                    outputConfiguration->getMirrorMode(gbp);
                },
                [&]() { outputConfiguration->useReadoutTimestamp(); },
        });
        callC2OutputConfAPIs();
+21 −63
Original line number Diff line number Diff line
@@ -908,7 +908,6 @@ binder::Status CameraDeviceClient::createStream(
    int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
    int timestampBase = outputConfiguration.getTimestampBase();
    int mirrorMode = outputConfiguration.getMirrorMode();
    int32_t colorSpace = outputConfiguration.getColorSpace();
    bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();

@@ -927,7 +926,7 @@ binder::Status CameraDeviceClient::createStream(
        return res;
    }

    std::vector<sp<Surface>> surfaces;
    std::vector<SurfaceHolder> surfaces;
    std::vector<sp<IBinder>> binders;
    status_t err;

@@ -952,6 +951,7 @@ binder::Status CameraDeviceClient::createStream(
            return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.c_str());
        }

        int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
        sp<Surface> surface;
        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
@@ -966,7 +966,7 @@ binder::Status CameraDeviceClient::createStream(
        }

        binders.push_back(IInterface::asBinder(bufferProducer));
        surfaces.push_back(surface);
        surfaces.push_back({surface, mirrorMode});
    }

    // If mOverrideForPerfClass is true, do not fail createStream() for small
@@ -976,10 +976,11 @@ binder::Status CameraDeviceClient::createStream(
    int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
    std::vector<int> surfaceIds;
    bool isDepthCompositeStream =
            camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
    bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
            camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0].mSurface);
    bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(
            surfaces[0].mSurface);
    bool isJpegRCompositeStream =
        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0]) &&
        camera3::JpegRCompositeStream::isJpegRCompositeStream(surfaces[0].mSurface) &&
        !mDevice->isCompositeJpegRDisabled();
    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
        sp<CompositeStream> compositeStream;
@@ -1000,7 +1001,8 @@ binder::Status CameraDeviceClient::createStream(
                useReadoutTimestamp);
        if (err == OK) {
            Mutex::Autolock l(mCompositeLock);
            mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
            mCompositeStreamMap.add(
                    IInterface::asBinder(surfaces[0].mSurface->getIGraphicBufferProducer()),
                    compositeStream);
        }
    } else {
@@ -1010,8 +1012,7 @@ binder::Status CameraDeviceClient::createStream(
                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
                /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
                streamInfo.timestampBase, streamInfo.mirrorMode, streamInfo.colorSpace,
                useReadoutTimestamp);
                streamInfo.timestampBase, streamInfo.colorSpace, useReadoutTimestamp);
    }

    if (err != OK) {
@@ -1036,9 +1037,6 @@ binder::Status CameraDeviceClient::createStream(
                  __FUNCTION__, mCameraIdStr.c_str(), streamId, streamInfo.width,
                  streamInfo.height, streamInfo.format);

        // Set transform flags to ensure preview to be rotated correctly.
        res = setStreamTransformLocked(streamId, streamInfo.mirrorMode);

        // Fill in mHighResolutionCameraIdToStreamIdSet map
        const std::string &cameraIdUsed =
                physicalCameraId.size() != 0 ? physicalCameraId : mCameraIdStr;
@@ -1087,7 +1085,7 @@ binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
        consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
    }
    int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
    std::vector<sp<Surface>> noSurface;
    std::vector<SurfaceHolder> noSurface;
    std::vector<int> surfaceIds;
    const std::string &physicalCameraId = outputConfiguration.getPhysicalCameraId();
    const std::string &cameraIdUsed =
@@ -1113,7 +1111,6 @@ binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
            outputConfiguration.isMultiResolution(), consumerUsage,
            outputConfiguration.getDynamicRangeProfile(),
            outputConfiguration.getStreamUseCase(),
            outputConfiguration.getMirrorMode(),
            outputConfiguration.useReadoutTimestamp());

    if (err != OK) {
@@ -1132,16 +1129,12 @@ binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
                        outputConfiguration.getDynamicRangeProfile(),
                        outputConfiguration.getStreamUseCase(),
                        outputConfiguration.getTimestampBase(),
                        outputConfiguration.getMirrorMode(),
                        colorSpace));

        ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                " (%d x %d) stream with format 0x%x.",
              __FUNCTION__, mCameraIdStr.c_str(), streamId, width, height, format);

        // Set transform flags to ensure preview to be rotated correctly.
        res = setStreamTransformLocked(streamId, outputConfiguration.getMirrorMode());

        *newStreamId = streamId;
        // Fill in mHighResolutionCameraIdToStreamIdSet
        // Only needed for high resolution sensors
@@ -1153,33 +1146,6 @@ binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
    return res;
}

// Computes the rotation/mirror transform for |mirrorMode| and applies it to
// stream |streamId| on the device, so previews are rotated correctly.
// Returns an error status if the device is gone, the transform cannot be
// derived, or the device rejects it; otherwise returns OK.
binder::Status CameraDeviceClient::setStreamTransformLocked(int streamId, int mirrorMode) {
    binder::Status res;

    if (mDevice.get() == nullptr) {
        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
    }

    int32_t transform = 0;
    status_t err = getRotationTransformLocked(mirrorMode, &transform);
    if (err != OK) {
        // Error logged by getRotationTransformLocked.
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
                "Unable to calculate rotation transform for new stream");
    }

    if ((err = mDevice->setStreamTransform(streamId, transform)) != OK) {
        std::string msg = fmt::sprintf("Failed to set stream transform (stream id %d)",
                streamId);
        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.c_str());
    }

    return res;
}

binder::Status CameraDeviceClient::createInputStream(
        int width, int height, int format, bool isMultiResolution,
        /*out*/
@@ -1312,7 +1278,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,

    std::vector<size_t> removedSurfaceIds;
    std::vector<sp<IBinder>> removedOutputs;
    std::vector<sp<Surface>> newOutputs;
    std::vector<SurfaceHolder> newOutputs;
    std::vector<OutputStreamInfo> streamInfos;
    KeyedVector<sp<IBinder>, sp<IGraphicBufferProducer>> newOutputsMap;
    for (auto &it : bufferProducers) {
@@ -1341,11 +1307,11 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
    int timestampBase = outputConfiguration.getTimestampBase();
    int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
    int32_t colorSpace = outputConfiguration.getColorSpace();
    int mirrorMode = outputConfiguration.getMirrorMode();

    for (size_t i = 0; i < newOutputsMap.size(); i++) {
        OutputStreamInfo outInfo;
        sp<Surface> surface;
        int mirrorMode = outputConfiguration.getMirrorMode(newOutputsMap.valueAt(i));
        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
@@ -1354,7 +1320,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
            return res;

        streamInfos.push_back(outInfo);
        newOutputs.push_back(surface);
        newOutputs.push_back({surface, mirrorMode});
    }

    //Trivial case no changes required
@@ -1711,14 +1677,13 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
    }

    std::vector<sp<Surface>> consumerSurfaces;
    std::vector<SurfaceHolder> consumerSurfaceHolders;
    const std::vector<int32_t> &sensorPixelModesUsed =
            outputConfiguration.getSensorPixelModesUsed();
    int64_t dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
    int32_t colorSpace = outputConfiguration.getColorSpace();
    int64_t streamUseCase = outputConfiguration.getStreamUseCase();
    int timestampBase = outputConfiguration.getTimestampBase();
    int mirrorMode = outputConfiguration.getMirrorMode();
    for (auto& bufferProducer : bufferProducers) {
        // Don't create multiple streams for the same target surface
        ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1729,6 +1694,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
        }

        sp<Surface> surface;
        int mirrorMode = outputConfiguration.getMirrorMode(bufferProducer);
        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
@@ -1737,12 +1703,12 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
        if (!res.isOk())
            return res;

        consumerSurfaces.push_back(surface);
        consumerSurfaceHolders.push_back({surface, mirrorMode});
    }

    // Gracefully handle case where finalizeOutputConfigurations is called
    // without any new surface.
    if (consumerSurfaces.size() == 0) {
    if (consumerSurfaceHolders.size() == 0) {
        mStreamInfoMap[streamId].finalized = true;
        return res;
    }
@@ -1750,11 +1716,11 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
    // Finish the deferred stream configuration with the surface.
    status_t err;
    std::vector<int> consumerSurfaceIds;
    err = mDevice->setConsumerSurfaces(streamId, consumerSurfaces, &consumerSurfaceIds);
    err = mDevice->setConsumerSurfaces(streamId, consumerSurfaceHolders, &consumerSurfaceIds);
    if (err == OK) {
        for (size_t i = 0; i < consumerSurfaces.size(); i++) {
        for (size_t i = 0; i < consumerSurfaceHolders.size(); i++) {
            sp<IBinder> binder = IInterface::asBinder(
                    consumerSurfaces[i]->getIGraphicBufferProducer());
                    consumerSurfaceHolders[i].mSurface->getIGraphicBufferProducer());
            ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d", __FUNCTION__,
                    binder.get(), streamId, consumerSurfaceIds[i]);
            mStreamMap.add(binder, StreamSurfaceId(streamId, consumerSurfaceIds[i]));
@@ -2271,14 +2237,6 @@ bool CameraDeviceClient::enforceRequestPermissions(CameraMetadata& metadata) {
    return true;
}

// Derives the stream transform for |mirrorMode| from this device's static
// metadata, writing the result into |transform|.
status_t CameraDeviceClient::getRotationTransformLocked(int mirrorMode,
        int32_t* transform) {
    ALOGV("%s: begin", __FUNCTION__);

    // Delegate to the shared camera utility with the device's static info.
    return CameraUtils::getRotationTransform(mDevice->info(), mirrorMode, transform);
}

const CameraMetadata &CameraDeviceClient::getStaticInfo(const std::string &cameraId) {
    if (mDevice->getId() == cameraId) {
        return mDevice->info();
Loading