Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 507ae587 authored by Shuzhen Wang's avatar Shuzhen Wang Committed by Android (Google) Code Review
Browse files

Merge "Camera: Support feature combination query without surfaces" into main

parents 1c3a5f28 0709c28a
Loading
Loading
Loading
Loading
+63 −4
Original line number Diff line number Diff line
@@ -25,6 +25,7 @@
#include <binder/Parcel.h>
#include <gui/view/Surface.h>
#include <system/camera_metadata.h>
#include <system/graphics.h>
#include <utils/String8.h>


@@ -102,6 +103,25 @@ bool OutputConfiguration::useReadoutTimestamp() const {
    return mUseReadoutTimestamp;
}

int OutputConfiguration::getFormat() const {
    return mFormat;
}

int OutputConfiguration::getDataspace() const {
    return mDataspace;
}

// Gralloc consumer usage flags requested for this output; 0 until set.
int64_t OutputConfiguration::getUsage() const {
    return this->mUsage;
}

bool OutputConfiguration::isComplete() const {
    return !((mSurfaceType == SURFACE_TYPE_MEDIA_RECORDER ||
             mSurfaceType == SURFACE_TYPE_MEDIA_CODEC ||
             mSurfaceType == SURFACE_TYPE_IMAGE_READER) &&
             mGbps.empty());
}

OutputConfiguration::OutputConfiguration() :
        mRotation(INVALID_ROTATION),
        mSurfaceSetID(INVALID_SET_ID),
@@ -116,7 +136,10 @@ OutputConfiguration::OutputConfiguration() :
        mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
        mTimestampBase(TIMESTAMP_BASE_DEFAULT),
        mMirrorMode(MIRROR_MODE_AUTO),
        mUseReadoutTimestamp(false) {
        mUseReadoutTimestamp(false),
        mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
        mDataspace(0),
        mUsage(0) {
}

OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -234,6 +257,24 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
        return err;
    }

    int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    if ((err = parcel->readInt32(&format)) != OK) {
        ALOGE("%s: Failed to read format from parcel", __FUNCTION__);
        return err;
    }

    int dataspace = 0;
    if ((err = parcel->readInt32(&dataspace)) != OK) {
        ALOGE("%s: Failed to read dataspace from parcel", __FUNCTION__);
        return err;
    }

    int64_t usage = 0;
    if ((err = parcel->readInt64(&usage)) != OK) {
        ALOGE("%s: Failed to read usage flag from parcel", __FUNCTION__);
        return err;
    }

    mRotation = rotation;
    mSurfaceSetID = setID;
    mSurfaceType = surfaceType;
@@ -256,13 +297,17 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
    mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
    mDynamicRangeProfile = dynamicProfile;
    mColorSpace = colorSpace;
    mFormat = format;
    mDataspace = dataspace;
    mUsage = usage;

    ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d",
          ", timestampBase = %d, mirrorMode = %d, useReadoutTimestamp = %d, format = %d, "
          "dataspace = %d, usage = %" PRId64,
          __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
          mPhysicalCameraId.c_str(), mIsMultiResolution, mStreamUseCase, timestampBase,
          mMirrorMode, mUseReadoutTimestamp);
          mMirrorMode, mUseReadoutTimestamp, mFormat, mDataspace, mUsage);

    return err;
}
@@ -283,6 +328,9 @@ OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int ro
    mTimestampBase = TIMESTAMP_BASE_DEFAULT;
    mMirrorMode = MIRROR_MODE_AUTO;
    mUseReadoutTimestamp = false;
    mFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mDataspace = 0;
    mUsage = 0;
}

OutputConfiguration::OutputConfiguration(
@@ -296,7 +344,9 @@ OutputConfiguration::OutputConfiguration(
    mColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
    mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
    mTimestampBase(TIMESTAMP_BASE_DEFAULT),
    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false) { }
    mMirrorMode(MIRROR_MODE_AUTO), mUseReadoutTimestamp(false),
    mFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), mDataspace(0),
    mUsage(0) { }

status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {

@@ -362,6 +412,15 @@ status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
    err = parcel->writeInt32(mUseReadoutTimestamp ? 1 : 0);
    if (err != OK) return err;

    err = parcel->writeInt32(mFormat);
    if (err != OK) return err;

    err = parcel->writeInt32(mDataspace);
    if (err != OK) return err;

    err = parcel->writeInt64(mUsage);
    if (err != OK) return err;

    return OK;
}

+26 −4
Original line number Diff line number Diff line
@@ -38,7 +38,10 @@ public:
    enum SurfaceType {
        SURFACE_TYPE_UNKNOWN = -1,
        SURFACE_TYPE_SURFACE_VIEW = 0,
        SURFACE_TYPE_SURFACE_TEXTURE = 1
        SURFACE_TYPE_SURFACE_TEXTURE = 1,
        SURFACE_TYPE_MEDIA_RECORDER = 2,
        SURFACE_TYPE_MEDIA_CODEC = 3,
        SURFACE_TYPE_IMAGE_READER = 4
    };
    enum TimestampBaseType {
        TIMESTAMP_BASE_DEFAULT = 0,
@@ -71,6 +74,10 @@ public:
    int                        getTimestampBase() const;
    int                        getMirrorMode() const;
    bool                       useReadoutTimestamp() const;
    int                        getFormat() const;
    int                        getDataspace() const;
    int64_t                    getUsage() const;
    bool                       isComplete() const;

    // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
    const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -98,7 +105,7 @@ public:
    OutputConfiguration(const std::vector<sp<IGraphicBufferProducer>>& gbps,
                        int rotation, const std::string& physicalCameraId,
                        int surfaceSetID = INVALID_SET_ID,
                        int surfaceType = OutputConfiguration::SURFACE_TYPE_UNKNOWN, int width = 0,
                        int surfaceType = SURFACE_TYPE_UNKNOWN, int width = 0,
                        int height = 0, bool isShared = false);

    bool operator == (const OutputConfiguration& other) const {
@@ -118,7 +125,10 @@ public:
                mStreamUseCase == other.mStreamUseCase &&
                mTimestampBase == other.mTimestampBase &&
                mMirrorMode == other.mMirrorMode &&
                mUseReadoutTimestamp == other.mUseReadoutTimestamp);
                mUseReadoutTimestamp == other.mUseReadoutTimestamp &&
                mFormat == other.mFormat &&
                mDataspace == other.mDataspace &&
                mUsage == other.mUsage);
    }
    bool operator != (const OutputConfiguration& other) const {
        return !(*this == other);
@@ -173,6 +183,15 @@ public:
        if (mUseReadoutTimestamp != other.mUseReadoutTimestamp) {
            return mUseReadoutTimestamp < other.mUseReadoutTimestamp;
        }
        if (mFormat != other.mFormat) {
            return mFormat < other.mFormat;
        }
        if (mDataspace != other.mDataspace) {
            return mDataspace < other.mDataspace;
        }
        if (mUsage != other.mUsage) {
            return mUsage < other.mUsage;
        }
        return gbpsLessThan(other);
    }

@@ -203,6 +222,9 @@ private:
    int                        mTimestampBase;
    int                        mMirrorMode;
    bool                       mUseReadoutTimestamp;
    int                        mFormat;
    int                        mDataspace;
    int64_t                    mUsage;
};
} // namespace params
} // namespace camera2
+22 −4
Original line number Diff line number Diff line
@@ -892,6 +892,11 @@ binder::Status CameraDeviceClient::createStream(

    Mutex::Autolock icl(mBinderSerializationLock);

    if (!outputConfiguration.isComplete()) {
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                "OutputConfiguration isn't valid!");
    }

    const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
            outputConfiguration.getGraphicBufferProducers();
    size_t numBufferProducers = bufferProducers.size();
@@ -908,7 +913,7 @@ binder::Status CameraDeviceClient::createStream(
    bool useReadoutTimestamp = outputConfiguration.useReadoutTimestamp();

    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
            outputConfiguration.getSurfaceType());
            outputConfiguration.getSurfaceType(), /*isConfigurationComplete*/true);
    if (!res.isOk()) {
        return res;
    }
@@ -951,7 +956,7 @@ binder::Status CameraDeviceClient::createStream(
        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                streamUseCase, timestampBase, mirrorMode, colorSpace);
                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);

        if (!res.isOk())
            return res;
@@ -1064,6 +1069,10 @@ binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
    if (!mDevice.get()) {
        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
    }
    if (!outputConfiguration.isComplete()) {
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                "OutputConfiguration isn't valid!");
    }

    // Infer the surface info for deferred surface stream creation.
    width = outputConfiguration.getWidth();
@@ -1256,6 +1265,10 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
    if (!mDevice.get()) {
        return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
    }
    if (!outputConfiguration.isComplete()) {
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                "OutputConfiguration isn't valid!");
    }

    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
            outputConfiguration.getGraphicBufferProducers();
@@ -1323,7 +1336,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                streamUseCase, timestampBase, mirrorMode, colorSpace);
                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);
        if (!res.isOk())
            return res;

@@ -1636,6 +1649,11 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId

    Mutex::Autolock icl(mBinderSerializationLock);

    if (!outputConfiguration.isComplete()) {
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                "OutputConfiguration isn't valid!");
    }

    const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
            outputConfiguration.getGraphicBufferProducers();
    const std::string &physicalId = outputConfiguration.getPhysicalCameraId();
@@ -1701,7 +1719,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
                streamUseCase, timestampBase, mirrorMode, colorSpace);
                streamUseCase, timestampBase, mirrorMode, colorSpace, /*respectSurfaceSize*/false);

        if (!res.isOk())
            return res;
+62 −19
Original line number Diff line number Diff line
@@ -432,7 +432,7 @@ binder::Status createSurfaceFromGbp(
        const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed, int64_t dynamicRangeProfile,
        int64_t streamUseCase, int timestampBase, int mirrorMode,
        int32_t colorSpace) {
        int32_t colorSpace, bool respectSurfaceSize) {
    // bufferProducer must be non-null
    if (gbp == nullptr) {
        std::string msg = fmt::sprintf("Camera %s: Surface is NULL", logicalCameraId.c_str());
@@ -529,8 +529,10 @@ binder::Status createSurfaceFromGbp(
        // we can use the default stream configuration map
        foundInMaxRes = true;
    }
    // Round dimensions to the nearest dimensions available for this format
    if (flexibleConsumer && isPublicFormat(format) &&
    // Round dimensions to the nearest dimensions available for this format.
    // Only do the rounding if the client doesn't ask to respect the surface
    // size.
    if (flexibleConsumer && isPublicFormat(format) && !respectSurfaceSize &&
            !SessionConfigurationUtils::roundBufferDimensionNearest(width, height,
            format, dataSpace, physicalCameraMetadata, foundInMaxRes, /*out*/&width,
            /*out*/&height)) {
@@ -753,6 +755,7 @@ convertToHALStreamCombination(
        const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
            it.getGraphicBufferProducers();
        bool deferredConsumer = it.isDeferred();
        bool isConfigurationComplete = it.isComplete();
        const std::string &physicalCameraId = it.getPhysicalCameraId();

        int64_t dynamicRangeProfile = it.getDynamicRangeProfile();
@@ -768,7 +771,8 @@ convertToHALStreamCombination(
        int32_t groupId = it.isMultiResolution() ? it.getSurfaceSetID() : -1;
        OutputStreamInfo streamInfo;

        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType());
        res = checkSurfaceType(numBufferProducers, deferredConsumer, it.getSurfaceType(),
                               isConfigurationComplete);
        if (!res.isOk()) {
            return res;
        }
@@ -781,15 +785,38 @@ convertToHALStreamCombination(
        int64_t streamUseCase = it.getStreamUseCase();
        int timestampBase = it.getTimestampBase();
        int mirrorMode = it.getMirrorMode();
        if (deferredConsumer) {
        // If the configuration is a deferred consumer, or a not yet completed
        // configuration with no buffer producers attached.
        if (deferredConsumer || (!isConfigurationComplete && numBufferProducers == 0)) {
            streamInfo.width = it.getWidth();
            streamInfo.height = it.getHeight();
            streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
            auto surfaceType = it.getSurfaceType();
            switch (surfaceType) {
                case OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE;
            if (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) {
                streamInfo.consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_TEXTURE
                            | GraphicBuffer::USAGE_HW_COMPOSER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER:
                case OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC:
                    streamInfo.consumerUsage = GraphicBuffer::USAGE_HW_VIDEO_ENCODER;
                    streamInfo.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
                    streamInfo.dataSpace = android_dataspace_t::HAL_DATASPACE_UNKNOWN;
                    break;
                case OutputConfiguration::SURFACE_TYPE_IMAGE_READER:
                    streamInfo.consumerUsage = it.getUsage();
                    streamInfo.format = it.getFormat();
                    streamInfo.dataSpace = (android_dataspace)it.getDataspace();
                    break;
                default:
                    return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
                                        "Invalid surface type.");
            }
            streamInfo.dynamicRangeProfile = it.getDynamicRangeProfile();
            if (checkAndOverrideSensorPixelModesUsed(sensorPixelModesUsed,
@@ -815,7 +842,8 @@ convertToHALStreamCombination(
            sp<Surface> surface;
            res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
                    logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
                    streamUseCase, timestampBase, mirrorMode, colorSpace);
                    streamUseCase, timestampBase, mirrorMode, colorSpace,
                    /*respectSurfaceSize*/true);

            if (!res.isOk())
                return res;
@@ -912,22 +940,37 @@ binder::Status checkPhysicalCameraId(
}

binder::Status checkSurfaceType(size_t numBufferProducers,
        bool deferredConsumer, int surfaceType)  {
        bool deferredConsumer, int surfaceType, bool isConfigurationComplete)  {
    if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
        ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
                __FUNCTION__, numBufferProducers, MAX_SURFACES_PER_STREAM);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
    } else if ((numBufferProducers == 0) && (!deferredConsumer)) {
    } else if ((numBufferProducers == 0) && (!deferredConsumer) && isConfigurationComplete) {
        ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "No valid consumers.");
    }

    bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
    if (deferredConsumer) {
        bool validSurfaceType = (
                (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
                (surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));

    if (deferredConsumer && !validSurfaceType) {
        ALOGE("%s: Target surface has invalid surfaceType = %d.", __FUNCTION__, surfaceType);
        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
        if (!validSurfaceType) {
            std::string msg = fmt::sprintf("Deferred target surface has invalid "
                    "surfaceType = %d.", surfaceType);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
    } else if (!isConfigurationComplete && numBufferProducers == 0) {
        bool validSurfaceType = (
                (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_RECORDER) ||
                (surfaceType == OutputConfiguration::SURFACE_TYPE_MEDIA_CODEC) ||
                (surfaceType == OutputConfiguration::SURFACE_TYPE_IMAGE_READER));
        if (!validSurfaceType) {
            std::string msg = fmt::sprintf("OutputConfiguration target surface has invalid "
                    "surfaceType = %d.", surfaceType);
            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
        }
    }

    return binder::Status::ok();
+3 −3
Original line number Diff line number Diff line
@@ -112,7 +112,7 @@ binder::Status createSurfaceFromGbp(
        const std::string &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
        const std::vector<int32_t> &sensorPixelModesUsed,  int64_t dynamicRangeProfile,
        int64_t streamUseCase, int timestampBase, int mirrorMode,
        int32_t colorSpace);
        int32_t colorSpace, bool respectSurfaceSize);

//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format, android_dataspace_t dataSpace);
@@ -143,7 +143,7 @@ const std::vector<std::string> &physicalCameraIds, const std::string &physicalCa
const std::string &logicalCameraId);

binder::Status checkSurfaceType(size_t numBufferProducers,
bool deferredConsumer, int surfaceType);
        bool deferredConsumer, int surfaceType, bool isConfigurationComplete);

binder::Status checkOperatingMode(int operatingMode,
        const CameraMetadata &staticInfo, const std::string &cameraId);