Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit cd3d36bb authored by Jayant Chowdhary
Browse files

Handle ANDROID_JPEG_MAX_SIZE for ultra high res sensors.



DepthCompositeStream conservatively allocates buffers in
processInputFrame() looking at ANDROID_JPEG_MAX_SIZE. For ultra high
resolution sensors ANDROID_JPEG_MAX_SIZE was set to the maximum buffer
size for max res mode JPEG captures.

This could lead to even default-resolution input frames being allocated
ultra-high-resolution-sized buffers, which is wasteful.

Here we go back to the original definition of ANDROID_JPEG_MAX_SIZE :
maximum jpeg buffer size for default mode captures. We estimate the
maximum jpeg buffer size for max res mode captures instead.

Bug: 193346383

Test: Camera CTS

Change-Id: I4decf1430d38219c666ea11dfe109587f7fff1ba
Signed-off-by: Jayant Chowdhary <jchowdhary@google.com>
parent 0d0a5832
Loading
Loading
Loading
Loading
+29 −8
Original line number Diff line number Diff line
@@ -42,17 +42,29 @@ DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
        mDepthBufferAcquired(false),
        mBlobBufferAcquired(false),
        mProducerListener(new ProducerListener()),
        mMaxJpegSize(-1),
        mMaxJpegBufferSize(-1),
        mUHRMaxJpegBufferSize(-1),
        mIsLogicalCamera(false) {
    if (device != nullptr) {
        CameraMetadata staticInfo = device->info();
        auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
        if (entry.count > 0) {
            mMaxJpegSize = entry.data.i32[0];
            mMaxJpegBufferSize = entry.data.i32[0];
        } else {
            ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
        }

        mUHRMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*ultraHighResolution*/true);
        mDefaultMaxJpegSize =
                SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
                        /*isUltraHighResolution*/false);

        mUHRMaxJpegBufferSize =
            SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
                    mMaxJpegBufferSize);

        entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
        if (entry.count == 5) {
            mIntrinsicCalibration.reserve(5);
@@ -243,13 +255,22 @@ status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &i
        jpegSize = inputFrame.jpegBuffer.width;
    }

    size_t maxDepthJpegSize;
    if (mMaxJpegSize > 0) {
        maxDepthJpegSize = mMaxJpegSize;
    size_t maxDepthJpegBufferSize = 0;
    if (mMaxJpegBufferSize > 0) {
        // If this is an ultra high resolution sensor and the input frames size
        // is > default res jpeg.
        if (mUHRMaxJpegSize.width != 0 &&
                inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
                mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
            maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
        } else {
        maxDepthJpegSize = std::max<size_t> (jpegSize,
            maxDepthJpegBufferSize = mMaxJpegBufferSize;
        }
    } else {
        maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
                inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
    }

    uint8_t jpegQuality = 100;
    auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
    if (entry.count > 0) {
@@ -259,7 +280,7 @@ status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &i
    // The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
    // jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpeg need
    // max jpeg size.
    size_t finalJpegBufferSize = maxDepthJpegSize * 3;
    size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;

    if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
            != OK) {
@@ -302,7 +323,7 @@ status_t DepthCompositeStream::processInputFrame(nsecs_t ts, const InputFrame &i
    depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
    depthPhoto.mJpegQuality = jpegQuality;
    depthPhoto.mIsLogical = mIsLogicalCamera;
    depthPhoto.mMaxJpegSize = maxDepthJpegSize;
    depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
    // The camera intrinsic calibration layout is as follows:
    // [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
    if (mIntrinsicCalibration.size() == 5) {
+6 −1
Original line number Diff line number Diff line
@@ -132,7 +132,12 @@ private:
    sp<Surface>          mDepthSurface, mBlobSurface, mOutputSurface;
    sp<ProducerListener> mProducerListener;

    ssize_t              mMaxJpegSize;
    ssize_t              mMaxJpegBufferSize;
    ssize_t              mUHRMaxJpegBufferSize;

    camera3::Size        mDefaultMaxJpegSize;
    camera3::Size        mUHRMaxJpegSize;

    std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
    std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
    std::vector<float>   mIntrinsicCalibration, mLensDistortion;
+25 −41
Original line number Diff line number Diff line
@@ -499,42 +499,6 @@ bool Camera3Device::tryLockSpinRightRound(Mutex& lock) {
    return gotLock;
}

camera3::Size Camera3Device::getMaxJpegResolution() const {
    // Returns the largest (area-wise) BLOB/jpeg output resolution advertised
    // in the static metadata, or Size(0, 0) when no valid stream
    // configuration entries are present.
    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
    // Stream configurations are flat (format, width, height, isInput) tuples.
    const int STREAM_CONFIGURATION_SIZE = 4;
    const int STREAM_FORMAT_OFFSET = 0;
    const int STREAM_WIDTH_OFFSET = 1;
    const int STREAM_HEIGHT_OFFSET = 2;
    const int STREAM_IS_INPUT_OFFSET = 3;
    // Ultra-high-resolution sensors advertise their largest sizes under the
    // MAXIMUM_RESOLUTION variant of the stream configurations tag.
    bool isHighResolutionSensor =
            camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo);
    int32_t scalerSizesTag = isHighResolutionSensor ?
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
    camera_metadata_ro_entry_t availableStreamConfigs =
            mDeviceInfo.find(scalerSizesTag);
    if (availableStreamConfigs.count == 0 ||
            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
        return camera3::Size(0, 0);
    }

    // Get max jpeg size (area-wise). Compare areas in 64-bit: a 32-bit
    // width * height product can exceed INT32_MAX for large sensors, which
    // would be signed-overflow UB and could pick the wrong "max" size.
    for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
                && format == HAL_PIXEL_FORMAT_BLOB &&
                (static_cast<long long>(width) * height >
                        static_cast<long long>(maxJpegWidth) * maxJpegHeight)) {
            maxJpegWidth = width;
            maxJpegHeight = height;
        }
    }

    return camera3::Size(maxJpegWidth, maxJpegHeight);
}

nsecs_t Camera3Device::getMonoToBoottimeOffset() {
    // try three times to get the clock offset, choose the one
    // with the minimum gap in measurements.
@@ -625,13 +589,26 @@ uint64_t Camera3Device::mapProducerToFrameworkUsage(
}

ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
    // Get max jpeg size (area-wise).
    camera3::Size maxJpegResolution = getMaxJpegResolution();
    if (maxJpegResolution.width == 0) {
    // Get max jpeg size (area-wise) for default sensor pixel mode
    camera3::Size maxDefaultJpegResolution =
            SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
                    /*isUltraHighResolutionSensor*/false);
    // Get max jpeg size (area-wise) for max resolution sensor pixel mode / 0 if
    // not ultra high res sensor
    camera3::Size uhrMaxJpegResolution =
            SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
                    /*isUltraHighResolution*/true);
    if (maxDefaultJpegResolution.width == 0) {
        ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
                __FUNCTION__, mId.string());
        return BAD_VALUE;
    }
    bool useMaxSensorPixelModeThreshold = false;
    if (uhrMaxJpegResolution.width != 0 &&
            width * height > maxDefaultJpegResolution.width * maxDefaultJpegResolution.height) {
        // Use the ultra high res max jpeg size and max jpeg buffer size
        useMaxSensorPixelModeThreshold = true;
    }

    // Get max jpeg buffer size
    ssize_t maxJpegBufferSize = 0;
@@ -642,11 +619,19 @@ ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const
        return BAD_VALUE;
    }
    maxJpegBufferSize = jpegBufMaxSize.data.i32[0];

    camera3::Size chosenMaxJpegResolution = maxDefaultJpegResolution;
    if (useMaxSensorPixelModeThreshold) {
        maxJpegBufferSize =
                SessionConfigurationUtils::getUHRMaxJpegBufferSize(uhrMaxJpegResolution,
                        maxDefaultJpegResolution, maxJpegBufferSize);
        chosenMaxJpegResolution = uhrMaxJpegResolution;
    }
    assert(kMinJpegBufferSize < maxJpegBufferSize);

    // Calculate final jpeg buffer size for the given resolution.
    float scaleFactor = ((float) (width * height)) /
            (maxJpegResolution.width * maxJpegResolution.height);
            (chosenMaxJpegResolution.width * chosenMaxJpegResolution.height);
    ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
            kMinJpegBufferSize;
    if (jpegBufferSize > maxJpegBufferSize) {
@@ -654,7 +639,6 @@ ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const
                  __FUNCTION__, maxJpegBufferSize);
        jpegBufferSize = maxJpegBufferSize;
    }

    return jpegBufferSize;
}

+0 −6
Original line number Diff line number Diff line
@@ -775,12 +775,6 @@ class Camera3Device :
     */
    bool               tryLockSpinRightRound(Mutex& lock);

    /**
     * Helper function to get the largest Jpeg resolution (in area)
     * Return Size(0, 0) if static metatdata is invalid
     */
    camera3::Size getMaxJpegResolution() const;

    /**
     * Helper function to get the offset between MONOTONIC and BOOTTIME
     * timestamp.
+42 −0
Original line number Diff line number Diff line
@@ -36,6 +36,48 @@ int32_t SessionConfigurationUtils::PERF_CLASS_LEVEL =

bool SessionConfigurationUtils::IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);

camera3::Size SessionConfigurationUtils::getMaxJpegResolution(const CameraMetadata &metadata,
        bool ultraHighResolution) {
    // Returns the largest (area-wise) BLOB/jpeg output resolution listed in
    // |metadata|. When |ultraHighResolution| is true the MAXIMUM_RESOLUTION
    // variant of the stream configurations tag is consulted instead of the
    // default one. Returns Size(0, 0) when no valid entries exist (including
    // when the sensor is not ultra high resolution and the max-res tag is
    // absent).
    int32_t maxJpegWidth = 0, maxJpegHeight = 0;
    // Stream configurations are flat (format, width, height, isInput) tuples.
    const int STREAM_CONFIGURATION_SIZE = 4;
    const int STREAM_FORMAT_OFFSET = 0;
    const int STREAM_WIDTH_OFFSET = 1;
    const int STREAM_HEIGHT_OFFSET = 2;
    const int STREAM_IS_INPUT_OFFSET = 3;

    int32_t scalerSizesTag = ultraHighResolution ?
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
    camera_metadata_ro_entry_t availableStreamConfigs =
            metadata.find(scalerSizesTag);
    if (availableStreamConfigs.count == 0 ||
            availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
        return camera3::Size(0, 0);
    }

    // Get max jpeg size (area-wise). Compare areas in 64-bit: a 32-bit
    // width * height product can exceed INT32_MAX for large sensors, which
    // would be signed-overflow UB and could pick the wrong "max" size.
    for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
        int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
        int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
        int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
        int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
        if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
                && format == HAL_PIXEL_FORMAT_BLOB &&
                (static_cast<long long>(width) * height >
                        static_cast<long long>(maxJpegWidth) * maxJpegHeight)) {
            maxJpegWidth = width;
            maxJpegHeight = height;
        }
    }

    return camera3::Size(maxJpegWidth, maxJpegHeight);
}

size_t SessionConfigurationUtils::getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
        camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
    // Estimates the max jpeg buffer size for max-resolution sensor pixel mode
    // by scaling the default-mode buffer size (ANDROID_JPEG_MAX_SIZE) by the
    // ratio of the UHR jpeg area to the default-mode jpeg area.
    unsigned long long defaultArea =
            static_cast<unsigned long long>(defaultMaxJpegSize.width) *
                    defaultMaxJpegSize.height;
    if (defaultArea == 0) {
        // No valid default-mode jpeg size advertised (see the Size(0, 0)
        // fallback in getMaxJpegResolution); return the default buffer size
        // rather than dividing by zero.
        return defaultMaxJpegBufferSize;
    }
    unsigned long long uhrArea =
            static_cast<unsigned long long>(uhrMaxJpegSize.width) * uhrMaxJpegSize.height;
    // Multiply before dividing, in 64 bits, so integer division does not
    // truncate the area ratio first — e.g. a 1.9x area ratio must not
    // collapse to 1x and under-size the buffer.
    return static_cast<size_t>(uhrArea * defaultMaxJpegBufferSize / defaultArea);
}

void StreamConfiguration::getStreamConfigurations(
        const CameraMetadata &staticInfo, int configuration,
        std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
Loading