Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit f510eb3b authored by Emilian Peev's avatar Emilian Peev
Browse files

Camera: Add support for active physical camera crop

Enable clients to receive information about the
current source physical camera crop.
Additionally allow devices to report intra-frame
lens intrinsic samples.

Bug: 297083874
Test: Camera CTS
Change-Id: I85f909db212eeb19c28d8107adfefe4156136d4e
parent fae25ec1
Loading
Loading
Loading
Loading
+8 −0
Original line number Diff line number Diff line
@@ -19480,6 +19480,7 @@ package android.hardware.camera2 {
    field @Deprecated @NonNull public static final android.hardware.camera2.CaptureResult.Key<float[]> LENS_RADIAL_DISTORTION;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Integer> LENS_STATE;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.String> LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID;
    field @FlaggedApi("com.android.internal.camera.flags.concert_mode") @NonNull public static final android.hardware.camera2.CaptureResult.Key<android.graphics.Rect> LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Integer> NOISE_REDUCTION_MODE;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Float> REPROCESS_EFFECTIVE_EXPOSURE_FACTOR;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Byte> REQUEST_PIPELINE_DEPTH;
@@ -19505,6 +19506,7 @@ package android.hardware.camera2 {
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Integer> STATISTICS_FACE_DETECT_MODE;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<android.graphics.Point[]> STATISTICS_HOT_PIXEL_MAP;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Boolean> STATISTICS_HOT_PIXEL_MAP_MODE;
    field @FlaggedApi("com.android.internal.camera.flags.concert_mode") @NonNull public static final android.hardware.camera2.CaptureResult.Key<android.hardware.camera2.params.LensIntrinsicsSample[]> STATISTICS_LENS_INTRINSICS_SAMPLES;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<android.hardware.camera2.params.LensShadingMap> STATISTICS_LENS_SHADING_CORRECTION_MAP;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Integer> STATISTICS_LENS_SHADING_MAP_MODE;
    field @NonNull public static final android.hardware.camera2.CaptureResult.Key<java.lang.Integer> STATISTICS_OIS_DATA_MODE;
@@ -19661,6 +19663,12 @@ package android.hardware.camera2.params {
    method public boolean isMultiResolution();
  }
  @FlaggedApi("com.android.internal.camera.flags.concert_mode") public final class LensIntrinsicsSample {
    ctor @FlaggedApi("com.android.internal.camera.flags.concert_mode") public LensIntrinsicsSample(long, @NonNull float[]);
    method @FlaggedApi("com.android.internal.camera.flags.concert_mode") @NonNull public float[] getLensIntrinsics();
    method @FlaggedApi("com.android.internal.camera.flags.concert_mode") public long getTimestamp();
  }
  public final class LensShadingMap {
    method public void copyGainFactors(float[], int);
    method public int getColumnCount();
+103 −0
Original line number Diff line number Diff line
@@ -5225,6 +5225,60 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
    public static final Key<android.hardware.camera2.params.OisSample[]> STATISTICS_OIS_SAMPLES =
            new Key<android.hardware.camera2.params.OisSample[]>("android.statistics.oisSamples", android.hardware.camera2.params.OisSample[].class);

    /**
     * <p>An array of intra-frame lens intrinsic samples.</p>
     * <p>Contains an array of intra-frame {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} updates. This must
     * not be confused with or compared to {@link CaptureResult#STATISTICS_OIS_SAMPLES android.statistics.oisSamples}. Although OIS could be the
     * main driver, all relevant factors such as focus distance and optical zoom must also
     * be included. Do note that OIS samples must not be applied on top of the lens intrinsic
     * samples.</p>
     * <p>Support for this capture result can be queried via
     * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.
     * If available, clients can expect multiple samples per capture result. The specific
     * amount will depend on current frame duration and sampling rate. Generally a sampling rate
     * greater than or equal to 200Hz is considered sufficient for high quality results.</p>
     * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p>
     *
     * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
     * @see CaptureResult#STATISTICS_OIS_SAMPLES
     */
    @PublicKey
    @NonNull
    @SyntheticKey
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    // Synthetic key: the value is assembled at query time from the hidden per-sample
    // timestamp and intrinsics keys instead of being read directly from native metadata.
    public static final Key<android.hardware.camera2.params.LensIntrinsicsSample[]> STATISTICS_LENS_INTRINSICS_SAMPLES =
            new Key<android.hardware.camera2.params.LensIntrinsicsSample[]>("android.statistics.lensIntrinsicsSamples", android.hardware.camera2.params.LensIntrinsicsSample[].class);

    /**
     * <p>An array of timestamps of lens intrinsics samples, in nanoseconds.</p>
     * <p>The array contains the timestamps of lens intrinsics samples. The timestamps are in the
     * same timebase as and comparable to {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp}.</p>
     * <p>Entry {@code i} pairs with the {@code i}-th 5-element group of
     * {@link CaptureResult#STATISTICS_LENS_INTRINSIC_SAMPLES}; the two hidden keys together
     * back the public synthetic {@link CaptureResult#STATISTICS_LENS_INTRINSICS_SAMPLES} key.</p>
     * <p><b>Units</b>: nanoseconds</p>
     * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p>
     *
     * @see CaptureResult#SENSOR_TIMESTAMP
     * @hide
     */
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    public static final Key<long[]> STATISTICS_LENS_INTRINSIC_TIMESTAMPS =
            new Key<long[]>("android.statistics.lensIntrinsicTimestamps", long[].class);

    /**
     * <p>An array of intra-frame lens intrinsics.</p>
     * <p>The data layout and contents of individual array entries matches with
     * {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}.</p>
     * <p>The array is a flat sequence of 5-element calibration groups, one group per entry of
     * {@link CaptureResult#STATISTICS_LENS_INTRINSIC_TIMESTAMPS}; the two hidden keys together
     * back the public synthetic {@link CaptureResult#STATISTICS_LENS_INTRINSICS_SAMPLES} key.</p>
     * <p><b>Units</b>:
     * Pixels in the {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} coordinate system.</p>
     * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p>
     *
     * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
     * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
     * @hide
     */
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    public static final Key<float[]> STATISTICS_LENS_INTRINSIC_SAMPLES =
            new Key<float[]>("android.statistics.lensIntrinsicSamples", float[].class);

    /**
     * <p>Tonemapping / contrast / gamma curve for the blue
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
@@ -5667,6 +5721,55 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
    public static final Key<String> LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID =
            new Key<String>("android.logicalMultiCamera.activePhysicalId", String.class);

    /**
     * <p>The current region of the active physical sensor that will be read out for this
     * capture.</p>
     * <p>This capture result matches with {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} on non-logical single
     * camera sensor devices. In case of logical cameras that can switch between several
     * physical devices in response to {@link CaptureRequest#CONTROL_ZOOM_RATIO android.control.zoomRatio}, this capture result will
     * not behave like {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} and {@link CaptureRequest#CONTROL_ZOOM_RATIO android.control.zoomRatio}, where the
     * combination of both reflects the effective zoom and crop of the logical camera output.
     * Instead, this capture result value will describe the zoom and crop of the active physical
     * device. Some examples of when the value of this capture result will change include
     * switches between different physical lenses, switches between regular and maximum
     * resolution pixel mode and going through the device digital or optical range.
     * This capture result is similar to {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} with respect to distortion
     * correction. When the distortion correction mode is OFF, the coordinate system follows
     * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}, with (0, 0) being the top-left pixel
     * of the pre-correction active array. When the distortion correction mode is not OFF,
     * the coordinate system follows {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with (0, 0) being
     * the top-left pixel of the active array.</p>
     * <p>For camera devices with the
     * {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR }
     * capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys }
     * lists {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode},
     * the current active physical device
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION android.sensor.info.activeArraySizeMaximumResolution} /
     * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION android.sensor.info.preCorrectionActiveArraySizeMaximumResolution} must be used as the
     * coordinate system for requests where {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode} is set to
     * {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION }.</p>
     * <p><b>Units</b>: Pixel coordinates relative to
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} or
     * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} of the currently
     * active physical device ({@link CaptureResult#LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID android.logicalMultiCamera.activePhysicalId}), depending on distortion correction capability
     * and mode</p>
     * <p><b>Optional</b> - The value for this key may be {@code null} on some devices.</p>
     *
     * @see CaptureRequest#CONTROL_ZOOM_RATIO
     * @see CaptureResult#LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
     * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
     * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
     * @see CaptureRequest#SENSOR_PIXEL_MODE
     */
    @PublicKey
    @NonNull
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    public static final Key<android.graphics.Rect> LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION =
            new Key<android.graphics.Rect>("android.logicalMultiCamera.activePhysicalSensorCropRegion", android.graphics.Rect.class);

    /**
     * <p>Mode of operation for the lens distortion correction block.</p>
     * <p>The lens distortion correction block attempts to improve image quality by fixing
+69 −0
Original line number Diff line number Diff line
@@ -55,6 +55,7 @@ import android.hardware.camera2.params.DeviceStateSensorOrientationMap;
import android.hardware.camera2.params.DynamicRangeProfiles;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.HighSpeedVideoConfiguration;
import android.hardware.camera2.params.LensIntrinsicsSample;
import android.hardware.camera2.params.LensShadingMap;
import android.hardware.camera2.params.MandatoryStreamCombination;
import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
@@ -849,6 +850,15 @@ public class CameraMetadataNative implements Parcelable {
                        return (T) metadata.getMultiResolutionStreamConfigurationMap();
                    }
                });
        sGetCommandMap.put(
                CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey(),
                new GetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
                        return (T) metadata.getLensIntrinsicSamples();
                    }
                });
    }

    private int[] getAvailableFormats() {
@@ -1780,6 +1790,56 @@ public class CameraMetadataNative implements Parcelable {
        return samples;
    }

    /**
     * Flatten an array of {@link LensIntrinsicsSample} entries into the two hidden
     * per-sample keys ({@code STATISTICS_LENS_INTRINSIC_TIMESTAMPS} and
     * {@code STATISTICS_LENS_INTRINSIC_SAMPLES}) that back the public synthetic key.
     *
     * @param samples the samples to store; each carries a timestamp and a 5-element
     *                lens intrinsic calibration vector
     * @return {@code true} if the underlying keys were written, {@code false} when
     *         {@code samples} is {@code null}
     */
    private boolean setLensIntrinsicsSamples(LensIntrinsicsSample[] samples) {
        if (samples == null) {
            return false;
        }

        final int sampleCount = samples.length;
        long[] timestamps = new long[sampleCount];
        // Each sample contributes one contiguous group of 5 floats.
        float[] flatIntrinsics = new float[sampleCount * 5];
        int idx = 0;
        for (LensIntrinsicsSample sample : samples) {
            timestamps[idx] = sample.getTimestamp();
            System.arraycopy(sample.getLensIntrinsics(), 0, flatIntrinsics, idx * 5, 5);
            idx++;
        }
        setBase(CaptureResult.STATISTICS_LENS_INTRINSIC_SAMPLES, flatIntrinsics);
        setBase(CaptureResult.STATISTICS_LENS_INTRINSIC_TIMESTAMPS, timestamps);

        return true;
    }

    /**
     * Synthesize the public {@code STATISTICS_LENS_INTRINSICS_SAMPLES} value from the two
     * hidden per-sample keys (timestamps plus a flat array of 5-element intrinsics groups).
     *
     * @return the reconstructed samples, or {@code null} when neither hidden key is present
     * @throws AssertionError if the two hidden keys are inconsistent (only one present,
     *         intrinsics length not a multiple of 5, or group count != timestamp count)
     */
    private LensIntrinsicsSample[] getLensIntrinsicSamples() {
        final long[] timestamps = getBase(CaptureResult.STATISTICS_LENS_INTRINSIC_TIMESTAMPS);
        final float[] intrinsics = getBase(CaptureResult.STATISTICS_LENS_INTRINSIC_SAMPLES);

        if (timestamps == null) {
            // Both absent is the normal "no samples" case; one-sided data is a HAL bug.
            if (intrinsics == null) {
                return null;
            }
            throw new AssertionError("timestamps is null but intrinsics is not");
        }
        if (intrinsics == null) {
            throw new AssertionError("timestamps is not null but intrinsics is");
        }
        if ((intrinsics.length % 5) != 0) {
            throw new AssertionError("intrinsics are not multiple of 5");
        }
        if ((intrinsics.length / 5) != timestamps.length) {
            throw new AssertionError(String.format(
                    "timestamps has %d entries but intrinsics has %d", timestamps.length,
                    intrinsics.length / 5));
        }

        final int count = timestamps.length;
        LensIntrinsicsSample[] result = new LensIntrinsicsSample[count];
        for (int idx = 0; idx < count; idx++) {
            final int start = idx * 5;
            result[idx] = new LensIntrinsicsSample(timestamps[idx],
                    Arrays.copyOfRange(intrinsics, start, start + 5));
        }
        return result;
    }

    private Capability[] getExtendedSceneModeCapabilities() {
        int[] maxSizes =
                getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES);
@@ -1947,6 +2007,15 @@ public class CameraMetadataNative implements Parcelable {
                        metadata.setLensShadingMap((LensShadingMap) value);
                    }
                });
        sSetCommandMap.put(
                CaptureResult.STATISTICS_LENS_INTRINSICS_SAMPLES.getNativeKey(),
                new SetCommand() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public <T> void setValue(CameraMetadataNative metadata, T value) {
                        metadata.setLensIntrinsicsSamples((LensIntrinsicsSample []) value);
                    }
                });
    }

    private boolean setAvailableFormats(int[] value) {
+125 −0
Original line number Diff line number Diff line
/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.params;

import android.annotation.FlaggedApi;
import android.annotation.NonNull;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.utils.HashCodeHelpers;
import android.text.TextUtils;

import com.android.internal.camera.flags.Flags;
import com.android.internal.util.Preconditions;

import java.util.Arrays;

/**
 * Immutable class to store an
 * {@link CaptureResult#STATISTICS_LENS_INTRINSICS_SAMPLES lens intrinsics intra-frame sample}.
 */
@FlaggedApi(Flags.FLAG_CONCERT_MODE)
public final class LensIntrinsicsSample {
    /**
     * Create a new {@link LensIntrinsicsSample}.
     *
     * <p>{@link LensIntrinsicsSample} contains the timestamp and the
     * {@link CaptureResult#LENS_INTRINSIC_CALIBRATION} sample.</p>
     *
     * @param timestamp timestamp of the lens intrinsics sample, in nanoseconds.
     * @param lensIntrinsics the lens intrinsic calibration for the sample; must contain
     *                       exactly 5 elements. The array is copied, so subsequent
     *                       caller-side mutation does not affect this instance.
     *
     * @throws IllegalArgumentException if lensIntrinsics length is different from 5
     */
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    public LensIntrinsicsSample(final long timestamp, @NonNull final float[] lensIntrinsics) {
        mTimestampNs = timestamp;
        Preconditions.checkArgument(lensIntrinsics.length == 5);
        // Defensive copy: this class is documented as immutable, so it must not alias a
        // caller-owned mutable array (Effective Java, "minimize mutability").
        mLensIntrinsics = lensIntrinsics.clone();
    }

    /**
     * Get the timestamp in nanoseconds.
     *
     *<p>The timestamps are in the same timebase as and comparable to
     *{@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp}.</p>
     *
     * @return the sample timestamp in nanoseconds
     */
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    public long getTimestamp() {
        return mTimestampNs;
    }

    /**
     * Get the lens intrinsics calibration
     *
     * @return a 5-element float array holding the lens intrinsic calibration; a fresh copy
     *         is returned on every call so callers cannot mutate this instance's state
     * @see CaptureResult#LENS_INTRINSIC_CALIBRATION
     */
    @FlaggedApi(Flags.FLAG_CONCERT_MODE)
    @NonNull
    public float[] getLensIntrinsics() {
        // Copy out to preserve immutability of the internal array.
        return mLensIntrinsics.clone();
    }

    /**
     * Check if this {@link LensIntrinsicsSample} is equal to another {@link LensIntrinsicsSample}.
     *
     * <p>Two samples are equal if and only if both their timestamps and their lens
     * intrinsics are equal.</p>
     *
     * @return {@code true} if the objects were equal, {@code false} otherwise
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == null) {
            return false;
        } else if (this == obj) {
            return true;
        } else if (obj instanceof LensIntrinsicsSample) {
            final LensIntrinsicsSample other = (LensIntrinsicsSample) obj;
            // Compare the internal array directly; both instances own private copies.
            return mTimestampNs == other.mTimestampNs
                    && Arrays.equals(mLensIntrinsics, other.mLensIntrinsics);
        }
        return false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        // NOTE(review): the long->float cast loses precision for large timestamps, so
        // distinct samples may share a hash; legal for hashCode, kept for compatibility
        // with the existing HashCodeHelpers-based scheme.
        int timestampHash = HashCodeHelpers.hashCode(((float) mTimestampNs));
        return HashCodeHelpers.hashCode(Arrays.hashCode(mLensIntrinsics), timestampHash);
    }

    /**
     * Return the LensIntrinsicsSample as a string representation.
     *
     * <p> {@code "LensIntrinsicsSample{timestamp:%l, sample:%s}"} represents the LensIntrinsics
     * sample's timestamp, and calibration data.</p>
     *
     * @return string representation of {@link LensIntrinsicsSample}
     */
    @Override
    public String toString() {
        return TextUtils.formatSimple("LensIntrinsicsSample{timestamp:%d, sample:%s}", mTimestampNs,
               Arrays.toString(mLensIntrinsics));
    }

    // Timestamp of the sample, nanoseconds, same timebase as android.sensor.timestamp.
    private final long mTimestampNs;
    // Privately-owned 5-element calibration vector; copied on the way in and out.
    private final float[] mLensIntrinsics;
}