
Commit 78712a8b authored by Igor Murashkin, committed by Eino-Ville Talvala

camera2: Add new metadata keys, change types for existing range keys

Also adds the following keys:
* android.control.maxRegionsAe
* android.control.maxRegionsAwb
* android.control.maxRegionsAf
* android.request.maxNumOutputRaw
* android.request.maxNumOutputProc
* android.request.maxNumOutputProcStalling

Changes the following keys' types:
* android.control.aeTargetFpsRange (Range<Integer>)
* android.control.aeAvailableTargetFpsRanges (Range<Integer>[])
* android.control.aeCompensationRange (Range<Integer>)
* android.lens.focusRange (Range<Float>)

Bug: 14628001
Change-Id: I141847dffc4b0d89cea37c19a54d6d5faf24a9bb
parent 817f8933
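
Editor's note (not part of the commit): the sketch below shows how application code would consume the retyped keys once this change lands. The class and method names (AeRangeExample, pickWidestFpsRange, clampAeCompensation) are made up for illustration; only the framework keys and android.util.Range calls are real API.

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.util.Range;

// Illustrative only: reads the keys whose types this change moves to android.util.Range.
public final class AeRangeExample {

    // Returns the widest advertised AE target FPS range, or null if none are listed.
    public static Range<Integer> pickWidestFpsRange(Context context, String cameraId)
            throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);

        // Previously an int[] of interleaved pairs; now a typed Range<Integer>[].
        Range<Integer>[] fpsRanges =
                chars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
        if (fpsRanges == null) {
            return null;
        }

        Range<Integer> widest = null;
        for (Range<Integer> r : fpsRanges) {
            int span = r.getUpper() - r.getLower();
            if (widest == null || span > widest.getUpper() - widest.getLower()) {
                widest = r;
            }
        }
        return widest;
    }

    // Previously an int[2]; now a single Range<Integer> that supports clamp()/contains().
    public static int clampAeCompensation(CameraCharacteristics chars, int requestedIndex) {
        Range<Integer> range = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
        return (range != null) ? range.clamp(requestedIndex) : 0;
    }
}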
api/current.txt  +6 −2
@@ -12149,7 +12149,9 @@ package android.hardware.camera2 {
    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AVAILABLE_SCENE_MODES;
    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES;
    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AWB_AVAILABLE_MODES;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AE;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AF;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AWB;
    field public static final android.hardware.camera2.CameraCharacteristics.Key EDGE_AVAILABLE_EDGE_MODES;
    field public static final android.hardware.camera2.CameraCharacteristics.Key FLASH_INFO_AVAILABLE;
    field public static final android.hardware.camera2.CameraCharacteristics.Key HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES;
@@ -12167,7 +12169,9 @@ package android.hardware.camera2 {
    field public static final android.hardware.camera2.CameraCharacteristics.Key NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES;
    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_AVAILABLE_CAPABILITIES;
    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_INPUT_STREAMS;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_STREAMS;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_PROC;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_PROC_STALLING;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_RAW;
    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_PARTIAL_RESULT_COUNT;
    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_PIPELINE_MAX_DEPTH;
    field public static final android.hardware.camera2.CameraCharacteristics.Key SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
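
Editor's note (not part of the commit): with CONTROL_MAX_REGIONS split into per-algorithm keys as shown above, app code can read each limit as a single Integer. The helper class name MeteringRegionLimits below is hypothetical; the keys and CameraCharacteristics.get() are real API.

import android.hardware.camera2.CameraCharacteristics;

// Illustrative only: the per-type metering-region limits that replace CONTROL_MAX_REGIONS.
public final class MeteringRegionLimits {
    public final int maxAeRegions;
    public final int maxAwbRegions;
    public final int maxAfRegions;

    public MeteringRegionLimits(CameraCharacteristics chars) {
        // Each key returns a single Integer instead of indexing into an int[3].
        Integer ae = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        Integer awb = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
        Integer af = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
        // Defensive: treat a missing value as "no metering regions supported".
        maxAeRegions = (ae != null) ? ae : 0;
        maxAwbRegions = (awb != null) ? awb : 0;
        maxAfRegions = (af != null) ? af : 0;
    }
}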
CameraCharacteristics.java  +107 −5
@@ -322,8 +322,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * <p>List of frame rate ranges supported by the
     * AE algorithm/hardware</p>
     */
-    public static final Key<int[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
-            new Key<int[]>("android.control.aeAvailableTargetFpsRanges", int[].class);
+    public static final Key<android.util.Range<Integer>[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
+            new Key<android.util.Range<Integer>[]>("android.control.aeAvailableTargetFpsRanges", new TypeReference<android.util.Range<Integer>[]>() {{ }});

    /**
     * <p>Maximum and minimum exposure compensation
@@ -332,8 +332,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     *
     * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
     */
-    public static final Key<int[]> CONTROL_AE_COMPENSATION_RANGE =
-            new Key<int[]>("android.control.aeCompensationRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_COMPENSATION_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeCompensationRange", new TypeReference<android.util.Range<Integer>>() {{ }});

    /**
     * <p>Smallest step by which exposure compensation
@@ -427,10 +427,44 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * @see CaptureRequest#CONTROL_AE_REGIONS
     * @see CaptureRequest#CONTROL_AF_REGIONS
     * @see CaptureRequest#CONTROL_AWB_REGIONS
+     * @hide
     */
    public static final Key<int[]> CONTROL_MAX_REGIONS =
            new Key<int[]>("android.control.maxRegions", int[].class);

+    /**
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-exposure (AE);
+     * this corresponds to the the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AE_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AE =
+            new Key<Integer>("android.control.maxRegionsAe", int.class);
+
+    /**
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-white balance (AWB);
+     * this corresponds to the the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AWB_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AWB =
+            new Key<Integer>("android.control.maxRegionsAwb", int.class);
+
+    /**
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-focus (AF);
+     * this corresponds to the the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AF_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AF =
+            new Key<Integer>("android.control.maxRegionsAf", int.class);
+
    /**
     * <p>The set of edge enhancement modes supported by this camera device.</p>
     * <p>This tag lists the valid modes for {@link CaptureRequest#EDGE_MODE android.edge.mode}.</p>
@@ -621,7 +655,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * number is 3, and max JPEG stream number is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
     * <p>This lists the upper bound of the number of output streams supported by
     * the camera device. Using more streams simultaneously may require more hardware and
-     * CPU resources that will consume more power. The image format for a output stream can
+     * CPU resources that will consume more power. The image format for an output stream can
     * be any supported format provided by android.scaler.availableStreamConfigurations.
     * The formats defined in android.scaler.availableStreamConfigurations can be catergorized
     * into the 3 stream types as below:</p>
@@ -632,10 +666,78 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
     * Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li>
     * </ul>
+     * @hide
     */
    public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
            new Key<int[]>("android.request.maxNumOutputStreams", int[].class);

+    /**
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any <code>RAW</code> formats.</p>
+     * <p>This value contains the max number of output simultaneous
+     * streams from the raw sensor.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of an output stream can
+     * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>In particular, a <code>RAW</code> format is typically one of:</p>
+     * <ul>
+     * <li>ImageFormat#RAW_SENSOR</li>
+     * <li>Opaque <code>RAW</code></li>
+     * </ul>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_RAW =
+            new Key<Integer>("android.request.maxNumOutputRaw", int.class);
+
+    /**
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any processed (but not-stalling) formats.</p>
+     * <p>This value contains the max number of output simultaneous
+     * streams for any processed (but not-stalling) formats.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of an output stream can
+     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
+     * Typically:</p>
+     * <ul>
+     * <li>ImageFormat#YUV_420_888</li>
+     * <li>ImageFormat#NV21</li>
+     * <li>ImageFormat#YV12</li>
+     * <li>Implementation-defined formats, i.e. StreamConfiguration#isOutputSupportedFor(Class)</li>
+     * </ul>
+     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
+     * a processed format -- it will return 0 for a non-stalling stream.</p>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC =
+            new Key<Integer>("android.request.maxNumOutputProc", int.class);
+
+    /**
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any processed (and stalling) formats.</p>
+     * <p>This value contains the max number of output simultaneous
+     * streams for any processed (but not-stalling) formats.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of an output stream can
+     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations &gt; 0.
+     * Typically only the <code>JPEG</code> format (ImageFormat#JPEG)</p>
+     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
+     * a processed format -- it will return a non-0 value for a stalling stream.</p>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC_STALLING =
+            new Key<Integer>("android.request.maxNumOutputProcStalling", int.class);
+
    /**
     * <p>The maximum numbers of any type of input streams
     * that can be configured and used simultaneously by a camera device.</p>
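
Editor's note (not part of the commit): a minimal sketch of how an app could budget its output streams against the three new per-category limits above. The class name StreamBudgetCheck and method fitsBudget are hypothetical; the keys are the ones added in this change.

import android.hardware.camera2.CameraCharacteristics;

// Illustrative only: sizing output-stream usage against the new per-category limits.
public final class StreamBudgetCheck {
    // Returns true if the proposed stream counts fit within the device's advertised limits.
    public static boolean fitsBudget(CameraCharacteristics chars,
                                     int rawStreams, int procStreams, int stallingStreams) {
        Integer maxRaw = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
        Integer maxProc = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
        Integer maxStalling = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
        if (maxRaw == null || maxProc == null || maxStalling == null) {
            return false; // keys not reported; be conservative
        }
        return rawStreams <= maxRaw
                && procStreams <= maxProc
                && stallingStreams <= maxStalling;
    }
}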
CaptureRequest.java  +2 −2
@@ -719,8 +719,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
     *
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     */
-    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
-            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});

    /**
     * <p>Whether the camera device will trigger a precapture
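
Editor's note (not part of the commit): with the request key retyped above, a request builder takes a Range<Integer> directly. The class name FpsRangeRequest is made up for this sketch; the CameraDevice/CaptureRequest.Builder calls are real API. A caller might pass, for example, Range.create(15, 30).

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.util.Range;
import android.view.Surface;

// Illustrative only: setting the retyped AE target FPS range on a preview request.
public final class FpsRangeRequest {
    public static CaptureRequest buildPreviewRequest(CameraDevice device,
                                                     Surface previewSurface,
                                                     Range<Integer> fpsRange)
            throws CameraAccessException {
        CaptureRequest.Builder builder =
                device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        builder.addTarget(previewSurface);
        // The key now takes a Range<Integer> directly instead of an int[] pair.
        builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
        return builder.build();
    }
}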
CaptureResult.java  +4 −4
@@ -565,8 +565,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
     *
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     */
-    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
-            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});

    /**
     * <p>Whether the camera device will trigger a precapture
@@ -1783,8 +1783,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
     * <p>If variable focus not supported, can still report
     * fixed depth of field range</p>
     */
-    public static final Key<float[]> LENS_FOCUS_RANGE =
-            new Key<float[]>("android.lens.focusRange", float[].class);
+    public static final Key<android.util.Range<Float>> LENS_FOCUS_RANGE =
+            new Key<android.util.Range<Float>>("android.lens.focusRange", new TypeReference<android.util.Range<Float>>() {{ }});

    /**
     * <p>Sets whether the camera device uses optical image stabilization (OIS)
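
Editor's note (not part of the commit): on the result side, the retyped LENS_FOCUS_RANGE is read from a capture callback as a Range<Float>. The class name FocusRangeLogger is hypothetical; CameraCaptureSession.CaptureCallback and CaptureResult.get() are real API.

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Log;
import android.util.Range;

// Illustrative only: consuming the retyped LENS_FOCUS_RANGE from a capture callback.
public class FocusRangeLogger extends CameraCaptureSession.CaptureCallback {
    private static final String TAG = "FocusRangeLogger";

    @Override
    public void onCaptureCompleted(CameraCaptureSession session,
                                   CaptureRequest request,
                                   TotalCaptureResult result) {
        // Previously a float[] pair; now a typed Range<Float> (values in diopters).
        Range<Float> focusRange = result.get(CaptureResult.LENS_FOCUS_RANGE);
        if (focusRange != null) {
            Log.d(TAG, "Focus range (diopters): [" + focusRange.getLower()
                    + ", " + focusRange.getUpper() + "]");
        }
    }
}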
CameraMetadataNative.java  +73 −10
@@ -50,6 +50,7 @@ import android.hardware.camera2.utils.TypeReference;
import android.os.Parcelable;
import android.os.Parcel;
import android.util.Log;
+import android.util.Pair;

import com.android.internal.util.Preconditions;

@@ -297,9 +298,9 @@ public class CameraMetadataNative implements Parcelable {
    public <T> T get(Key<T> key) {
        Preconditions.checkNotNull(key, "key must not be null");

-        T value = getOverride(key);
-        if (value != null) {
-            return value;
+        Pair<T, Boolean> override = getOverride(key);
+        if (override.second) {
+            return override.first;
        }

        return getBase(key);
@@ -413,19 +414,35 @@ public class CameraMetadataNative implements Parcelable {
    // Need overwrite some metadata that has different definitions between native
    // and managed sides.
    @SuppressWarnings("unchecked")
-    private <T> T getOverride(Key<T> key) {
+    private <T> Pair<T, Boolean> getOverride(Key<T> key) {
+        T value = null;
+        boolean override = true;
+
        if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_FORMATS)) {
-            return (T) getAvailableFormats();
+            value = (T) getAvailableFormats();
        } else if (key.equals(CaptureResult.STATISTICS_FACES)) {
-            return (T) getFaces();
+            value = (T) getFaces();
        } else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
-            return (T) getFaceRectangles();
+            value = (T) getFaceRectangles();
        } else if (key.equals(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)) {
-            return (T) getStreamConfigurationMap();
+            value = (T) getStreamConfigurationMap();
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+            value = (T) getMaxNumOutputs(key);
+        } else {
+            override = false;
        }

-        // For other keys, get() falls back to getBase()
-        return null;
+        return Pair.create(value, override);
    }

    private int[] getAvailableFormats() {
@@ -552,6 +569,52 @@ public class CameraMetadataNative implements Parcelable {
        return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations);
    }

+    private <T> Integer getMaxRegions(Key<T> key) {
+        final int AE = 0;
+        final int AWB = 1;
+        final int AF = 2;
+
+        // The order of the elements is: (AE, AWB, AF)
+        int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);
+
+        if (maxRegions == null) {
+            return null;
+        }
+
+        if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+            return maxRegions[AE];
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+            return maxRegions[AWB];
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+            return maxRegions[AF];
+        } else {
+            throw new AssertionError("Invalid key " + key);
+        }
+    }
+
+    private <T> Integer getMaxNumOutputs(Key<T> key) {
+        final int RAW = 0;
+        final int PROC = 1;
+        final int PROC_STALLING = 2;
+
+        // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
+        int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);
+
+        if (maxNumOutputs == null) {
+            return null;
+        }
+
+        if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+            return maxNumOutputs[RAW];
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+            return maxNumOutputs[PROC];
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+            return maxNumOutputs[PROC_STALLING];
+        } else {
+            throw new AssertionError("Invalid key " + key);
+        }
+    }
+
    private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
        setBase(key.getNativeKey(), value);
    }
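
Editor's note (not part of the commit): the diff above changes getOverride() to return Pair<T, Boolean> rather than a bare T, so that "the override computed null" can be told apart from "no override applies, fall back to getBase()". A standalone sketch of that pattern, with hypothetical names (OverrideLookupSketch, string keys), follows; only android.util.Pair is real API.

import android.util.Pair;

// Illustrative only: why the override lookup returns Pair<T, Boolean> rather than T.
final class OverrideLookupSketch {
    // Simulates getOverride(): first = value, second = whether an override applied.
    static Pair<String, Boolean> getOverride(String key) {
        if ("overridden.to.null".equals(key)) {
            // An override applies, and its computed value is legitimately null.
            return Pair.create(null, true);
        }
        // No override for this key.
        return Pair.create(null, false);
    }

    static String get(String key) {
        Pair<String, Boolean> override = getOverride(key);
        if (override.second) {
            // With a bare T return, a null override would be indistinguishable from
            // "no override" and the caller would wrongly fall through to getBase().
            return override.first;
        }
        return getBase(key);
    }

    static String getBase(String key) {
        return "base:" + key;
    }
}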