Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 06fe59bf authored by Chien-Yu Chen's avatar Chien-Yu Chen Committed by Android (Google) Code Review
Browse files

Merge "camera2: add reprocess format map"

parents cd890311 0a551f14
Loading
Loading
Loading
Loading
+3 −0
Original line number Diff line number Diff line
@@ -13571,6 +13571,8 @@ package android.hardware.camera2.params {
    method public android.util.Range<java.lang.Integer>[] getHighSpeedVideoFpsRangesFor(android.util.Size);
    method public android.util.Size[] getHighSpeedVideoSizes();
    method public android.util.Size[] getHighSpeedVideoSizesFor(android.util.Range<java.lang.Integer>);
    method public final int[] getInputFormats();
    method public android.util.Size[] getInputSizes(int);
    method public final int[] getOutputFormats();
    method public long getOutputMinFrameDuration(int, android.util.Size);
    method public long getOutputMinFrameDuration(java.lang.Class<T>, android.util.Size);
@@ -13578,6 +13580,7 @@ package android.hardware.camera2.params {
    method public android.util.Size[] getOutputSizes(int);
    method public long getOutputStallDuration(int, android.util.Size);
    method public long getOutputStallDuration(java.lang.Class<T>, android.util.Size);
    method public final int[] getValidOutputFormatsForInput(int);
    method public boolean isOutputSupportedFor(int);
    method public static boolean isOutputSupportedFor(java.lang.Class<T>);
    method public boolean isOutputSupportedFor(android.view.Surface);
+3 −0
Original line number Diff line number Diff line
@@ -13863,6 +13863,8 @@ package android.hardware.camera2.params {
    method public android.util.Range<java.lang.Integer>[] getHighSpeedVideoFpsRangesFor(android.util.Size);
    method public android.util.Size[] getHighSpeedVideoSizes();
    method public android.util.Size[] getHighSpeedVideoSizesFor(android.util.Range<java.lang.Integer>);
    method public final int[] getInputFormats();
    method public android.util.Size[] getInputSizes(int);
    method public final int[] getOutputFormats();
    method public long getOutputMinFrameDuration(int, android.util.Size);
    method public long getOutputMinFrameDuration(java.lang.Class<T>, android.util.Size);
@@ -13870,6 +13872,7 @@ package android.hardware.camera2.params {
    method public android.util.Size[] getOutputSizes(int);
    method public long getOutputStallDuration(int, android.util.Size);
    method public long getOutputStallDuration(java.lang.Class<T>, android.util.Size);
    method public final int[] getValidOutputFormatsForInput(int);
    method public boolean isOutputSupportedFor(int);
    method public static boolean isOutputSupportedFor(java.lang.Class<T>);
    method public boolean isOutputSupportedFor(android.view.Surface);
+11 −14
Original line number Diff line number Diff line
@@ -1107,8 +1107,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * that can be configured and used simultaneously by a camera device.</p>
     * <p>When set to 0, it means no input stream is supported.</p>
     * <p>The image format for a input stream can be any supported
     * format provided by
     * android.scaler.availableInputOutputFormatsMap. When using an
     * format returned by StreamConfigurationMap#getInputFormats. When using an
     * input stream, there must be at least one output stream
     * configured to receive the reprocessed images.</p>
     * <p>When an input stream and some output streams are used in a reprocessing request,
@@ -1408,12 +1407,12 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * </thead>
     * <tbody>
     * <tr>
     * <td align="left">OPAQUE</td>
     * <td align="left">PRIVATE (ImageFormat#PRIVATE)</td>
     * <td align="left">JPEG</td>
     * <td align="left">OPAQUE_REPROCESSING</td>
     * </tr>
     * <tr>
     * <td align="left">OPAQUE</td>
     * <td align="left">PRIVATE</td>
     * <td align="left">YUV_420_888</td>
     * <td align="left">OPAQUE_REPROCESSING</td>
     * </tr>
@@ -1429,25 +1428,23 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
     * </tr>
     * </tbody>
     * </table>
     * <p>OPAQUE refers to a device-internal format that is not directly application-visible.
     * An OPAQUE input or output surface can be acquired by
     * OpaqueImageRingBufferQueue#getInputSurface() or
     * OpaqueImageRingBufferQueue#getOutputSurface().
     * For a OPAQUE_REPROCESSING-capable camera device, using the OPAQUE format
     * <p>PRIVATE refers to a device-internal format that is not directly application-visible.
     * A PRIVATE input surface can be acquired by
     * ImageReader.newOpaqueInstance(width, height, maxImages).
     * For an OPAQUE_REPROCESSING-capable camera device, using the PRIVATE format
     * as either input or output will never hurt maximum frame rate (i.e.
     * StreamConfigurationMap#getOutputStallDuration(klass,Size) is always 0),
     * where klass is android.media.OpaqueImageRingBufferQueue.class.</p>
     * StreamConfigurationMap#getOutputStallDuration(format, size) is always 0),
     * where format is ImageFormat#PRIVATE.</p>
     * <p>Attempting to configure an input stream with output streams not
     * listed as available in this map is not valid.</p>
     * <p>TODO: typedef to ReprocessFormatsMap</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     *
     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
     * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS
     * @hide
     */
    public static final Key<int[]> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
            new Key<int[]>("android.scaler.availableInputOutputFormatsMap", int[].class);
    public static final Key<android.hardware.camera2.params.ReprocessFormatsMap> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
            new Key<android.hardware.camera2.params.ReprocessFormatsMap>("android.scaler.availableInputOutputFormatsMap", android.hardware.camera2.params.ReprocessFormatsMap.class);

    /**
     * <p>The available stream configurations that this
+25 −22
Original line number Diff line number Diff line
@@ -448,17 +448,18 @@ public abstract class CameraMetadata<TKey> {
     * <p>The camera device supports the Zero Shutter Lag reprocessing use case.</p>
     * <ul>
     * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li>
     * <li>OPAQUE is supported as an output/input format, that is,
     *   StreamConfigurationMap#getOutputSizes(klass) and
     *   StreamConfigurationMap#getInputSizes(klass) return non empty Size[] and have common
     *   sizes, where klass is android.media.OpaqueImageRingBufferQueue.class. See
     *   android.scaler.availableInputOutputFormatsMap for detailed information about
     *   OPAQUE format.</li>
     * <li>android.scaler.availableInputOutputFormatsMap has the required map entries.</li>
     * <li>Using OPAQUE does not cause a frame rate drop
     * <li>ImageFormat#PRIVATE is supported as an output/input format, that is,
     *   ImageFormat#PRIVATE is included in the lists of formats returned by
     *   StreamConfigurationMap#getInputFormats and
     *   StreamConfigurationMap#getOutputFormats.</li>
     * <li>StreamConfigurationMap#getValidOutputFormatsForInput returns non empty int[] for
     *   each supported input format returned by StreamConfigurationMap#getInputFormats.</li>
     * <li>Each size returned by StreamConfigurationMap#getInputSizes(ImageFormat#PRIVATE)
     *   is also included in StreamConfigurationMap#getOutputSizes(ImageFormat#PRIVATE)</li>
     * <li>Using ImageFormat#PRIVATE does not cause a frame rate drop
     *   relative to the sensor's maximum capture rate (at that
     *   resolution), see android.scaler.availableInputOutputFormatsMap for more details.</li>
     * <li>OPAQUE will be reprocessable into both YUV_420_888
     *   resolution).</li>
     * <li>ImageFormat#PRIVATE will be reprocessable into both YUV_420_888
     *   and JPEG formats.</li>
     * <li>The maximum available resolution for OPAQUE streams
     *   (both input/output) will match the maximum available
@@ -539,27 +540,29 @@ public abstract class CameraMetadata<TKey> {
    public static final int REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE = 6;

    /**
     * <p>The camera device supports the YUV420_888 reprocessing use case, similar as
     * <p>The camera device supports the YUV_420_888 reprocessing use case, similar to
     * OPAQUE_REPROCESSING. This capability requires the camera device to support the
     * following:</p>
     * <ul>
     * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li>
     * <li>YUV420_888 is supported as a common format for both input and output, that is,
     *   StreamConfigurationMap#getOutputSizes(YUV420_888) and
     *   StreamConfigurationMap#getInputSizes(YUV420_888) return non empty Size[] and have
     *   common sizes.</li>
     * <li>android.scaler.availableInputOutputFormatsMap has the required map entries.</li>
     * <li>Using YUV420_888 does not cause a frame rate drop
     *   relative to the sensor's maximum capture rate (at that
     *   resolution), see android.scaler.availableInputOutputFormatsMap for more details.</li>
     * <li>YUV420_888 will be reprocessable into both YUV_420_888
     * <li>YUV_420_888 is supported as an output/input format, that is,
     *   YUV_420_888 is included in the lists of formats returned by
     *   StreamConfigurationMap#getInputFormats and
     *   StreamConfigurationMap#getOutputFormats.</li>
     * <li>StreamConfigurationMap#getValidOutputFormatsForInput returns non empty int[] for
     *   each supported input format returned by StreamConfigurationMap#getInputFormats.</li>
     * <li>Each size returned by StreamConfigurationMap#getInputSizes(YUV_420_888)
     *   is also included in StreamConfigurationMap#getOutputSizes(YUV_420_888)</li>
     * <li>Using YUV_420_888 does not cause a frame rate drop
     *   relative to the sensor's maximum capture rate (at that resolution).</li>
     * <li>YUV_420_888 will be reprocessable into both YUV_420_888
     *   and JPEG formats.</li>
     * <li>The maximum available resolution for YUV420_888 streams
     * <li>The maximum available resolution for YUV_420_888 streams
     *   (both input/output) will match the maximum available
     *   resolution of JPEG streams.</li>
     * <li>Only the below controls are effective for reprocessing requests and will be
     *   present in capture results. The reprocess requests are from the original capture
     *   results that are associated with the intermediate YUV420_888 output buffers.
     *   results that are associated with the intermediate YUV_420_888 output buffers.
     *   All other controls in the reprocess requests will be ignored by the camera device.<ul>
     * <li>android.jpeg.*</li>
     * <li>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</li>
+4 −1
Original line number Diff line number Diff line
@@ -48,6 +48,7 @@ import android.hardware.camera2.marshal.impl.MarshalQueryableString;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.HighSpeedVideoConfiguration;
import android.hardware.camera2.params.LensShadingMap;
import android.hardware.camera2.params.ReprocessFormatsMap;
import android.hardware.camera2.params.StreamConfiguration;
import android.hardware.camera2.params.StreamConfigurationDuration;
import android.hardware.camera2.params.StreamConfigurationMap;
@@ -838,11 +839,13 @@ public class CameraMetadataNative implements Parcelable {
                CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
        HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
                CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
        ReprocessFormatsMap inputOutputFormatsMap = getBase(
                CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);

        return new StreamConfigurationMap(
                configurations, minFrameDurations, stallDurations,
                depthConfigurations, depthMinFrameDurations, depthStallDurations,
                highSpeedVideoConfigurations);
                highSpeedVideoConfigurations, inputOutputFormatsMap);
    }

    private <T> Integer getMaxRegions(Key<T> key) {
Loading