Loading api/current.txt +1 −0 Original line number Diff line number Diff line Loading @@ -11647,6 +11647,7 @@ package android.hardware.camera2 { field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_EXPOSURE_TIME; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_FORWARD_MATRIX; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_FRAME_DURATION; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_GREEN_SPLIT; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_NEUTRAL_COLOR_POINT; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_PROFILE_HUE_SAT_MAP; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_PROFILE_TONE_CURVE; core/java/android/hardware/camera2/CaptureResult.java +33 −0 Original line number Diff line number Diff line Loading @@ -1535,6 +1535,39 @@ public final class CaptureResult extends CameraMetadata { public static final Key<float[]> SENSOR_PROFILE_TONE_CURVE = new Key<float[]>("android.sensor.profileToneCurve", float[].class); /** * <p>The worst-case divergence between Bayer green channels.</p> * <p>This value is an estimate of the worst case split between the * Bayer green channels in the red and blue rows in the sensor color * filter array.</p> * <p>The green split is calculated as follows:</p> * <ol> * <li>A representative 5x5 pixel window W within the active * sensor array is chosen.</li> * <li>The arithmetic mean of the green channels from the red * rows (mean_Gr) within W is computed.</li> * <li>The arithmetic mean of the green channels from the blue * rows (mean_Gb) within W is computed.</li> * <li>The maximum ratio R of the two means is computed as follows: * <code>R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))</code></li> * </ol> * <p>The ratio R is the green split divergence reported for this property, * which represents how much the green channels 
differ in the mosaic * pattern. This value is typically used to determine the treatment of * the green mosaic channels when demosaicing.</p> * <p>The green split value can be roughly interpreted as follows:</p> * <ul> * <li>R < 1.03 is a negligible split (<3% divergence).</li> * <li>1.03 <= R <= 1.20 will require some software * correction to avoid demosaic errors (3-20% divergence).</li> * <li>R > 1.20 will require strong software correction to produce * a usable image (>20% divergence).</li> * </ul> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> */ public static final Key<Float> SENSOR_GREEN_SPLIT = new Key<Float>("android.sensor.greenSplit", float.class); /** * <p>When enabled, the sensor sends a test pattern instead of * doing a real exposure from the camera.</p> Loading Loading
api/current.txt +1 −0 Original line number Diff line number Diff line Loading @@ -11647,6 +11647,7 @@ package android.hardware.camera2 { field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_EXPOSURE_TIME; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_FORWARD_MATRIX; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_FRAME_DURATION; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_GREEN_SPLIT; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_NEUTRAL_COLOR_POINT; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_PROFILE_HUE_SAT_MAP; field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_PROFILE_TONE_CURVE;
core/java/android/hardware/camera2/CaptureResult.java +33 −0 Original line number Diff line number Diff line Loading @@ -1535,6 +1535,39 @@ public final class CaptureResult extends CameraMetadata { public static final Key<float[]> SENSOR_PROFILE_TONE_CURVE = new Key<float[]>("android.sensor.profileToneCurve", float[].class); /** * <p>The worst-case divergence between Bayer green channels.</p> * <p>This value is an estimate of the worst case split between the * Bayer green channels in the red and blue rows in the sensor color * filter array.</p> * <p>The green split is calculated as follows:</p> * <ol> * <li>A representative 5x5 pixel window W within the active * sensor array is chosen.</li> * <li>The arithmetic mean of the green channels from the red * rows (mean_Gr) within W is computed.</li> * <li>The arithmetic mean of the green channels from the blue * rows (mean_Gb) within W is computed.</li> * <li>The maximum ratio R of the two means is computed as follows: * <code>R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))</code></li> * </ol> * <p>The ratio R is the green split divergence reported for this property, * which represents how much the green channels differ in the mosaic * pattern. 
This value is typically used to determine the treatment of * the green mosaic channels when demosaicing.</p> * <p>The green split value can be roughly interpreted as follows:</p> * <ul> * <li>R < 1.03 is a negligible split (<3% divergence).</li> * <li>1.03 <= R <= 1.20 will require some software * correction to avoid demosaic errors (3-20% divergence).</li> * <li>R > 1.20 will require strong software correction to produce * a usable image (>20% divergence).</li> * </ul> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> */ public static final Key<Float> SENSOR_GREEN_SPLIT = new Key<Float>("android.sensor.greenSplit", float.class); /** * <p>When enabled, the sensor sends a test pattern instead of * doing a real exposure from the camera.</p> Loading