Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 41f51f93 authored by Emilian Peev's avatar Emilian Peev
Browse files

Camera: Add EFV support for extension latency mitigations

The EFV extension layer was originally designed without any
still capture latency in mind and didn't include any support
for the existing mitigations.
Since the layer can be used by other extension implementations,
the gap needs to be closed.

Flag: com.android.internal.camera.flags.efv_capture_latency
Bug: 423674832
Bug: 297083874
Test: Camera CTS
Change-Id: Ice82775456256ba2e66a713a55061f76f3bca339
parent 5a480c2d
Loading
Loading
Loading
Loading
+14 −0
Original line number Diff line number Diff line
@@ -5234,12 +5234,22 @@ package android.hardware.camera2.extension {
    method @NonNull public abstract java.util.List<android.hardware.camera2.CaptureRequest.Key> getAvailableCaptureRequestKeys(@NonNull String);
    method @NonNull public abstract java.util.List<android.hardware.camera2.CaptureResult.Key> getAvailableCaptureResultKeys(@NonNull String);
    method @NonNull public abstract java.util.List<android.util.Pair<android.hardware.camera2.CameraCharacteristics.Key,java.lang.Object>> getAvailableCharacteristicsKeyValues();
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") @Nullable public android.util.Range<java.lang.Long> getEstimatedCaptureLatencyRangeMillis(@NonNull android.util.Size, int);
    method public long getMetadataVendorId(@NonNull String);
    method @NonNull public abstract android.hardware.camera2.extension.SessionProcessor getSessionProcessor();
    method @NonNull public abstract java.util.Map<java.lang.Integer,java.util.List<android.util.Size>> getSupportedCaptureOutputResolutions(@NonNull String);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") @NonNull public java.util.Map<java.lang.Integer,java.util.List<android.util.Size>> getSupportedPostviewOutputResolutions(@NonNull android.util.Size);
    method @NonNull public abstract java.util.Map<java.lang.Integer,java.util.List<android.util.Size>> getSupportedPreviewOutputResolutions(@NonNull String);
    method public abstract void initialize(@NonNull String, @NonNull android.hardware.camera2.extension.CharacteristicsMap);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") public boolean isCaptureProcessProgressAvailable();
    method public abstract boolean isExtensionAvailable(@NonNull String, @NonNull android.hardware.camera2.extension.CharacteristicsMap);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") public boolean isPostviewAvailable();
  }
  @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") public final class CameraConfiguration {
    method @NonNull public android.hardware.camera2.extension.CameraOutputSurface getPostViewOutputSurface();
    method @NonNull public android.hardware.camera2.extension.CameraOutputSurface getPreviewOutputSurface();
    method @NonNull public android.hardware.camera2.extension.CameraOutputSurface getStillCaptureOutputSurface();
  }
  public abstract class CameraExtensionService extends android.app.Service {
@@ -5300,11 +5310,14 @@ package android.hardware.camera2.extension {
  public abstract class SessionProcessor {
    ctor public SessionProcessor();
    method public abstract void deInitSession(@NonNull android.os.IBinder);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") @Nullable public android.util.Pair<java.lang.Long,java.lang.Long> getRealtimeStillCaptureLatency();
    method @NonNull public abstract android.hardware.camera2.extension.ExtensionConfiguration initSession(@NonNull android.os.IBinder, @NonNull String, @NonNull android.hardware.camera2.extension.CharacteristicsMap, @NonNull android.hardware.camera2.extension.CameraOutputSurface, @NonNull android.hardware.camera2.extension.CameraOutputSurface);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") @NonNull public android.hardware.camera2.extension.ExtensionConfiguration initSession(@NonNull android.os.IBinder, @NonNull String, @NonNull android.hardware.camera2.extension.CharacteristicsMap, @NonNull android.hardware.camera2.extension.CameraConfiguration);
    method public abstract void onCaptureSessionEnd();
    method public abstract void onCaptureSessionStart(@NonNull android.hardware.camera2.extension.RequestProcessor, @NonNull String);
    method public abstract void setParameters(@NonNull android.hardware.camera2.CaptureRequest);
    method public abstract int startMultiFrameCapture(@NonNull java.util.concurrent.Executor, @NonNull android.hardware.camera2.extension.SessionProcessor.CaptureCallback);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") public int startMultiFrameCapture(boolean, @NonNull java.util.concurrent.Executor, @NonNull android.hardware.camera2.extension.SessionProcessor.CaptureCallback);
    method public abstract int startRepeating(@NonNull java.util.concurrent.Executor, @NonNull android.hardware.camera2.extension.SessionProcessor.CaptureCallback);
    method public abstract int startTrigger(@NonNull android.hardware.camera2.CaptureRequest, @NonNull java.util.concurrent.Executor, @NonNull android.hardware.camera2.extension.SessionProcessor.CaptureCallback);
    method public abstract void stopRepeating();
@@ -5313,6 +5326,7 @@ package android.hardware.camera2.extension {
  public static interface SessionProcessor.CaptureCallback {
    method public void onCaptureCompleted(long, int, @NonNull java.util.Map<android.hardware.camera2.CaptureResult.Key,java.lang.Object>);
    method public void onCaptureFailed(int, int);
    method @FlaggedApi("com.android.internal.camera.flags.efv_capture_latency") public default void onCaptureProcessProgressUpdated(@IntRange(from=0, to=100) int);
    method public void onCaptureProcessStarted(int);
    method public void onCaptureSequenceAborted(int);
    method public void onCaptureSequenceCompleted(int);
+93 −9
Original line number Diff line number Diff line
@@ -18,7 +18,9 @@ package android.hardware.camera2.extension;

import android.annotation.FlaggedApi;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
@@ -28,6 +30,7 @@ import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.impl.CaptureCallback;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Size;

import com.android.internal.camera.flags.Flags;
@@ -36,6 +39,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;

/**
 * Advanced contract for implementing Extensions. ImageCapture/Preview
@@ -240,6 +244,63 @@ public abstract class AdvancedExtender {
    public abstract List<CaptureResult.Key> getAvailableCaptureResultKeys(
            @NonNull String cameraId);

    /**
     * Returns the estimated capture latency range in milliseconds for the
     * target capture resolution during the calls to
     * {@link SessionProcessor#startMultiFrameCapture(Executor, SessionProcessor.CaptureCallback)}.
     * This includes the time spent processing the multi-frame capture request along with any
     * additional time for encoding of the processed buffer if necessary.
     *
     * @param captureOutputSize size of the capture output surface. If it is not in the supported
     *                          output sizes, maximum capture output size is used for the estimation
     * @param format            device-specific extension output format
     * @return the range of estimated minimal and maximal capture latency in milliseconds
     * or null if no capture latency info can be provided
     * @throws UnsupportedOperationException by this base implementation; extensions that
     * support latency estimation must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    public @Nullable Range<Long> getEstimatedCaptureLatencyRangeMillis(
            @NonNull Size captureOutputSize, @ImageFormat.Format int format) {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * Retrieve support for capture progress callbacks via
     * {@link SessionProcessor.CaptureCallback#onCaptureProcessProgressUpdated(int)}.
     *
     * @return {@code true} in case progress callbacks are supported, {@code false} otherwise
     * @throws UnsupportedOperationException by this base implementation; extensions that
     * support progress reporting must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    public boolean isCaptureProcessProgressAvailable() {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * Indicates whether the extension supports the postview for still capture feature.
     *
     * @return true if the feature is supported, otherwise false
     * @throws UnsupportedOperationException by this base implementation; extensions that
     * support postview must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    public boolean isPostviewAvailable() {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * Returns supported output format/size map for postview image. OEM is required to
     * support both JPEG and YUV_420_888 format output.
     *
     * <p>The returned sizes must be smaller than or equal to the provided capture
     *   size and have the same aspect ratio as the given capture size. If no supported
     *   resolution exists for the provided capture size then an empty map is returned.
     *   An example of how the map is parsed can be found in {@link #initializeParcelable(Map)}.</p>
     * @param captureSize The still capture resolution
     * @return map from image format to the list of supported postview sizes for that format
     * @throws UnsupportedOperationException by this base implementation; extensions that
     * support postview must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    @NonNull
    public Map<Integer, List<Size>> getSupportedPostviewOutputResolutions(
            @NonNull Size captureSize) {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * Returns a list of {@link CameraCharacteristics} key/value pairs for apps to use when
     * querying the Extensions specific {@link CameraCharacteristics}.
@@ -280,9 +341,14 @@ public abstract class AdvancedExtender {
        @Override
        public List<SizeList> getSupportedPostviewResolutions(
                android.hardware.camera2.extension.Size captureSize) {
            // Postview resolutions are only exposed once the capture latency
            // mitigations flag is enabled; otherwise the feature stays unsupported.
            if (Flags.efvCaptureLatency()) {
                Size sz = new Size(captureSize.width, captureSize.height);
                return initializeParcelable(
                        AdvancedExtender.this.getSupportedPostviewOutputResolutions(sz));
            } else {
                return null;
            }
        }

        @Override
        public List<SizeList> getSupportedPreviewOutputResolutions(String cameraId) {
@@ -299,9 +365,21 @@ public abstract class AdvancedExtender {
        @Override
        public LatencyRange getEstimatedCaptureLatencyRange(String cameraId,
                android.hardware.camera2.extension.Size outputSize, int format) {
            // Latency estimation is gated on the capture latency mitigations
            // flag; when disabled the feature remains unsupported.
            if (Flags.efvCaptureLatency()) {
                // Convert the parcelable size, query the public API, then
                // repackage the result into the binder 'LatencyRange' type.
                Range<Long> range = AdvancedExtender.this.getEstimatedCaptureLatencyRangeMillis(
                        new Size(outputSize.width, outputSize.height), format);
                if (range == null) {
                    return null;
                }

                LatencyRange ret = new LatencyRange();
                ret.max = range.getUpper();
                ret.min = range.getLower();
                return ret;
            } else {
                return null;
            }
        }

        @Override
        public ISessionProcessorImpl getSessionProcessor() {
@@ -356,15 +434,21 @@ public abstract class AdvancedExtender {

        @Override
        public boolean isCaptureProcessProgressAvailable() {
            // Progress reporting is only available when the capture latency
            // mitigations flag is enabled; otherwise the feature is unsupported.
            if (Flags.efvCaptureLatency()) {
                return AdvancedExtender.this.isCaptureProcessProgressAvailable();
            } else {
                return false;
            }
        }

        @Override
        public boolean isPostviewAvailable() {
            // Postview support is only reported when the capture latency
            // mitigations flag is enabled; otherwise the feature is unsupported.
            if (Flags.efvCaptureLatency()) {
                return AdvancedExtender.this.isPostviewAvailable();
            } else {
                return false;
            }
        }

        @Override
        public CameraMetadataNative getAvailableCharacteristicsKeyValues(String cameraId) {
+72 −0
Original line number Diff line number Diff line
/*
 * Copyright (C) 2025 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.extension;

import android.annotation.FlaggedApi;
import android.annotation.NonNull;
import android.annotation.SystemApi;

import com.android.internal.camera.flags.Flags;


/**
 * Helper class used to describe a single camera
 * output configuration that is passed by the
 * camera client to the extension implementation.
 *
 * @hide
 */

@FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
@SystemApi
public final class CameraConfiguration {
    // Immutable after construction: all three surfaces are supplied by the
    // camera framework when the capture session is initialized and never
    // change for the lifetime of this instance.
    private final CameraOutputSurface mPreviewOutputSurface;
    private final CameraOutputSurface mStillCaptureOutputSurface;
    private final CameraOutputSurface mPostViewOutputSurface;

    /**
     * Package-private: instances are created by the framework only, wrapping
     * the surfaces received over the extension binder interface.
     */
    CameraConfiguration(@NonNull CameraOutputSurface previewOutputSurface,
            @NonNull CameraOutputSurface stillCaptureOutputSurface,
            @NonNull CameraOutputSurface postViewOutputSurface) {
        mPreviewOutputSurface = previewOutputSurface;
        mStillCaptureOutputSurface = stillCaptureOutputSurface;
        mPostViewOutputSurface = postViewOutputSurface;
    }

    /**
     * Return the current preview output {@link CameraOutputSurface}
     */
    @NonNull
    public CameraOutputSurface getPreviewOutputSurface() {
        return mPreviewOutputSurface;
    }

    /**
     * Return the current still capture output {@link CameraOutputSurface}
     */
    @NonNull
    public CameraOutputSurface getStillCaptureOutputSurface() {
        return mStillCaptureOutputSurface;
    }

    /**
     * Return the current postview output {@link CameraOutputSurface}
     */
    @NonNull
    public CameraOutputSurface getPostViewOutputSurface() {
        return mPostViewOutputSurface;
    }
}
+146 −12
Original line number Diff line number Diff line
@@ -17,7 +17,9 @@
package android.hardware.camera2.extension;

import android.annotation.FlaggedApi;
import android.annotation.IntRange;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.SystemApi;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureFailure;
@@ -30,6 +32,7 @@ import android.os.IBinder;
import android.os.Looper;
import android.os.RemoteException;
import android.util.Log;
import android.util.Pair;

import com.android.internal.camera.flags.Flags;

@@ -186,6 +189,11 @@ public abstract class SessionProcessor {
         */
        void onCaptureCompleted(long shutterTimestamp, int requestId,
                @NonNull Map<CaptureResult.Key, Object> results);

        /**
         * Invoked to report the current post-processing progress of an
         * in-flight multi-frame capture.
         *
         * @param progress processing progress in the inclusive range [0, 100]
         * @throws UnsupportedOperationException by this default implementation
         *         when the consumer has not overridden it
         */
        @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
        default void onCaptureProcessProgressUpdated(@IntRange(from = 0, to = 100) int progress) {
            throw new UnsupportedOperationException("Subclasses must override this method");
        }
    }

    /**
@@ -228,6 +236,46 @@ public abstract class SessionProcessor {
            @NonNull CameraOutputSurface previewSurface,
            @NonNull CameraOutputSurface imageCaptureSurface);

    /**
     * Initializes the session for the extension. This is where the
     * extension implementations allocate resources for
     * preparing a CameraCaptureSession. After initSession() is called,
     * the camera ID, cameraCharacteristics and context will not change
     * until deInitSession() has been called.
     *
     * <p>The framework specifies the output surface configurations
     * via the {@code config} argument and implementations must
     * return an {@link ExtensionConfiguration} which consists of a list of
     * {@link CameraOutputSurface} and session parameters. The {@link
     * ExtensionConfiguration} will be used to configure the
     * CameraCaptureSession.</p>
     *
     * <p>Implementations are responsible for outputting correct camera
     * images to these output surfaces.</p>
     *
     * @param token Binder token that can be used to register a death
     *              notifier callback
     * @param cameraId  The camera2 id string of the camera.
     * @param map Maps camera ids to camera characteristics
     * @param config contains output surface for preview, still capture and postview
     *
     * @return an {@link ExtensionConfiguration} consisting of a list of
     * {@link CameraOutputConfig} and session parameters which will decide
     * the {@link android.hardware.camera2.params.SessionConfiguration}
     * for configuring the CameraCaptureSession. Please note that the
     * OutputConfiguration list may not be part of any
     * supported or mandatory stream combination BUT implementations must
     * ensure this list will always produce a valid camera capture
     * session.
     * @throws UnsupportedOperationException by this base implementation; extension
     * implementations using {@link CameraConfiguration} must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    @NonNull
    public ExtensionConfiguration initSession(@NonNull IBinder token,
            @NonNull String cameraId, @NonNull CharacteristicsMap map,
            @NonNull CameraConfiguration config) {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * Notify to de-initialize the extension. This callback will be
     * invoked after CameraCaptureSession is closed. After onDeInit() was
@@ -294,17 +342,17 @@ public abstract class SessionProcessor {
    public abstract void stopRepeating();

    /**
     * Start a multi-frame capture.
     * Starts a multi-frame capture.
     *
     * When the capture is completed, {@link
     * <p>When the capture is completed, {@link
     * CaptureCallback#onCaptureSequenceCompleted}
     * is called and {@code OnImageAvailableListener#onImageAvailable}
     * will also be called on the ImageReader that creates the image
     * capture output surface.
     * capture output surface.</p>
     *
     * <p>Only one capture can perform at a time. Starting a capture when
     * another capture is running  will cause onCaptureFailed to be called
     * immediately.
     * immediately.</p>
     *
     * @param executor the executor which will be used for
     *                 invoking the callbacks
@@ -314,6 +362,55 @@ public abstract class SessionProcessor {
    public abstract int startMultiFrameCapture(@NonNull Executor executor,
            @NonNull CaptureCallback callback);

    /**
     * Starts a multi-frame capture.
     *
     * <p>When the capture is completed, {@link
     * CaptureCallback#onCaptureSequenceCompleted}
     * is called and {@code OnImageAvailableListener#onImageAvailable}
     * will also be called on the ImageReader that creates the image
     * capture output surface.</p>
     *
     * <p>Only one capture can be performed at a time. Starting a capture when
     * another capture is running will cause onCaptureFailed to be called
     * immediately.</p>
     *
     * @param isPostviewRequested Indicates whether extension client requests
     *                            the postview output.
     * @param executor the executor which will be used for
     *                 invoking the callbacks
     * @param callback a callback to report the status.
     * @return the id of the capture sequence.
     * @throws UnsupportedOperationException by this base implementation; extensions that
     * support postview must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    public int startMultiFrameCapture(boolean isPostviewRequested,
            @NonNull Executor executor, @NonNull CaptureCallback callback) {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * Returns the realtime still {@link #startMultiFrameCapture(Executor, CaptureCallback)}
     * latency.
     *
     * <p>The estimation will take into account the current environment conditions, the camera
     * state and will include the time spent processing the multi-frame capture request along with
     * any additional time for encoding of the processed buffer if necessary.</p>
     *
     * @return {@code null} if the estimation is not supported or a pair that includes the estimated
     * input frame/frames camera capture latency as the first field. This is the time between
     * {@link CaptureCallback#onCaptureStarted} and
     * {@link CaptureCallback#onCaptureProcessStarted}. The second field value includes the
     * estimated post-processing latency. This is the time between
     * {@link CaptureCallback#onCaptureProcessStarted} until the processed frame returns back to
     * the client registered surface. Both first and second values will be in milliseconds. The
     * total still capture latency will be the sum of both the first and second values of the pair.
     * @throws UnsupportedOperationException by this base implementation; extensions that
     * support realtime latency estimation must override this method
     */
    @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
    @Nullable
    public Pair<Long, Long> getRealtimeStillCaptureLatency() {
        throw new UnsupportedOperationException("Subclasses must override this method");
    }

    /**
     * The camera framework will call these APIs to pass parameters from
     * the app to the extension implementation. It is expected that the
@@ -357,10 +454,21 @@ public abstract class SessionProcessor {
            mPreviewSurface = previewSurface;
            mPostviewSurface = postviewSurface;
            mImageCaptureSurface = imageCaptureSurface;
            ExtensionConfiguration config = SessionProcessor.this.initSession(token, cameraId,

            ExtensionConfiguration config;
            if (Flags.efvCaptureLatency()) {
                CameraConfiguration cameraConfig = new CameraConfiguration(
                        new CameraOutputSurface(previewSurface),
                        new CameraOutputSurface(imageCaptureSurface),
                        new CameraOutputSurface(postviewSurface));
                config = SessionProcessor.this.initSession(token, cameraId,
                        new CharacteristicsMap(charsMap), cameraConfig);
            } else {
                config = SessionProcessor.this.initSession(token, cameraId,
                        new CharacteristicsMap(charsMap),
                        new CameraOutputSurface(previewSurface),
                        new CameraOutputSurface(imageCaptureSurface));
            }
            if (config == null) {
                throw  new  IllegalArgumentException("Invalid extension configuration");
            }
@@ -423,10 +531,16 @@ public abstract class SessionProcessor {
        @Override
        public int startCapture(ICaptureCallback callback, boolean isPostviewRequested)
                throws RemoteException {
            // With the capture latency mitigations flag enabled, route through
            // the postview-aware overload; otherwise fall back to the legacy
            // entry point, which cannot honor 'isPostviewRequested'.
            if (Flags.efvCaptureLatency()) {
                return SessionProcessor.this.startMultiFrameCapture(isPostviewRequested,
                        new HandlerExecutor(new Handler(Looper.getMainLooper())),
                        new CaptureCallbackImpl(callback, mVendorId));
            } else {
                return SessionProcessor.this.startMultiFrameCapture(
                        new HandlerExecutor(new Handler(Looper.getMainLooper())),
                        new CaptureCallbackImpl(callback, mVendorId));
            }
        }

        @Override
        public void setParameters(CaptureRequest captureRequest) throws RemoteException {
@@ -443,7 +557,16 @@ public abstract class SessionProcessor {

        @Override
        public LatencyPair getRealtimeCaptureLatency() throws RemoteException {
            // Realtime latency reporting is gated on the capture latency
            // mitigations flag; when disabled the feature remains unsupported.
            if (Flags.efvCaptureLatency()) {
                // Repackage the public API pair into the binder 'LatencyPair'.
                Pair<Long, Long> pair = SessionProcessor.this.getRealtimeStillCaptureLatency();
                if (pair == null) {
                    return null;
                }
                LatencyPair ret = new LatencyPair();
                ret.first = pair.first;
                ret.second = pair.second;
                return ret;
            }
            return null;
        }
    }
@@ -516,6 +639,17 @@ public abstract class SessionProcessor {
                Log.e(TAG, "Failed to notify capture complete due to remote exception!");
            }
        }

        @FlaggedApi(Flags.FLAG_EFV_CAPTURE_LATENCY)
        @Override
        public void onCaptureProcessProgressUpdated(int progress) {
            // Forward the progress update across the binder to the client-side
            // capture callback; a dead remote is logged and otherwise ignored.
            try {
                mCaptureCallback.onCaptureProcessProgressed(progress);
            } catch (RemoteException e) {
                Log.e(TAG, "Failed to notify capture progress due to remote" +
                        " exception!");
            }
        }
    }

    @NonNull ISessionProcessorImpl getSessionProcessorBinder() {