Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit d85e1a6c authored by Ruben Brunk's avatar Ruben Brunk
Browse files

camera2: Adding legacy mode support for teeing to Video.

Bug: 15116722

- Select intermediate GL texture size based on available
  preview dimensions.
- Fixes for configure deadlock.
- Fixes for jpeg capture.

Change-Id: I3f665230defdad718de40494dd67fda5ea432bfb
parent 4961bc88
Loading
Loading
Loading
Loading
+6 −0
Original line number Diff line number Diff line
@@ -34,6 +34,7 @@ import android.util.Log;
 *      <li>{@code CONFIGURING -> IDLE}</li>
 *      <li>{@code IDLE -> CONFIGURING}</li>
 *      <li>{@code IDLE -> CAPTURING}</li>
 *      <li>{@code IDLE -> IDLE}</li>
 *      <li>{@code CAPTURING -> IDLE}</li>
 *      <li>{@code ANY -> ERROR}</li>
 * </ul>
@@ -216,12 +217,17 @@ public class CameraDeviceState {
                mCurrentState = STATE_CONFIGURING;
                break;
            case STATE_IDLE:
                if (mCurrentState == STATE_IDLE) {
                    break;
                }

                if (mCurrentState != STATE_CONFIGURING && mCurrentState != STATE_CAPTURING) {
                    Log.e(TAG, "Cannot call idle while in state: " + mCurrentState);
                    mCurrentError = CameraBinderDecorator.INVALID_OPERATION;
                    doStateTransition(STATE_ERROR);
                    break;
                }

                if (mCurrentState != STATE_IDLE && mCurrentHandler != null &&
                        mCurrentListener != null) {
                    mCurrentHandler.post(new Runnable() {
+6 −0
Original line number Diff line number Diff line
@@ -148,6 +148,12 @@ public class GLThreadManager {
        Handler handler = mGLHandlerThread.getHandler();
        handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
        mGLHandlerThread.quitSafely();
        try {
            mGLHandlerThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mGLHandlerThread.getName(), mGLHandlerThread.getId()));
        }
    }

    /**
+70 −26
Original line number Diff line number Diff line
@@ -48,7 +48,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 */
public class LegacyCameraDevice implements AutoCloseable {
    public static final String DEBUG_PROP = "HAL1ShimLogging";

    private final String TAG;

    private final int mCameraId;
@@ -56,10 +55,11 @@ public class LegacyCameraDevice implements AutoCloseable {
    private final CameraDeviceState mDeviceState = new CameraDeviceState();

    private final ConditionVariable mIdle = new ConditionVariable(/*open*/true);
    private final AtomicInteger mRequestIdCounter = new AtomicInteger(0);

    private final HandlerThread mCallbackHandlerThread = new HandlerThread("ResultThread");
    private final HandlerThread mResultThread = new HandlerThread("ResultThread");
    private final HandlerThread mCallbackHandlerThread = new HandlerThread("CallbackThread");
    private final Handler mCallbackHandler;
    private final Handler mResultHandler;
    private static final int ILLEGAL_VALUE = -1;

    private CaptureResultExtras getExtrasFromRequest(RequestHolder holder) {
@@ -81,11 +81,18 @@ public class LegacyCameraDevice implements AutoCloseable {
        public void onError(final int errorCode, RequestHolder holder) {
            mIdle.open();
            final CaptureResultExtras extras = getExtrasFromRequest(holder);
            mResultHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        mDeviceCallbacks.onCameraError(errorCode, extras);
                    } catch (RemoteException e) {
                Log.e(TAG, "Received remote exception during onCameraError callback: ", e);
                        throw new IllegalStateException(
                                "Received remote exception during onCameraError callback: ", e);
                    }
                }
            });


        }

@@ -98,37 +105,56 @@ public class LegacyCameraDevice implements AutoCloseable {
        public void onIdle() {
            mIdle.open();

            mResultHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        mDeviceCallbacks.onCameraIdle();
                    } catch (RemoteException e) {
                Log.e(TAG, "Received remote exception during onCameraIdle callback: ", e);
                        throw new IllegalStateException(
                                "Received remote exception during onCameraIdle callback: ", e);
                    }
                }
            });
        }

        @Override
        public void onCaptureStarted(RequestHolder holder) {
            final CaptureResultExtras extras = getExtrasFromRequest(holder);

            final long timestamp = System.nanoTime();
            mResultHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        // TODO: Don't fake timestamp
                mDeviceCallbacks.onCaptureStarted(extras, System.nanoTime());
                        mDeviceCallbacks.onCaptureStarted(extras, timestamp);
                    } catch (RemoteException e) {
                Log.e(TAG, "Received remote exception during onCameraError callback: ", e);
                        throw new IllegalStateException(
                                "Received remote exception during onCameraError callback: ", e);
                    }
                }
            });

        }

        @Override
        public void onCaptureResult(CameraMetadataNative result, RequestHolder holder) {
        public void onCaptureResult(final CameraMetadataNative result, RequestHolder holder) {
            final CaptureResultExtras extras = getExtrasFromRequest(holder);

            mResultHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        // TODO: Don't fake metadata
                        mDeviceCallbacks.onResultReceived(result, extras);
                    } catch (RemoteException e) {
                Log.e(TAG, "Received remote exception during onCameraError callback: ", e);
                        throw new IllegalStateException(
                                "Received remote exception during onCameraError callback: ", e);
                    }
                }
            });
        }
    };

    private final RequestThreadManager mRequestThreadManager;
@@ -161,6 +187,8 @@ public class LegacyCameraDevice implements AutoCloseable {
        mDeviceCallbacks = callbacks;
        TAG = String.format("CameraDevice-%d-LE", mCameraId);

        mResultThread.start();
        mResultHandler = new Handler(mResultThread.getLooper());
        mCallbackHandlerThread.start();
        mCallbackHandler = new Handler(mCallbackHandlerThread.getLooper());
        mDeviceState.setCameraDeviceCallbacks(mCallbackHandler, mStateListener);
@@ -244,6 +272,22 @@ public class LegacyCameraDevice implements AutoCloseable {
    public void close() {
        mRequestThreadManager.quit();
        mCallbackHandlerThread.quitSafely();
        mResultThread.quitSafely();

        try {
            mCallbackHandlerThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mCallbackHandlerThread.getName(), mCallbackHandlerThread.getId()));
        }

        try {
            mResultThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mResultThread.getName(), mResultThread.getId()));
        }

        // TODO: throw IllegalStateException in every method after close has been called
    }

+128 −4
Original line number Diff line number Diff line
@@ -20,6 +20,7 @@ import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.utils.LongParcelable;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.ConditionVariable;
@@ -28,12 +29,15 @@ import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import java.io.IOError;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
@@ -64,6 +68,7 @@ public class RequestThreadManager {
    private static final int PREVIEW_FRAME_TIMEOUT = 300; // ms
    private static final int JPEG_FRAME_TIMEOUT = 1000; // ms

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    private boolean mPreviewRunning = false;

    private volatile RequestHolder mInFlightPreview;
@@ -74,6 +79,8 @@ public class RequestThreadManager {
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;

    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue();
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;
@@ -93,6 +100,31 @@ public class RequestThreadManager {
        }
    }


    /**
     * Comparator for {@link Size} objects.
     *
     * <p>This comparator compares by rectangle area.  Tiebreaks on width.</p>
     */
    private static class SizeComparator implements Comparator<Size> {
        @Override
        public int compare(Size size, Size size2) {
            // Fail fast on null rather than producing an arbitrary ordering.
            if (size == null || size2 == null) {
                throw new NullPointerException("Null argument passed to compare");
            }
            if (size.equals(size2)) return 0;

            // Widen to long before multiplying so width * height cannot
            // overflow an int for large sensor dimensions.
            long w1 = size.getWidth();
            long w2 = size2.getWidth();
            long area1 = w1 * size.getHeight();
            long area2 = w2 * size2.getHeight();

            // Primary key: area.  Unequal sizes never compare as 0, so the
            // ordering is consistent with equals().
            if (area1 != area2) {
                return (area1 > area2) ? 1 : -1;
            }
            // Equal area but unequal sizes: tiebreak on width.
            return (w1 > w2) ? 1 : -1;
        }
    }

    /**
     * Counter class used to calculate and log the current FPS of frame production.
     */
@@ -230,7 +262,13 @@ public class RequestThreadManager {
            return; // Already running
        }

        mPreviewTexture.setDefaultBufferSize(640, 480); // TODO: size selection based on request
        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);
        Camera.Parameters params = mCamera.getParameters();
        List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
@@ -248,6 +286,7 @@ public class RequestThreadManager {
        startPreview();
    }


    private void configureOutputs(Collection<Surface> outputs) throws IOException {
        stopPreview();
        if (mGLThreadManager != null) {
@@ -261,6 +300,7 @@ public class RequestThreadManager {
        mInFlightPreview = null;
        mInFlightJpeg = null;


        for (Surface s : outputs) {
            int format = LegacyCameraDevice.nativeDetectSurfaceType(s);
            switch (format) {
@@ -273,6 +313,52 @@ public class RequestThreadManager {
            }
        }

        if (mPreviewOutputs.size() > 0) {
            List<Size> outputSizes = new ArrayList<>(outputs.size());
            for (Surface s : mPreviewOutputs) {
                int[] dimens = {0, 0};
                LegacyCameraDevice.nativeDetectSurfaceDimens(s, dimens);
                outputSizes.add(new Size(dimens[0], dimens[1]));
            }

            Size largestOutput = findLargestByArea(outputSizes);

            Camera.Parameters params = mCamera.getParameters();

            // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
            List<Size> supportedJpegSizes = convertSizeList(params.getSupportedPictureSizes());
            Size largestJpegDimen = findLargestByArea(supportedJpegSizes);

            List<Size> supportedPreviewSizes = convertSizeList(params.getSupportedPreviewSizes());

            // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
            // of the configured output dimensions.  If none exists, fall back to using the largest
            // supported preview size.
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                long currArea = s.getWidth() * s.getHeight();
                long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
            }
        }



        // TODO: Detect and optimize single-output paths here to skip stream teeing.
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId);
@@ -282,8 +368,29 @@ public class RequestThreadManager {
        mGLThreadManager.setConfigurationAndWait(mPreviewOutputs);
        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }
    }

    /**
     * Returns the largest size in the given list, ordered by rectangle area
     * (ties broken by width, per {@link SizeComparator}).
     */
    private static Size findLargestByArea(List<Size> sizes) {
        SizeComparator byArea = new SizeComparator();
        return Collections.max(sizes, byArea);
    }

    /**
     * Returns {@code true} if the two sizes have approximately the same
     * aspect ratio.
     *
     * <p>Ratios are compared as floats; any difference smaller than
     * {@code ASPECT_RATIO_TOLERANCE} counts as a match.</p>
     */
    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float ratioA = (float) a.getWidth() / a.getHeight();
        float ratioB = (float) b.getWidth() / b.getHeight();
        float delta = Math.abs(ratioA - ratioB);
        return delta < ASPECT_RATIO_TOLERANCE;
    }

    /**
     * Converts a list of legacy {@link Camera.Size} objects into the
     * camera2-style {@link Size} representation, preserving order.
     */
    private static List<Size> convertSizeList(List<Camera.Size> sizeList) {
        // Presize to avoid reallocation; the result is the same length.
        List<Size> converted = new ArrayList<>(sizeList.size());
        for (Camera.Size camSize : sizeList) {
            converted.add(new Size(camSize.width, camSize.height));
        }
        return converted;
    }

    // Calculate the highest FPS range supported
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
@@ -376,8 +483,10 @@ public class RequestThreadManager {
                            // TODO: err handling
                            throw new IOError(e);
                        }
                        // TODO: Set fields in result.
                        mDeviceState.setCaptureResult(holder, new CameraMetadataNative());
                        Camera.Parameters params = mCamera.getParameters();
                        CameraMetadataNative result = convertResultMetadata(params,
                                holder.getRequest());
                        mDeviceState.setCaptureResult(holder, result);
                    }
                    break;
                case MSG_CLEANUP:
@@ -397,6 +506,15 @@ public class RequestThreadManager {
        }
    };

    /**
     * Builds a capture result from the current legacy camera parameters and
     * the originating request.
     *
     * <p>Currently only {@code LENS_FOCAL_LENGTH} is populated from
     * {@code params}; the request is not yet consulted.</p>
     */
    private CameraMetadataNative convertResultMetadata(Camera.Parameters params,
                                                       CaptureRequest request) {
        CameraMetadataNative captureResult = new CameraMetadataNative();
        captureResult.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());

        // TODO: Remaining result metadata tags conversions.
        return captureResult;
    }

    /**
     * Create a new RequestThreadManager.
     *
@@ -437,6 +555,12 @@ public class RequestThreadManager {
        Handler handler = mRequestThread.waitAndGetHandler();
        handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
        mRequestThread.quitSafely();
        try {
            mRequestThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mRequestThread.getName(), mRequestThread.getId()));
        }
    }

    /**
+5 −0
Original line number Diff line number Diff line
@@ -431,6 +431,11 @@ public class SurfaceTextureRenderer {
    public void configureSurfaces(Collection<Surface> surfaces) {
        releaseEGLContext();

        if (surfaces == null || surfaces.size() == 0) {
            Log.w(TAG, "No output surfaces configured for GL drawing.");
            return;
        }

        for (Surface s : surfaces) {
            // If pixel conversions aren't handled by egl, use a pbuffer
            if (LegacyCameraDevice.needsConversion(s)) {