Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 927a0dd3 authored by Vadim Caen's avatar Vadim Caen Committed by Android (Google) Code Review
Browse files

Merge "Pass surface timestamp to the capture result" into main

parents 3a711421 b79f4e35
Loading
Loading
Loading
Loading
+1 −0
Original line number Original line Diff line number Diff line
@@ -25,6 +25,7 @@ cc_defaults {
        "libEGL",
        "libEGL",
        "libGLESv2",
        "libGLESv2",
        "libGLESv3",
        "libGLESv3",
        "android.companion.virtualdevice.flags-aconfig-cc",
    ],
    ],
    static_libs: [
    static_libs: [
        "android.hardware.camera.common@1.0-helper",
        "android.hardware.camera.common@1.0-helper",
+71 −33
Original line number Original line Diff line number Diff line
@@ -14,9 +14,12 @@
 * limitations under the License.
 * limitations under the License.
 */
 */


// #define LOG_NDEBUG 0
#define LOG_TAG "VirtualCameraRenderThread"
#define LOG_TAG "VirtualCameraRenderThread"
#include "VirtualCameraRenderThread.h"
#include "VirtualCameraRenderThread.h"


#include <android_companion_virtualdevice_flags.h>

#include <chrono>
#include <chrono>
#include <cstdint>
#include <cstdint>
#include <cstring>
#include <cstring>
@@ -46,13 +49,11 @@
#include "android-base/thread_annotations.h"
#include "android-base/thread_annotations.h"
#include "android/binder_auto_utils.h"
#include "android/binder_auto_utils.h"
#include "android/hardware_buffer.h"
#include "android/hardware_buffer.h"
#include "hardware/gralloc.h"
#include "system/camera_metadata.h"
#include "system/camera_metadata.h"
#include "ui/GraphicBuffer.h"
#include "ui/GraphicBuffer.h"
#include "ui/Rect.h"
#include "ui/Rect.h"
#include "util/EglFramebuffer.h"
#include "util/EglFramebuffer.h"
#include "util/JpegUtil.h"
#include "util/JpegUtil.h"
#include "util/MetadataUtil.h"
#include "util/Util.h"
#include "util/Util.h"
#include "utils/Errors.h"
#include "utils/Errors.h"


@@ -91,6 +92,8 @@ overloaded(Ts...) -> overloaded<Ts...>;


using namespace std::chrono_literals;
using namespace std::chrono_literals;


namespace flags = ::android::companion::virtualdevice::flags;

static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;


static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
@@ -117,8 +120,8 @@ NotifyMsg createBufferErrorNotifyMsg(int frameNumber, int streamId) {


NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
  NotifyMsg msg;
  NotifyMsg msg;
  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
  msg.set<NotifyMsg::Tag::error>(
      .frameNumber = frameNumber,
      ErrorMsg{.frameNumber = frameNumber,
               // errorStreamId needs to be set to -1 for ERROR_REQUEST
               // errorStreamId needs to be set to -1 for ERROR_REQUEST
               // (not tied to specific stream).
               // (not tied to specific stream).
               .errorStreamId = -1,
               .errorStreamId = -1,
@@ -413,29 +416,8 @@ void VirtualCameraRenderThread::processTask(
                                                    std::memory_order_relaxed));
                                                    std::memory_order_relaxed));


  if (request.getRequestSettings().fpsRange) {
  if (request.getRequestSettings().fpsRange) {
    const int maxFps =
    int maxFps = std::max(1, request.getRequestSettings().fpsRange->maxFps);
        std::max(1, request.getRequestSettings().fpsRange->maxFps);
    timestamp = throttleRendering(maxFps, lastAcquisitionTimestamp, timestamp);
    const std::chrono::nanoseconds minFrameDuration(
        static_cast<uint64_t>(1e9 / maxFps));
    const std::chrono::nanoseconds frameDuration =
        timestamp - lastAcquisitionTimestamp;
    if (frameDuration < minFrameDuration) {
      // We're too fast for the configured maxFps, let's wait a bit.
      const std::chrono::nanoseconds sleepTime =
          minFrameDuration - frameDuration;
      ALOGV("Current frame duration would  be %" PRIu64
            " ns corresponding to, "
            "sleeping for %" PRIu64
            " ns before updating texture to match maxFps %d",
            static_cast<uint64_t>(frameDuration.count()),
            static_cast<uint64_t>(sleepTime.count()), maxFps);

      std::this_thread::sleep_for(sleepTime);
      timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::steady_clock::now().time_since_epoch());
      mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                                 std::memory_order_relaxed);
    }
  }
  }


  // Calculate the maximal amount of time we can afford to wait for next frame.
  // Calculate the maximal amount of time we can afford to wait for next frame.
@@ -463,6 +445,17 @@ void VirtualCameraRenderThread::processTask(
  }
  }
  // Acquire new (most recent) image from the Surface.
  // Acquire new (most recent) image from the Surface.
  mEglSurfaceTexture->updateTexture();
  mEglSurfaceTexture->updateTexture();
  std::chrono::nanoseconds captureTimestamp = timestamp;

  if (flags::camera_timestamp_from_surface()) {
    std::chrono::nanoseconds surfaceTimestamp =
        getSurfaceTimestamp(elapsedDuration);
    if (surfaceTimestamp.count() > 0) {
      captureTimestamp = surfaceTimestamp;
    }
    ALOGV("%s captureTimestamp:%lld timestamp:%lld", __func__,
          captureTimestamp.count(), timestamp.count());
  }


  CaptureResult captureResult;
  CaptureResult captureResult;
  captureResult.fmqResultSize = 0;
  captureResult.fmqResultSize = 0;
@@ -472,7 +465,7 @@ void VirtualCameraRenderThread::processTask(
  captureResult.inputBuffer.streamId = -1;
  captureResult.inputBuffer.streamId = -1;
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.physicalCameraMetadata.resize(0);
  captureResult.result = createCaptureResultMetadata(
  captureResult.result = createCaptureResultMetadata(
      timestamp, request.getRequestSettings(), mReportedSensorSize);
      captureTimestamp, request.getRequestSettings(), mReportedSensorSize);


  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
  captureResult.outputBuffers.resize(buffers.size());
  captureResult.outputBuffers.resize(buffers.size());
@@ -506,7 +499,7 @@ void VirtualCameraRenderThread::processTask(
  }
  }


  std::vector<NotifyMsg> notifyMsg{
  std::vector<NotifyMsg> notifyMsg{
      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
      createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
  for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
    if (resBuffer.status != BufferStatus::OK) {
    if (resBuffer.status != BufferStatus::OK) {
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
      notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
@@ -535,6 +528,51 @@ void VirtualCameraRenderThread::processTask(
  ALOGV("%s: Successfully called processCaptureResult", __func__);
  ALOGV("%s: Successfully called processCaptureResult", __func__);
}
}


std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
    int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
    std::chrono::nanoseconds timestamp) {
  // Shortest frame duration permitted by the requested maxFps.
  const std::chrono::nanoseconds minFrameDuration(
      static_cast<uint64_t>(1e9 / maxFps));
  const std::chrono::nanoseconds frameDuration =
      timestamp - lastAcquisitionTimestamp;
  if (frameDuration < minFrameDuration) {
    // We're too fast for the configured maxFps, let's wait a bit.
    const std::chrono::nanoseconds sleepTime = minFrameDuration - frameDuration;
    ALOGV("Current frame duration would be %" PRIu64
          " ns, sleeping for %" PRIu64
          " ns before updating texture to match maxFps %d",
          static_cast<uint64_t>(frameDuration.count()),
          static_cast<uint64_t>(sleepTime.count()), maxFps);

    std::this_thread::sleep_for(sleepTime);
    // Re-read the clock after sleeping so the returned value reflects the
    // actual (post-throttle) acquisition time, and persist it so the next
    // frame's throttling is computed against this frame.
    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
        std::chrono::steady_clock::now().time_since_epoch());
    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
                                               std::memory_order_relaxed);
  }
  return timestamp;
}

std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
    std::chrono::nanoseconds timeSinceLastFrame) {
  std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
  if (surfaceTimestamp.count() < 0) {
    uint64_t lastSurfaceTimestamp =
        mLastSurfaceTimestampNanoseconds.load(std::memory_order_relaxed);
    if (lastSurfaceTimestamp > 0) {
      // The timestamps were provided by the producer but we are
      // repeating the last frame, so we increase the previous timestamp by
      // the elapsed time since its capture, otherwise the camera framework
      // will discard the frame.
      surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
                                                  timeSinceLastFrame.count());
    }
  }
  // Only remember a valid timestamp: storing a negative count into the
  // unsigned atomic would wrap to a huge positive value that the next call
  // would mistake for a real previous timestamp.
  if (surfaceTimestamp.count() > 0) {
    mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
                                           std::memory_order_relaxed);
  }
  return surfaceTimestamp;
}

void VirtualCameraRenderThread::flushCaptureRequest(
void VirtualCameraRenderThread::flushCaptureRequest(
    const ProcessCaptureRequestTask& request) {
    const ProcessCaptureRequestTask& request) {
  CaptureResult captureResult;
  CaptureResult captureResult;
+18 −0
Original line number Original line Diff line number Diff line
@@ -18,6 +18,7 @@
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H


#include <atomic>
#include <atomic>
#include <chrono>
#include <cstdint>
#include <cstdint>
#include <deque>
#include <deque>
#include <future>
#include <future>
@@ -188,6 +189,22 @@ class VirtualCameraRenderThread {
      EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
      EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
      std::optional<Rect> viewport = std::nullopt);
      std::optional<Rect> viewport = std::nullopt);


  // Throttle the current thread to ensure that we are not rendering faster than
  // the provided maxFps.
  // maxFps: The maximum fps in the capture request
  // lastAcquisitionTimestamp: timestamp of the previous frame
  // timestamp: the current capture time
  // Returns the time at which the capture has happened after throttling.
  std::chrono::nanoseconds throttleRendering(
      int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
      std::chrono::nanoseconds timestamp);

  // Fetch the timestamp of the latest buffer from the EGL Surface
  // timeSinceLastFrame: The elapsed time since the last captured frame.
  // Return 0 if no timestamp has been associated to this surface by the producer.
  std::chrono::nanoseconds getSurfaceTimestamp(
      std::chrono::nanoseconds timeSinceLastFrame);

  // Camera callback
  // Camera callback
  const std::shared_ptr<
  const std::shared_ptr<
      ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
      ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
@@ -209,6 +226,7 @@ class VirtualCameraRenderThread {


  // Acquisition timestamp of last frame.
  // Acquisition timestamp of last frame.
  std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
  std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
  std::atomic<uint64_t> mLastSurfaceTimestampNanoseconds;


  // EGL helpers - constructed and accessed only from rendering thread.
  // EGL helpers - constructed and accessed only from rendering thread.
  std::unique_ptr<EglDisplayContext> mEglDisplayContext;
  std::unique_ptr<EglDisplayContext> mEglDisplayContext;
+6 −0
Original line number Original line Diff line number Diff line
@@ -15,6 +15,8 @@
 */
 */


// #define LOG_NDEBUG 0
// #define LOG_NDEBUG 0
#include <chrono>

#include "utils/Timers.h"
#include "utils/Timers.h"
#define LOG_TAG "EglSurfaceTexture"
#define LOG_TAG "EglSurfaceTexture"


@@ -99,6 +101,10 @@ bool EglSurfaceTexture::waitForNextFrame(const std::chrono::nanoseconds timeout)
                                    static_cast<nsecs_t>(timeout.count()));
                                    static_cast<nsecs_t>(timeout.count()));
}
}


std::chrono::nanoseconds EglSurfaceTexture::getTimestamp() {
  // Timestamp associated by the producer with the buffer most recently
  // latched by updateTexture, forwarded from the underlying consumer.
  const auto consumerTimestampNs = mGlConsumer->getTimestamp();
  return std::chrono::nanoseconds(consumerTimestampNs);
}

GLuint EglSurfaceTexture::updateTexture() {
GLuint EglSurfaceTexture::updateTexture() {
  int previousFrameId;
  int previousFrameId;
  int framesAdvance = 0;
  int framesAdvance = 0;
+4 −0
Original line number Original line Diff line number Diff line
@@ -82,6 +82,10 @@ class EglSurfaceTexture {
  // See SurfaceTexture.getTransformMatrix for more details.
  // See SurfaceTexture.getTransformMatrix for more details.
  std::array<float, 16> getTransformMatrix();
  std::array<float, 16> getTransformMatrix();


  // Retrieves the timestamp associated with the texture image
  // set by the most recent call to updateTexture.
  std::chrono::nanoseconds getTimestamp();

 private:
 private:
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
  sp<IGraphicBufferProducer> mBufferProducer;
  sp<IGraphicBufferProducer> mBufferProducer;