Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 240087ed, authored by Treehugger Robot; committed by Android (Google) Code Review
Browse files

Merge "Limit maximal number of streams for virtual camera" into main

parents 6c15b1fc 8ae23595
Loading
Loading
Loading
Loading
+36 −10
Original line number Diff line number Diff line
@@ -72,10 +72,9 @@ constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;

constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};

const std::array<int32_t, 3> kOutputFormats{
    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB};
// Output pixel formats advertised by the virtual camera; stream requests in
// any other format are rejected (see isSupportedOutputFormat).
const std::array<PixelFormat, 3> kOutputFormats{
    PixelFormat::IMPLEMENTATION_DEFINED, PixelFormat::YCBCR_420_888,
    PixelFormat::BLOB};

struct Resolution {
  Resolution(const int w, const int h) : width(w), height(h) {
@@ -93,6 +92,11 @@ struct Resolution {
  const int height;
};

bool isSupportedOutputFormat(const PixelFormat pixelFormat) {
  return std::find(kOutputFormats.begin(), kOutputFormats.end(), pixelFormat) !=
         kOutputFormats.end();
}

std::optional<Resolution> getMaxResolution(
    const std::vector<SupportedStreamConfiguration>& configs) {
  auto itMax = std::max_element(configs.begin(), configs.end(),
@@ -174,6 +178,10 @@ std::optional<CameraMetadata> initCameraCharacteristics(
          .setControlAvailableAwbModes({ANDROID_CONTROL_AWB_MODE_AUTO})
          .setControlZoomRatioRange(/*min=*/1.0, /*max=*/1.0)
          .setMaxJpegSize(kMaxJpegSize)
          .setMaxNumberOutputStreams(
              VirtualCameraDevice::kMaxNumberOfRawStreams,
              VirtualCameraDevice::kMaxNumberOfProcessedStreams,
              VirtualCameraDevice::kMaxNumberOfStallStreams)
          .setSyncMaxLatency(ANDROID_SYNC_MAX_LATENCY_UNKNOWN)
          .setAvailableRequestKeys({})
          .setAvailableRequestKeys({ANDROID_CONTROL_AF_MODE})
@@ -201,7 +209,7 @@ std::optional<CameraMetadata> initCameraCharacteristics(
      getResolutionToMaxFpsMap(supportedInputConfig);

  // Add configurations for all unique input resolutions and output formats.
  for (int32_t format : kOutputFormats) {
  for (const PixelFormat format : kOutputFormats) {
    std::transform(
        resolutionToMaxFpsMap.begin(), resolutionToMaxFpsMap.end(),
        std::back_inserter(outputConfigurations), [format](const auto& entry) {
@@ -210,7 +218,7 @@ std::optional<CameraMetadata> initCameraCharacteristics(
          return MetadataBuilder::StreamConfiguration{
              .width = resolution.width,
              .height = resolution.height,
              .format = format,
              .format = static_cast<int32_t>(format),
              .minFrameDuration = std::chrono::nanoseconds(1s) / maxFps,
              .minStallDuration = 0s};
        });
@@ -300,6 +308,8 @@ bool VirtualCameraDevice::isStreamCombinationSupported(
    return false;
  }

  int numberOfProcessedStreams = 0;
  int numberOfStallStreams = 0;
  for (const Stream& stream : streamConfiguration.streams) {
    ALOGV("%s: Configuration queried: %s", __func__, stream.toString().c_str());

@@ -308,15 +318,18 @@ bool VirtualCameraDevice::isStreamCombinationSupported(
      return false;
    }

    // TODO(b/301023410) remove hardcoded format checks, verify against configuration.
    if (stream.rotation != StreamRotation::ROTATION_0 ||
        (stream.format != PixelFormat::IMPLEMENTATION_DEFINED &&
         stream.format != PixelFormat::YCBCR_420_888 &&
         stream.format != PixelFormat::BLOB)) {
        !isSupportedOutputFormat(stream.format)) {
      ALOGV("Unsupported output stream type");
      return false;
    }

    if (stream.format == PixelFormat::BLOB) {
      numberOfStallStreams++;
    } else {
      numberOfProcessedStreams++;
    }

    auto matchesSupportedInputConfig =
        [&stream](const SupportedStreamConfiguration& config) {
          return stream.width == config.width && stream.height == config.height;
@@ -328,6 +341,19 @@ bool VirtualCameraDevice::isStreamCombinationSupported(
      return false;
    }
  }

  if (numberOfProcessedStreams > kMaxNumberOfProcessedStreams) {
    ALOGE("%s: %d processed streams exceeds the supported maximum of %d",
          __func__, numberOfProcessedStreams, kMaxNumberOfProcessedStreams);
    return false;
  }

  if (numberOfStallStreams > kMaxNumberOfStallStreams) {
    ALOGE("%s: %d stall streams exceeds the supported maximum of %d", __func__,
          numberOfStallStreams, kMaxNumberOfStallStreams);
    return false;
  }

  return true;
}

+13 −0
Original line number Diff line number Diff line
@@ -94,6 +94,19 @@ class VirtualCameraDevice

  uint32_t getCameraId() const { return mCameraId; }

  // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
  static const int32_t kMaxNumberOfRawStreams = 0;

  // Maximal number of non-jpeg streams configured concurrently in single
  // session. This should be at least 3 and can be increased at the potential
  // cost of more CPU/GPU load if there are many concurrent streams.
  static const int32_t kMaxNumberOfProcessedStreams = 3;

  // Maximal number of stalling (in case of virtual camera only jpeg for now)
  // streams. Can be increased at the potential cost of more GPU/CPU load.
  static const int32_t kMaxNumberOfStallStreams = 1;

 private:
  std::shared_ptr<VirtualCameraDevice> sharedFromThis();

+82 −3
Original line number Diff line number Diff line
@@ -14,6 +14,8 @@
 * limitations under the License.
 */

#include <algorithm>
#include <iterator>
#include <memory>

#include "VirtualCameraDevice.h"
@@ -22,6 +24,7 @@
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
#include "aidl/android/hardware/camera/device/StreamConfiguration.h"
#include "aidl/android/hardware/graphics/common/PixelFormat.h"
#include "android/binder_interface_utils.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
@@ -55,6 +58,20 @@ constexpr int kHdWidth = 1280;
constexpr int kHdHeight = 720;
constexpr int kMaxFps = 30;

// VGA-sized YUV420 output stream; counted as a processed (non-stalling)
// stream by isStreamCombinationSupported.
const Stream kVgaYUV420Stream = Stream{
    .streamType = StreamType::OUTPUT,
    .width = kVgaWidth,
    .height = kVgaHeight,
    .format = PixelFormat::YCBCR_420_888,
};

// VGA-sized BLOB (jpeg) output stream; counted as a stalling stream by
// isStreamCombinationSupported.
const Stream kVgaJpegStream = Stream{
    .streamType = StreamType::OUTPUT,
    .width = kVgaWidth,
    .height = kVgaHeight,
    .format = PixelFormat::BLOB,
};

struct AvailableStreamConfiguration {
  const int width;
  const int height;
@@ -105,10 +122,11 @@ struct VirtualCameraConfigTestParam {
  std::vector<AvailableStreamConfiguration> expectedAvailableStreamConfigs;
};

class VirtualCameraDeviceTest
class VirtualCameraDeviceCharacterisicsTest
    : public testing::TestWithParam<VirtualCameraConfigTestParam> {};

TEST_P(VirtualCameraDeviceTest, cameraCharacteristicsForInputFormat) {
TEST_P(VirtualCameraDeviceCharacterisicsTest,
       cameraCharacteristicsForInputFormat) {
  const VirtualCameraConfigTestParam& param = GetParam();
  std::shared_ptr<VirtualCameraDevice> camera =
      ndk::SharedRefBase::make<VirtualCameraDevice>(kCameraId,
@@ -137,7 +155,7 @@ TEST_P(VirtualCameraDeviceTest, cameraCharacteristicsForInputFormat) {
}

INSTANTIATE_TEST_SUITE_P(
    cameraCharacteristicsForInputFormat, VirtualCameraDeviceTest,
    cameraCharacteristicsForInputFormat, VirtualCameraDeviceCharacterisicsTest,
    testing::Values(
        VirtualCameraConfigTestParam{
            .inputConfig =
@@ -227,6 +245,67 @@ INSTANTIATE_TEST_SUITE_P(
                    .streamConfiguration =
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}}}));

// Fixture providing a virtual camera that supports a single VGA YUV420 input
// configuration, used by the stream-count limit tests below.
class VirtualCameraDeviceTest : public ::testing::Test {
 public:
  void SetUp() override {
    VirtualCameraConfiguration configuration;
    configuration.supportedStreamConfigs = {
        SupportedStreamConfiguration{.width = kVgaWidth,
                                     .height = kVgaHeight,
                                     .pixelFormat = Format::YUV_420_888,
                                     .maxFps = kMaxFps}};
    configuration.virtualCameraCallback = nullptr;
    configuration.sensorOrientation = SensorOrientation::ORIENTATION_0;
    configuration.lensFacing = LensFacing::FRONT;
    mCamera = ndk::SharedRefBase::make<VirtualCameraDevice>(kCameraId,
                                                            configuration);
  }

 protected:
  std::shared_ptr<VirtualCameraDevice> mCamera;
};

TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfNonStallStreamsSuceeds) {
  // Exactly the maximal allowed number of processed (YUV) streams.
  StreamConfiguration config;
  config.streams.assign(VirtualCameraDevice::kMaxNumberOfProcessedStreams,
                        kVgaYUV420Stream);

  bool isSupported;
  ASSERT_TRUE(
      mCamera->isStreamCombinationSupported(config, &isSupported).isOk());
  EXPECT_TRUE(isSupported);
}

TEST_F(VirtualCameraDeviceTest, configureTooManyNonStallStreamsFails) {
  // One more processed (YUV) stream than the supported maximum.
  StreamConfiguration config;
  config.streams.assign(VirtualCameraDevice::kMaxNumberOfProcessedStreams + 1,
                        kVgaYUV420Stream);

  bool isSupported;
  ASSERT_TRUE(
      mCamera->isStreamCombinationSupported(config, &isSupported).isOk());
  EXPECT_FALSE(isSupported);
}

TEST_F(VirtualCameraDeviceTest, configureMaximalNumberOfStallStreamsSuceeds) {
  // Exactly the maximal allowed number of stalling (jpeg) streams.
  StreamConfiguration config;
  config.streams.assign(VirtualCameraDevice::kMaxNumberOfStallStreams,
                        kVgaJpegStream);

  bool isSupported;
  ASSERT_TRUE(
      mCamera->isStreamCombinationSupported(config, &isSupported).isOk());
  EXPECT_TRUE(isSupported);
}

TEST_F(VirtualCameraDeviceTest, configureTooManyStallStreamsFails) {
  // One more stalling (jpeg) stream than the supported maximum.
  StreamConfiguration config;
  config.streams.assign(VirtualCameraDevice::kMaxNumberOfStallStreams + 1,
                        kVgaJpegStream);

  bool isSupported;
  ASSERT_TRUE(
      mCamera->isStreamCombinationSupported(config, &isSupported).isOk());
  EXPECT_FALSE(isSupported);
}

}  // namespace
}  // namespace virtualcamera
}  // namespace companion
+8 −0
Original line number Diff line number Diff line
@@ -247,6 +247,14 @@ MetadataBuilder& MetadataBuilder::setMaxJpegSize(const int32_t size) {
  return *this;
}

MetadataBuilder& MetadataBuilder::setMaxNumberOutputStreams(
    const int32_t maxRawStreams, const int32_t maxProcessedStreams,
    const int32_t maxStallStreams) {
  // ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS holds three counts, in order:
  // raw, processed (non-stalling), and stalling streams.
  mEntryMap[ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS] =
      std::vector<int32_t>{maxRawStreams, maxProcessedStreams, maxStallStreams};
  return *this;
}

MetadataBuilder& MetadataBuilder::setSyncMaxLatency(
    camera_metadata_enum_android_sync_max_latency latency) {
  mEntryMap[ANDROID_SYNC_MAX_LATENCY] =
+8 −0
Original line number Diff line number Diff line
@@ -183,6 +183,14 @@ class MetadataBuilder {
  // See ANDROID_JPEG_SIZE in CameraMetadataTag.aidl.
  MetadataBuilder& setMaxJpegSize(int32_t size);

  // The maximum numbers of different types of output streams
  // that can be configured and used simultaneously by a camera device.
  //
  // See ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS in CameraMetadataTag.aidl.
  MetadataBuilder& setMaxNumberOutputStreams(int32_t maxRawStreams,
                                             int32_t maxProcessedStreams,
                                             int32_t maxStallStreams);

  // See ANDROID_SYNC_MAX_LATENCY in CameraMetadataTag.aidl.
  MetadataBuilder& setSyncMaxLatency(
      camera_metadata_enum_android_sync_max_latency setSyncMaxLatency);