Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit c28fe976 authored by Android Build Coastguard Worker's avatar Android Build Coastguard Worker
Browse files

Snap for 10196304 from 2aae677f to udc-release

Change-Id: I4205647d2605ba26087cb70839b8c9ecf773c907
parents b8e4dbdb 2aae677f
Loading
Loading
Loading
Loading
+2 −0
Original line number Diff line number Diff line
@@ -39,10 +39,12 @@ typedef enum {

// Target output formats for the UltraHDR decoder. The value selects both the
// pixel format of the destination buffer and the transfer function applied.
typedef enum {
  ULTRAHDR_OUTPUT_UNSPECIFIED = -1, // sentinel: no/invalid format requested
  ULTRAHDR_OUTPUT_SDR,          // SDR in RGBA_8888 color format
  ULTRAHDR_OUTPUT_HDR_LINEAR,   // HDR in F16 color format (linear)
  ULTRAHDR_OUTPUT_HDR_PQ,       // HDR in RGBA_1010102 color format (PQ transfer function)
  ULTRAHDR_OUTPUT_HDR_HLG,      // HDR in RGBA_1010102 color format (HLG transfer function)
  ULTRAHDR_OUTPUT_MAX = ULTRAHDR_OUTPUT_HDR_HLG, // highest valid value, for range checks
} ultrahdr_output_format;

/*
+69 −4
Original line number Diff line number Diff line
@@ -26,6 +26,8 @@ using namespace std;

namespace android::ultrahdr {

// Rounds x up to the nearest multiple of m using integer arithmetic.
// Intended for non-negative x and m > 0; both arguments are evaluated twice.
#define ALIGNM(x, m)  ((((x) + ((m) - 1)) / (m)) * (m))

// JPEG application-segment markers used by this codec.
const uint32_t kAPP0Marker = JPEG_APP0;      // JFIF
const uint32_t kAPP1Marker = JPEG_APP0 + 1;  // EXIF, XMP
const uint32_t kAPP2Marker = JPEG_APP0 + 2;  // ICC
@@ -224,7 +226,14 @@ bool JpegDecoderHelper::decode(const void* image, int length, bool decodeToRGBA)
        cinfo.out_color_space = JCS_EXT_RGBA;
    } else {
        if (cinfo.jpeg_color_space == JCS_YCbCr) {
            // 1 byte per pixel for Y, 0.5 byte per pixel for U+V
            if (cinfo.comp_info[0].h_samp_factor != 2 ||
                cinfo.comp_info[1].h_samp_factor != 1 ||
                cinfo.comp_info[2].h_samp_factor != 1 ||
                cinfo.comp_info[0].v_samp_factor != 2 ||
                cinfo.comp_info[1].v_samp_factor != 1 ||
                cinfo.comp_info[2].v_samp_factor != 1) {
                return false;
            }
            mResultBuffer.resize(cinfo.image_width * cinfo.image_height * 3 / 2, 0);
        } else if (cinfo.jpeg_color_space == JCS_GRAYSCALE) {
            mResultBuffer.resize(cinfo.image_width * cinfo.image_height, 0);
@@ -342,7 +351,6 @@ bool JpegDecoderHelper::decompressRGBA(jpeg_decompress_struct* cinfo, const uint
}

bool JpegDecoderHelper::decompressYUV(jpeg_decompress_struct* cinfo, const uint8_t* dest) {

    JSAMPROW y[kCompressBatchSize];
    JSAMPROW cb[kCompressBatchSize / 2];
    JSAMPROW cr[kCompressBatchSize / 2];
@@ -356,6 +364,32 @@ bool JpegDecoderHelper::decompressYUV(jpeg_decompress_struct* cinfo, const uint8
    std::unique_ptr<uint8_t[]> empty(new uint8_t[cinfo->image_width]);
    memset(empty.get(), 0, cinfo->image_width);

    const int aligned_width = ALIGNM(cinfo->image_width, kCompressBatchSize);
    bool is_width_aligned = (aligned_width == cinfo->image_width);
    std::unique_ptr<uint8_t[]> buffer_intrm = nullptr;
    uint8_t* y_plane_intrm = nullptr;
    uint8_t* u_plane_intrm = nullptr;
    uint8_t* v_plane_intrm = nullptr;
    JSAMPROW y_intrm[kCompressBatchSize];
    JSAMPROW cb_intrm[kCompressBatchSize / 2];
    JSAMPROW cr_intrm[kCompressBatchSize / 2];
    JSAMPARRAY planes_intrm[3] {y_intrm, cb_intrm, cr_intrm};
    if (!is_width_aligned) {
        size_t mcu_row_size = aligned_width * kCompressBatchSize * 3 / 2;
        buffer_intrm = std::make_unique<uint8_t[]>(mcu_row_size);
        y_plane_intrm = buffer_intrm.get();
        u_plane_intrm = y_plane_intrm + (aligned_width * kCompressBatchSize);
        v_plane_intrm = u_plane_intrm + (aligned_width * kCompressBatchSize) / 4;
        for (int i = 0; i < kCompressBatchSize; ++i) {
            y_intrm[i] = y_plane_intrm + i * aligned_width;
        }
        for (int i = 0; i < kCompressBatchSize / 2; ++i) {
            int offset_intrm = i * (aligned_width / 2);
            cb_intrm[i] = u_plane_intrm + offset_intrm;
            cr_intrm[i] = v_plane_intrm + offset_intrm;
        }
    }

    while (cinfo->output_scanline < cinfo->image_height) {
        for (int i = 0; i < kCompressBatchSize; ++i) {
            size_t scanline = cinfo->output_scanline + i;
@@ -377,11 +411,21 @@ bool JpegDecoderHelper::decompressYUV(jpeg_decompress_struct* cinfo, const uint8
            }
        }

        int processed = jpeg_read_raw_data(cinfo, planes, kCompressBatchSize);
        int processed = jpeg_read_raw_data(cinfo, is_width_aligned ? planes : planes_intrm,
                                           kCompressBatchSize);
        if (processed != kCompressBatchSize) {
            ALOGE("Number of processed lines does not equal input lines.");
            return false;
        }
        if (!is_width_aligned) {
            for (int i = 0; i < kCompressBatchSize; ++i) {
                memcpy(y[i], y_intrm[i], cinfo->image_width);
            }
            for (int i = 0; i < kCompressBatchSize / 2; ++i) {
                memcpy(cb[i], cb_intrm[i], cinfo->image_width / 2);
                memcpy(cr[i], cr_intrm[i], cinfo->image_width / 2);
            }
        }
    }
    return true;
}
@@ -394,6 +438,21 @@ bool JpegDecoderHelper::decompressSingleChannel(jpeg_decompress_struct* cinfo, c
    std::unique_ptr<uint8_t[]> empty(new uint8_t[cinfo->image_width]);
    memset(empty.get(), 0, cinfo->image_width);

    int aligned_width = ALIGNM(cinfo->image_width, kCompressBatchSize);
    bool is_width_aligned = (aligned_width == cinfo->image_width);
    std::unique_ptr<uint8_t[]> buffer_intrm = nullptr;
    uint8_t* y_plane_intrm = nullptr;
    JSAMPROW y_intrm[kCompressBatchSize];
    JSAMPARRAY planes_intrm[1] {y_intrm};
    if (!is_width_aligned) {
        size_t mcu_row_size = aligned_width * kCompressBatchSize;
        buffer_intrm = std::make_unique<uint8_t[]>(mcu_row_size);
        y_plane_intrm = buffer_intrm.get();
        for (int i = 0; i < kCompressBatchSize; ++i) {
            y_intrm[i] = y_plane_intrm + i * aligned_width;
        }
    }

    while (cinfo->output_scanline < cinfo->image_height) {
        for (int i = 0; i < kCompressBatchSize; ++i) {
            size_t scanline = cinfo->output_scanline + i;
@@ -404,11 +463,17 @@ bool JpegDecoderHelper::decompressSingleChannel(jpeg_decompress_struct* cinfo, c
            }
        }

        int processed = jpeg_read_raw_data(cinfo, planes, kCompressBatchSize);
        int processed = jpeg_read_raw_data(cinfo, is_width_aligned ? planes : planes_intrm,
                                           kCompressBatchSize);
        if (processed != kCompressBatchSize / 2) {
            ALOGE("Number of processed lines does not equal input lines.");
            return false;
        }
        if (!is_width_aligned) {
            for (int i = 0; i < kCompressBatchSize; ++i) {
                memcpy(y[i], y_intrm[i], cinfo->image_width);
            }
        }
    }
    return true;
}
+35 −6
Original line number Diff line number Diff line
@@ -65,6 +65,13 @@ static const char* const kJpegrVersion = "1.0";

// The gain map is stored at quarter resolution in each dimension,
// i.e. one-sixteenth of the primary image's pixel count.
static const size_t kMapDimensionScaleFactor = 4;

// Gain map width is (image_width / kMapDimensionScaleFactor). If the gain map
// were compressed as 4:2:0 JPEG we would need at least 2 samples per
// dimension; for grayscale 1 sample would suffice, but we conservatively
// require 2 either way. These are therefore the minimum input dimensions.
static const int kMinWidth = 2 * kMapDimensionScaleFactor;
static const int kMinHeight = 2 * kMapDimensionScaleFactor;

// JPEG block size.
// JPEG encoding / decoding will require block based DCT transform 16 x 16 for luma,
// and 8 x 8 for chroma.
@@ -105,10 +112,10 @@ status_t JpegR::areInputArgumentsValid(jr_uncompressed_ptr uncompressed_p010_ima
    return ERROR_JPEGR_INVALID_INPUT_TYPE;
  }

  if (uncompressed_p010_image->width == 0
          || uncompressed_p010_image->height == 0) {
    ALOGE("Image dimensions cannot be zero, image dimensions %dx%d",
          uncompressed_p010_image->width, uncompressed_p010_image->height);
  if (uncompressed_p010_image->width < kMinWidth
          || uncompressed_p010_image->height < kMinHeight) {
    ALOGE("Image dimensions cannot be less than %dx%d, image dimensions %dx%d",
          kMinWidth, kMinHeight, uncompressed_p010_image->width, uncompressed_p010_image->height);
    return ERROR_JPEGR_INVALID_INPUT_TYPE;
  }

@@ -469,11 +476,33 @@ status_t JpegR::decodeJPEGR(jr_compressed_ptr compressed_jpegr_image,
                            ultrahdr_output_format output_format,
                            jr_uncompressed_ptr gain_map,
                            ultrahdr_metadata_ptr metadata) {
  if (compressed_jpegr_image == nullptr || dest == nullptr) {
  if (compressed_jpegr_image == nullptr || compressed_jpegr_image->data == nullptr) {
    ALOGE("received nullptr for compressed jpegr image");
    return ERROR_JPEGR_INVALID_NULL_PTR;
  }

  if (dest == nullptr || dest->data == nullptr) {
    ALOGE("received nullptr for dest image");
    return ERROR_JPEGR_INVALID_NULL_PTR;
  }

  if (max_display_boost < 1.0f) {
    ALOGE("received bad value for max_display_boost %f", max_display_boost);
    return ERROR_JPEGR_INVALID_INPUT_TYPE;
  }

  if (exif != nullptr && exif->data == nullptr) {
    ALOGE("received nullptr address for exif data");
    return ERROR_JPEGR_INVALID_INPUT_TYPE;
  }

  if (output_format <= ULTRAHDR_OUTPUT_UNSPECIFIED || output_format > ULTRAHDR_OUTPUT_MAX) {
    ALOGE("received bad value for output format %d", output_format);
    return ERROR_JPEGR_INVALID_INPUT_TYPE;
  }

  if (gain_map != nullptr && gain_map->data == nullptr) {
    ALOGE("received nullptr address for gain map data");
    return ERROR_JPEGR_INVALID_INPUT_TYPE;
  }

+171 −4
Original line number Diff line number Diff line
@@ -89,6 +89,51 @@ static bool loadFile(const char filename[], void*& result, int* fileLength) {
  return true;
}

// Loads a raw, tightly packed P010 (YUV 4:2:0, 2 bytes/sample) file into
// |img|. Luma rows of img->width samples are placed at the configured luma
// stride (img->luma_stride, or img->width when 0). When |isUVContiguous| is
// true the interleaved UV plane is appended to the same allocation
// (img->data); otherwise it goes into a separate allocation
// (img->chroma_data) laid out with img->chroma_stride.
// Returns true on success. The fd is closed on every path; buffers already
// attached to |img| on failure are left for the caller (TearDown) to free.
static bool loadP010Image(const char *filename, jr_uncompressed_ptr img,
                          bool isUVContiguous) {
  int fd = open(filename, O_RDONLY | O_CLOEXEC);
  if (fd < 0) {
    return false;
  }
  const int bpp = 2;  // P010 stores every sample in 2 bytes
  int lumaStride = img->luma_stride == 0 ? img->width : img->luma_stride;
  int lumaSize = bpp * lumaStride * img->height;
  int chromaSize = bpp * (img->height / 2) *
                   (isUVContiguous ? lumaStride : img->chroma_stride);
  img->data = malloc(lumaSize + (isUVContiguous ? chromaSize : 0));
  if (img->data == nullptr) {
    ALOGE("loadP010Image(): failed to allocate memory for luma data.");
    close(fd);  // previously leaked on this path
    return false;
  }
  uint8_t *mem = static_cast<uint8_t *>(img->data);
  // The file is packed (img->width samples per row); the destination may be
  // wider (strided), so copy one row at a time.
  for (int i = 0; i < img->height; i++) {
    if (read(fd, mem, img->width * bpp) != img->width * bpp) {
      close(fd);
      return false;
    }
    mem += lumaStride * bpp;
  }
  int chromaStride = lumaStride;
  if (!isUVContiguous) {
    img->chroma_data = malloc(chromaSize);
    if (img->chroma_data == nullptr) {
      ALOGE("loadP010Image(): failed to allocate memory for chroma data.");
      close(fd);  // previously leaked on this path
      return false;
    }
    mem = static_cast<uint8_t *>(img->chroma_data);
    chromaStride = img->chroma_stride;
  }
  // Interleaved UV plane: height/2 rows of img->width samples.
  for (int i = 0; i < img->height / 2; i++) {
    if (read(fd, mem, img->width * bpp) != img->width * bpp) {
      close(fd);
      return false;
    }
    mem += chromaStride * bpp;
  }
  close(fd);
  return true;
}

class JpegRTest : public testing::Test {
public:
  JpegRTest();
@@ -98,10 +143,11 @@ protected:
  virtual void SetUp();
  virtual void TearDown();

  struct jpegr_uncompressed_struct mRawP010Image;
  struct jpegr_uncompressed_struct mRawP010ImageWithStride;
  struct jpegr_uncompressed_struct mRawYuv420Image;
  struct jpegr_compressed_struct mJpegImage;
  struct jpegr_uncompressed_struct mRawP010Image{};
  struct jpegr_uncompressed_struct mRawP010ImageWithStride{};
  struct jpegr_uncompressed_struct mRawP010ImageWithChromaData{};
  struct jpegr_uncompressed_struct mRawYuv420Image{};
  struct jpegr_compressed_struct mJpegImage{};
};

JpegRTest::JpegRTest() {}
@@ -110,7 +156,11 @@ JpegRTest::~JpegRTest() {}
// Per-test setup: nothing to initialize here; fixture image members are
// populated lazily by the individual tests.
void JpegRTest::SetUp() {}
// Releases any buffers the tests attached to the fixture images.
// free(nullptr) is a no-op, so members a test never populated are safe.
void JpegRTest::TearDown() {
  free(mRawP010Image.data);
  free(mRawP010Image.chroma_data);
  free(mRawP010ImageWithStride.data);
  free(mRawP010ImageWithStride.chroma_data);
  free(mRawP010ImageWithChromaData.data);
  free(mRawP010ImageWithChromaData.chroma_data);
  free(mRawYuv420Image.data);
  free(mJpegImage.data);
}
@@ -286,6 +336,8 @@ TEST_F(JpegRTest, encodeAPI0ForInvalidArgs) {
      &mRawP010ImageWithStride, ultrahdr_transfer_function::ULTRAHDR_TF_HLG, &jpegR,
      DEFAULT_JPEG_QUALITY, nullptr)) << "fail, API allows bad chroma stride";

  mRawP010ImageWithStride.chroma_data = nullptr;

  free(jpegR.data);
}

@@ -734,6 +786,7 @@ TEST_F(JpegRTest, encodeAPI3ForInvalidArgs) {
  EXPECT_NE(OK, jpegRCodec.encodeJPEGR(
      &mRawP010ImageWithStride, &jpegR, ultrahdr_transfer_function::ULTRAHDR_TF_HLG,
      &jpegR)) << "fail, API allows bad chroma stride";
  mRawP010ImageWithStride.chroma_data = nullptr;

  // bad compressed image
  EXPECT_NE(OK, jpegRCodec.encodeJPEGR(
@@ -769,6 +822,45 @@ TEST_F(JpegRTest, encodeAPI4ForInvalidArgs) {
  free(jpegR.data);
}

/* Test Decode API invalid arguments. Each call must be rejected for the one
 * argument under test, so every other argument is kept valid. */
TEST_F(JpegRTest, decodeAPIForInvalidArgs) {
  // We are not really compressing anything, so keep allocations minimal.
  jpegr_compressed_struct jpegR;
  jpegR.maxLength = 16 * sizeof(uint8_t);
  jpegR.data = malloc(jpegR.maxLength);
  ASSERT_NE(jpegR.data, nullptr);

  // We are not really decoding anything, so keep allocations minimal.
  // (mRawP010Image.data is released by TearDown.)
  mRawP010Image.data = malloc(16);
  ASSERT_NE(mRawP010Image.data, nullptr);

  JpegR jpegRCodec;

  // test jpegr image
  EXPECT_NE(OK, jpegRCodec.decodeJPEGR(
        nullptr, &mRawP010Image)) << "fail, API allows nullptr for jpegr img";

  // test dest image
  EXPECT_NE(OK, jpegRCodec.decodeJPEGR(
        &jpegR, nullptr)) << "fail, API allows nullptr for dest";

  // test max display boost (values below 1.0 are invalid)
  EXPECT_NE(OK, jpegRCodec.decodeJPEGR(
        &jpegR, &mRawP010Image, 0.5)) << "fail, API allows invalid max display boost";

  // test output format. Pass a valid max display boost (1.0) so these cases
  // fail because of the output format — the boost argument is validated
  // before the format, and 0.5 would mask a missing format check.
  EXPECT_NE(OK, jpegRCodec.decodeJPEGR(
        &jpegR, &mRawP010Image, 1.0, nullptr,
        static_cast<ultrahdr_output_format>(-1))) << "fail, API allows invalid output format";

  EXPECT_NE(OK, jpegRCodec.decodeJPEGR(
        &jpegR, &mRawP010Image, 1.0, nullptr,
        static_cast<ultrahdr_output_format>(ULTRAHDR_OUTPUT_MAX + 1)))
        << "fail, API allows invalid output format";

  free(jpegR.data);
}

TEST_F(JpegRTest, writeXmpThenRead) {
  ultrahdr_metadata_struct metadata_expected;
  metadata_expected.version = "1.0";
@@ -792,6 +884,81 @@ TEST_F(JpegRTest, writeXmpThenRead) {
  EXPECT_FLOAT_EQ(metadata_expected.minContentBoost, metadata_read.minContentBoost);
}

/* Test Encode API-0: the same pixel data presented as packed, luma-strided,
 * and separately-strided-chroma P010 must all produce byte-identical JPEGR
 * output. All fixture buffers are released by TearDown. */
TEST_F(JpegRTest, encodeFromP010) {
  int ret;

  // Baseline: tightly packed P010 input (no stride padding, contiguous UV).
  mRawP010Image.width = TEST_IMAGE_WIDTH;
  mRawP010Image.height = TEST_IMAGE_HEIGHT;
  mRawP010Image.colorGamut = ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
  // Load input files.
  if (!loadP010Image(RAW_P010_IMAGE, &mRawP010Image, true)) {
    FAIL() << "Load file " << RAW_P010_IMAGE << " failed";
  }

  JpegR jpegRCodec;

  jpegr_compressed_struct jpegR;
  jpegR.maxLength = TEST_IMAGE_WIDTH * TEST_IMAGE_HEIGHT * sizeof(uint8_t);
  jpegR.data = malloc(jpegR.maxLength);
  ret = jpegRCodec.encodeJPEGR(
      &mRawP010Image, ultrahdr_transfer_function::ULTRAHDR_TF_HLG, &jpegR, DEFAULT_JPEG_QUALITY,
      nullptr);
  if (ret != OK) {
    FAIL() << "Error code is " << ret;
  }

  // Same pixels with a padded luma stride (UV still contiguous). The encoder
  // must ignore the padding, so the output is expected to match the baseline.
  mRawP010ImageWithStride.width = TEST_IMAGE_WIDTH;
  mRawP010ImageWithStride.height = TEST_IMAGE_HEIGHT;
  mRawP010ImageWithStride.luma_stride = TEST_IMAGE_WIDTH + 128;
  mRawP010ImageWithStride.colorGamut = ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
  // Load input files.
  if (!loadP010Image(RAW_P010_IMAGE, &mRawP010ImageWithStride, true)) {
    FAIL() << "Load file " << RAW_P010_IMAGE << " failed";
  }

  jpegr_compressed_struct jpegRWithStride;
  jpegRWithStride.maxLength = jpegR.length;
  jpegRWithStride.data = malloc(jpegRWithStride.maxLength);
  ret = jpegRCodec.encodeJPEGR(
      &mRawP010ImageWithStride, ultrahdr_transfer_function::ULTRAHDR_TF_HLG, &jpegRWithStride,
      DEFAULT_JPEG_QUALITY, nullptr);
  if (ret != OK) {
    FAIL() << "Error code is " << ret;
  }
  ASSERT_EQ(jpegR.length, jpegRWithStride.length)
      << "Same input is yielding different output";
  ASSERT_EQ(0, memcmp(jpegR.data, jpegRWithStride.data, jpegR.length))
      << "Same input is yielding different output";

  // Same pixels again, now with distinct luma and chroma strides and the
  // chroma plane in its own allocation; output must still match the baseline.
  mRawP010ImageWithChromaData.width = TEST_IMAGE_WIDTH;
  mRawP010ImageWithChromaData.height = TEST_IMAGE_HEIGHT;
  mRawP010ImageWithChromaData.luma_stride = TEST_IMAGE_WIDTH + 64;
  mRawP010ImageWithChromaData.chroma_stride = TEST_IMAGE_WIDTH + 256;
  mRawP010ImageWithChromaData.colorGamut = ultrahdr_color_gamut::ULTRAHDR_COLORGAMUT_BT2100;
  // Load input files.
  if (!loadP010Image(RAW_P010_IMAGE, &mRawP010ImageWithChromaData, false)) {
    FAIL() << "Load file " << RAW_P010_IMAGE << " failed";
  }
  jpegr_compressed_struct jpegRWithChromaData;
  jpegRWithChromaData.maxLength = jpegR.length;
  jpegRWithChromaData.data = malloc(jpegRWithChromaData.maxLength);
  ret = jpegRCodec.encodeJPEGR(
      &mRawP010ImageWithChromaData, ultrahdr_transfer_function::ULTRAHDR_TF_HLG,
      &jpegRWithChromaData, DEFAULT_JPEG_QUALITY, nullptr);
  if (ret != OK) {
    FAIL() << "Error code is " << ret;
  }
  ASSERT_EQ(jpegR.length, jpegRWithChromaData.length)
      << "Same input is yielding different output";
  ASSERT_EQ(0, memcmp(jpegR.data, jpegRWithChromaData.data, jpegR.length))
      << "Same input is yielding different output";

  free(jpegR.data);
  free(jpegRWithStride.data);
  free(jpegRWithChromaData.data);
}

/* Test Encode API-0 and decode */
TEST_F(JpegRTest, encodeFromP010ThenDecode) {
  int ret;
+31 −30
Original line number Diff line number Diff line
@@ -151,21 +151,22 @@ void InputDevice::addEmptyEventHubDevice(int32_t eventHubId) {
        return;
    }
    std::unique_ptr<InputDeviceContext> contextPtr(new InputDeviceContext(*this, eventHubId));
    std::vector<std::unique_ptr<InputMapper>> mappers;

    mDevices.insert({eventHubId, std::make_pair(std::move(contextPtr), std::move(mappers))});
    mDevices.insert(
            {eventHubId,
             std::make_pair<std::unique_ptr<InputDeviceContext>,
                            std::vector<std::unique_ptr<InputMapper>>>(std::move(contextPtr), {})});
}

void InputDevice::addEventHubDevice(int32_t eventHubId,
void InputDevice::populateMappers(int32_t eventHubId,
                                  const InputReaderConfiguration& readerConfig) {
    if (mDevices.find(eventHubId) != mDevices.end()) {
        return;
    }
    std::unique_ptr<InputDeviceContext> contextPtr(new InputDeviceContext(*this, eventHubId));
    std::vector<std::unique_ptr<InputMapper>> mappers = createMappers(*contextPtr, readerConfig);

    // insert the context into the devices set
    mDevices.insert({eventHubId, std::make_pair(std::move(contextPtr), std::move(mappers))});
    auto targetDevice = mDevices.find(eventHubId);
    LOG_ALWAYS_FATAL_IF(targetDevice == mDevices.end(),
                        "InputDevice::populateMappers(): missing device with eventHubId %d ",
                        eventHubId);
    // create and add mappers to device
    InputDeviceContext& context = *targetDevice->second.first;
    std::vector<std::unique_ptr<InputMapper>> mappers = createMappers(context, readerConfig);
    targetDevice->second.second = std::move(mappers);
    // Must change generation to flag this device as changed
    bumpGeneration();
}
@@ -440,29 +441,29 @@ int32_t InputDevice::getState(uint32_t sourceMask, int32_t code, GetStateFunc ge
}

std::vector<std::unique_ptr<InputMapper>> InputDevice::createMappers(
        InputDeviceContext& contextPtr, const InputReaderConfiguration& readerConfig) {
    ftl::Flags<InputDeviceClass> classes = contextPtr.getDeviceClasses();
        InputDeviceContext& context, const InputReaderConfiguration& readerConfig) {
    ftl::Flags<InputDeviceClass> classes = context.getDeviceClasses();
    std::vector<std::unique_ptr<InputMapper>> mappers;

    // Switch-like devices.
    if (classes.test(InputDeviceClass::SWITCH)) {
        mappers.push_back(createInputMapper<SwitchInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<SwitchInputMapper>(context, readerConfig));
    }

    // Scroll wheel-like devices.
    if (classes.test(InputDeviceClass::ROTARY_ENCODER)) {
        mappers.push_back(createInputMapper<RotaryEncoderInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<RotaryEncoderInputMapper>(context, readerConfig));
    }

    // Vibrator-like devices.
    if (classes.test(InputDeviceClass::VIBRATOR)) {
        mappers.push_back(createInputMapper<VibratorInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<VibratorInputMapper>(context, readerConfig));
    }

    // Battery-like devices or light-containing devices.
    // PeripheralController will be created with associated EventHub device.
    if (classes.test(InputDeviceClass::BATTERY) || classes.test(InputDeviceClass::LIGHT)) {
        mController = std::make_unique<PeripheralController>(contextPtr);
        mController = std::make_unique<PeripheralController>(context);
    }

    // Keyboard-like devices.
@@ -482,13 +483,13 @@ std::vector<std::unique_ptr<InputMapper>> InputDevice::createMappers(
    }

    if (keyboardSource != 0) {
        mappers.push_back(createInputMapper<KeyboardInputMapper>(contextPtr, readerConfig,
        mappers.push_back(createInputMapper<KeyboardInputMapper>(context, readerConfig,
                                                                 keyboardSource, keyboardType));
    }

    // Cursor-like devices.
    if (classes.test(InputDeviceClass::CURSOR)) {
        mappers.push_back(createInputMapper<CursorInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<CursorInputMapper>(context, readerConfig));
    }

    // Touchscreens and touchpad devices.
@@ -496,31 +497,31 @@ std::vector<std::unique_ptr<InputMapper>> InputDevice::createMappers(
            sysprop::InputProperties::enable_touchpad_gestures_library().value_or(true);
    // TODO(b/272518665): Fix the new touchpad stack for Sony DualShock 4 (5c4, 9cc) touchpads, or
    // at least load this setting from the IDC file.
    const InputDeviceIdentifier identifier = contextPtr.getDeviceIdentifier();
    const InputDeviceIdentifier identifier = context.getDeviceIdentifier();
    const bool isSonyDualShock4Touchpad = identifier.vendor == 0x054c &&
            (identifier.product == 0x05c4 || identifier.product == 0x09cc);
    if (ENABLE_TOUCHPAD_GESTURES_LIBRARY && classes.test(InputDeviceClass::TOUCHPAD) &&
        classes.test(InputDeviceClass::TOUCH_MT) && !isSonyDualShock4Touchpad) {
        mappers.push_back(createInputMapper<TouchpadInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<TouchpadInputMapper>(context, readerConfig));
    } else if (classes.test(InputDeviceClass::TOUCH_MT)) {
        mappers.push_back(std::make_unique<MultiTouchInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<MultiTouchInputMapper>(context, readerConfig));
    } else if (classes.test(InputDeviceClass::TOUCH)) {
        mappers.push_back(std::make_unique<SingleTouchInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<SingleTouchInputMapper>(context, readerConfig));
    }

    // Joystick-like devices.
    if (classes.test(InputDeviceClass::JOYSTICK)) {
        mappers.push_back(createInputMapper<JoystickInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<JoystickInputMapper>(context, readerConfig));
    }

    // Motion sensor enabled devices.
    if (classes.test(InputDeviceClass::SENSOR)) {
        mappers.push_back(createInputMapper<SensorInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<SensorInputMapper>(context, readerConfig));
    }

    // External stylus-like devices.
    if (classes.test(InputDeviceClass::EXTERNAL_STYLUS)) {
        mappers.push_back(createInputMapper<ExternalStylusInputMapper>(contextPtr, readerConfig));
        mappers.push_back(createInputMapper<ExternalStylusInputMapper>(context, readerConfig));
    }
    return mappers;
}
Loading