Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e6d0e201 authored by Nick Deakin, committed by Android (Google) Code Review
Browse files

Merge "Several fixes to recovery map math."

parents fc7c70e4 594a4ca9
Loading
Loading
Loading
Loading
+133 −13
Original line number Diff line number Diff line
@@ -23,6 +23,9 @@

namespace android::recoverymap {

////////////////////////////////////////////////////////////////////////////////
// Framework

const float kSdrWhiteNits = 100.0f;

struct Color {
@@ -40,34 +43,141 @@ struct Color {
  };
};

// Component-wise compound addition of two colors.
// Returns a reference to the modified lhs — the canonical compound-assignment
// form; returning by value (as before) copied the Color on every use.
inline Color& operator+=(Color& lhs, const Color& rhs) {
  lhs.r += rhs.r;
  lhs.g += rhs.g;
  lhs.b += rhs.b;
  return lhs;
}
// Component-wise compound subtraction of two colors.
inline Color& operator-=(Color& lhs, const Color& rhs) {
  lhs.r -= rhs.r;
  lhs.g -= rhs.g;
  lhs.b -= rhs.b;
  return lhs;
}

// Component-wise sum of two colors; neither operand is modified.
inline Color operator+(const Color& lhs, const Color& rhs) {
  return {{{ lhs.r + rhs.r, lhs.g + rhs.g, lhs.b + rhs.b }}};
}
// Component-wise difference of two colors; neither operand is modified.
inline Color operator-(const Color& lhs, const Color& rhs) {
  return {{{ lhs.r - rhs.r, lhs.g - rhs.g, lhs.b - rhs.b }}};
}

// Compound scalar arithmetic applied to every color component.
// Each returns a reference to the modified lhs — the canonical
// compound-assignment form; returning by value (as before) copied the Color
// on every use.
inline Color& operator+=(Color& lhs, const float rhs) {
  lhs.r += rhs;
  lhs.g += rhs;
  lhs.b += rhs;
  return lhs;
}
inline Color& operator-=(Color& lhs, const float rhs) {
  lhs.r -= rhs;
  lhs.g -= rhs;
  lhs.b -= rhs;
  return lhs;
}
inline Color& operator*=(Color& lhs, const float rhs) {
  lhs.r *= rhs;
  lhs.g *= rhs;
  lhs.b *= rhs;
  return lhs;
}
inline Color& operator/=(Color& lhs, const float rhs) {
  lhs.r /= rhs;
  lhs.g /= rhs;
  lhs.b /= rhs;
  return lhs;
}

// Scalar arithmetic applied to every component; the input color is untouched.
inline Color operator+(const Color& lhs, const float rhs) {
  return {{{ lhs.r + rhs, lhs.g + rhs, lhs.b + rhs }}};
}
inline Color operator-(const Color& lhs, const float rhs) {
  return {{{ lhs.r - rhs, lhs.g - rhs, lhs.b - rhs }}};
}
inline Color operator*(const Color& lhs, const float rhs) {
  return {{{ lhs.r * rhs, lhs.g * rhs, lhs.b * rhs }}};
}
inline Color operator/(const Color& lhs, const float rhs) {
  return {{{ lhs.r / rhs, lhs.g / rhs, lhs.b / rhs }}};
}


////////////////////////////////////////////////////////////////////////////////
// sRGB transformations

/*
 * Calculate the luminance of a linear sRGB pixel, according to IEC 61966-2-1.
 */
float srgbLuminance(Color e);

/*
 * Convert from OETF'd bt.2100 RGB to YUV, according to BT.2100
 * Convert from OETF'd srgb YUV to RGB, according to ECMA TR/98.
 */
Color bt2100RgbToYuv(Color e);
Color srgbYuvToRgb(Color e_gamma);

/*
 * Convert srgb YUV to RGB, according to ECMA TR/98.
 * Convert from OETF'd srgb RGB to YUV, according to ECMA TR/98.
 */
Color srgbYuvToRgb(Color e);
Color srgbRgbToYuv(Color e_gamma);

/*
 * TODO: better source for srgb transfer function
 * Convert from srgb to linear, according to https://en.wikipedia.org/wiki/SRGB.
 * Convert from srgb to linear, according to IEC 61966-2-1.
 *
 * [0.0, 1.0] range in and out.
 */
float srgbInvOetf(float e);
Color srgbInvOetf(Color e);
float srgbInvOetf(float e_gamma);
Color srgbInvOetf(Color e_gamma);


////////////////////////////////////////////////////////////////////////////////
// Display-P3 transformations




////////////////////////////////////////////////////////////////////////////////
// BT.2100 transformations - according to ITU-R BT.2100-2

/*
 * Calculate the luminance of a linear RGB BT.2100 pixel.
 */
float bt2100Luminance(Color e);

/*
 * Convert from OETF'd BT.2100 RGB to YUV.
 */
Color bt2100RgbToYuv(Color e_gamma);

/*
 * Convert from HLG to scene luminance in nits, according to BT.2100.
 * Convert from OETF'd BT.2100 YUV to RGB.
 */
float hlgInvOetf(float e);
Color bt2100YuvToRgb(Color e_gamma);

/*
 * Convert from scene luminance in nits to HLG,  according to BT.2100.
 * Convert from scene luminance in nits to HLG.
 */
Color hlgOetf(Color e);

/*
 * Convert from HLG to scene luminance in nits.
 */
Color hlgInvOetf(Color e_gamma);

////////////////////////////////////////////////////////////////////////////////
// Color space conversions




////////////////////////////////////////////////////////////////////////////////
// Recovery map calculations

/*
 * Calculate the 8-bit unsigned integer recovery value for the given SDR and HDR
 * luminances in linear space, and the hdr ratio to encode against.
@@ -85,6 +195,11 @@ Color applyRecovery(Color e, float recovery, float hdr_ratio);
 */
Color getYuv420Pixel(jr_uncompressed_ptr image, size_t x, size_t y);

/*
 * Helper for getting the pixel at (x, y) from a P010 image.
 */
Color getP010Pixel(jr_uncompressed_ptr image, size_t x, size_t y);

/*
 * Sample the recovery value for the map from a given x,y coordinate on a scale
 * that is map scale factor larger than the map size.
@@ -97,8 +212,13 @@ float sampleMap(jr_uncompressed_ptr map, size_t map_scale_factor, size_t x, size
 *
 * Expect narrow-range image data for P010.
 */
float sampleYuv420Y(jr_uncompressed_ptr map, size_t map_scale_factor, size_t x, size_t y);
float sampleP010Y(jr_uncompressed_ptr map, size_t map_scale_factor, size_t x, size_t y);
Color sampleYuv420(jr_uncompressed_ptr map, size_t map_scale_factor, size_t x, size_t y);

/*
 * Sample the image pixel value at the provided location, with a weighting based on nearby pixels
 * and the map scale factor. Assumes narrow-range image data for P010.
 */
Color sampleP010(jr_uncompressed_ptr map, size_t map_scale_factor, size_t x, size_t y);

} // namespace android::recoverymap

+21 −13
Original line number Diff line number Diff line
@@ -301,31 +301,38 @@ status_t RecoveryMap::generateRecoveryMap(jr_uncompressed_ptr uncompressed_yuv_4
  std::unique_ptr<uint8_t[]> map_data;
  map_data.reset(reinterpret_cast<uint8_t*>(dest->data));

  uint16_t yp_hdr_max = 0;
  float hdr_y_nits_max = 0.0f;
  for (size_t y = 0; y < image_height; ++y) {
    for (size_t x = 0; x < image_width; ++x) {
      size_t pixel_idx =  x + y * image_width;
      uint16_t yp_hdr = reinterpret_cast<uint8_t*>(uncompressed_yuv_420_image->data)[pixel_idx];
      if (yp_hdr > yp_hdr_max) {
        yp_hdr_max = yp_hdr;
      Color hdr_yuv_gamma = getP010Pixel(uncompressed_p010_image, x, y);
      Color hdr_rgb_gamma = bt2100YuvToRgb(hdr_yuv_gamma);
      Color hdr_rgb = hlgInvOetf(hdr_rgb_gamma);
      float hdr_y_nits = bt2100Luminance(hdr_rgb);

      if (hdr_y_nits > hdr_y_nits_max) {
        hdr_y_nits_max = hdr_y_nits;
      }
    }
  }

  float y_hdr_max_nits = hlgInvOetf(yp_hdr_max);
  hdr_ratio = y_hdr_max_nits / kSdrWhiteNits;
  hdr_ratio = hdr_y_nits_max / kSdrWhiteNits;

  for (size_t y = 0; y < map_height; ++y) {
    for (size_t x = 0; x < map_width; ++x) {
      float yp_sdr = sampleYuv420Y(uncompressed_yuv_420_image, kMapDimensionScaleFactor, x, y);
      float yp_hdr = sampleP010Y(uncompressed_p010_image, kMapDimensionScaleFactor, x, y);
      Color sdr_yuv_gamma = sampleYuv420(uncompressed_yuv_420_image,
                                         kMapDimensionScaleFactor, x, y);
      Color sdr_rgb_gamma = srgbYuvToRgb(sdr_yuv_gamma);
      Color sdr_rgb = srgbInvOetf(sdr_rgb_gamma);
      float sdr_y_nits = srgbLuminance(sdr_rgb);

      float y_sdr_nits = srgbInvOetf(yp_sdr);
      float y_hdr_nits = hlgInvOetf(yp_hdr);
      Color hdr_yuv_gamma = sampleP010(uncompressed_p010_image, kMapDimensionScaleFactor, x, y);
      Color hdr_rgb_gamma = bt2100YuvToRgb(hdr_yuv_gamma);
      Color hdr_rgb = hlgInvOetf(hdr_rgb_gamma);
      float hdr_y_nits = bt2100Luminance(hdr_rgb);

      size_t pixel_idx =  x + y * map_width;
      reinterpret_cast<uint8_t*>(dest->data)[pixel_idx] =
          encodeRecovery(y_sdr_nits, y_hdr_nits, hdr_ratio);
          encodeRecovery(sdr_y_nits, hdr_y_nits, hdr_ratio);
    }
  }

@@ -367,7 +374,8 @@ status_t RecoveryMap::applyRecoveryMap(jr_uncompressed_ptr uncompressed_yuv_420_
      Color rgb_hdr = applyRecovery(rgb_sdr, recovery, hdr_ratio);

      Color rgbp_hdr = hlgOetf(rgb_hdr);
      Color ypuv_hdr = bt2100RgbToYuv(rgbp_hdr);
      // TODO: actually just leave in RGB and convert to RGBA1010102 instead.
      Color ypuv_hdr = srgbRgbToYuv(rgbp_hdr);

      reinterpret_cast<uint16_t*>(dest->data)[pixel_y_idx] = ypuv_hdr.r;
      reinterpret_cast<uint16_t*>(dest->data)[pixel_count + pixel_uv_idx] = ypuv_hdr.g;
+127 −43
Original line number Diff line number Diff line
@@ -20,42 +20,102 @@

namespace android::recoverymap {

static const float kBt2100R = 0.2627f, kBt2100G = 0.6780f, kBt2100B = 0.0593f;
static const float kBt2100Cb = 1.8814f, kBt2100Cr = 1.4746f;
////////////////////////////////////////////////////////////////////////////////
// sRGB transformations

static const float kSrgbR = 0.299f, kSrgbG = 0.587f, kSrgbB = 0.114f;

Color bt2100RgbToYuv(Color e) {
  float yp = kBt2100R * e.r + kBt2100G * e.g + kBt2100B * e.b;
  return {{{yp, (e.b - yp) / kBt2100Cb, (e.r - yp) / kBt2100Cr }}};
float srgbLuminance(Color e) {
  return kSrgbR * e.r + kSrgbG * e.g + kSrgbB * e.b;
}

static const float kSrgbRCr = 1.402f, kSrgbGCb = 0.34414f, kSrgbGCr = 0.71414f, kSrgbBCb = 1.772f;

Color srgbYuvToRgb(Color e) {
  return {{{ e.y + kSrgbRCr * e.v, e.y - kSrgbGCb * e.u - kSrgbGCr * e.v, e.y + kSrgbBCb * e.u }}};
Color srgbYuvToRgb(Color e_gamma) {
  return {{{ e_gamma.y + kSrgbRCr * e_gamma.v,
             e_gamma.y - kSrgbGCb * e_gamma.u - kSrgbGCr * e_gamma.v,
             e_gamma.y + kSrgbBCb * e_gamma.u }}};
}

static const float kSrgbUR = -0.1687f, kSrgbUG = -0.3313f, kSrgbUB = 0.5f;
static const float kSrgbVR = 0.5f, kSrgbVG = -0.4187f, kSrgbVB = -0.0813f;

// Convert a gamma-encoded sRGB pixel to full-range Y'CbCr, per ECMA TR/98.
// Each output channel is a weighted sum of the gamma-encoded R'G'B' inputs.
Color srgbRgbToYuv(Color e_gamma) {
  float y_gamma = kSrgbR * e_gamma.r + kSrgbG * e_gamma.g + kSrgbB * e_gamma.b;
  float u_gamma = kSrgbUR * e_gamma.r + kSrgbUG * e_gamma.g + kSrgbUB * e_gamma.b;
  float v_gamma = kSrgbVR * e_gamma.r + kSrgbVG * e_gamma.g + kSrgbVB * e_gamma.b;
  return {{{ y_gamma, u_gamma, v_gamma }}};
}

float srgbInvOetf(float e) {
  if (e <= 0.04045f) {
    return e / 12.92f;
float srgbInvOetf(float e_gamma) {
  if (e_gamma <= 0.04045f) {
    return e_gamma / 12.92f;
  } else {
    return pow((e + 0.055f) / 1.055f, 2.4);
    return pow((e_gamma + 0.055f) / 1.055f, 2.4);
  }
}

Color srgbInvOetf(Color e) {
  return {{{ srgbInvOetf(e.r), srgbInvOetf(e.g), srgbInvOetf(e.b) }}};
Color srgbInvOetf(Color e_gamma) {
  return {{{ srgbInvOetf(e_gamma.r),
             srgbInvOetf(e_gamma.g),
             srgbInvOetf(e_gamma.b) }}};
}

static const float kHlgA = 0.17883277f, kHlgB = 0.28466892f, kHlgC = 0.55991073;

float hlgInvOetf(float e) {
  if (e <= 0.5f) {
    return pow(e, 2.0f) / 3.0f;
  } else {
    return (exp((e - kHlgC) / kHlgA) + kHlgB) / 12.0f;
////////////////////////////////////////////////////////////////////////////////
// Display-P3 transformations



////////////////////////////////////////////////////////////////////////////////
// BT.2100 transformations - according to ITU-R BT.2100-2

static const float kBt2100R = 0.2627f, kBt2100G = 0.6780f, kBt2100B = 0.0593f;

// Luminance of a linear BT.2100 RGB pixel: the weighted sum of the channels
// using the BT.2100 luma coefficients (R 0.2627, G 0.6780, B 0.0593).
float bt2100Luminance(Color e) {
  return kBt2100R * e.r + kBt2100G * e.g + kBt2100B * e.b;
}

static const float kBt2100Cb = 1.8814f, kBt2100Cr = 1.4746f;

// Convert gamma-encoded BT.2100 R'G'B' to Y'CbCr (non-constant-luminance):
// compute luma from the encoded channels, then scale the B'-Y' and R'-Y'
// color differences by the BT.2100 divisors.
Color bt2100RgbToYuv(Color e_gamma) {
  float luma = bt2100Luminance(e_gamma);
  float cb = (e_gamma.b - luma) / kBt2100Cb;
  float cr = (e_gamma.r - luma) / kBt2100Cr;
  return {{{ luma, cb, cr }}};
}

// Derived from the reverse of bt2100RgbToYuv. The derivations for R and B are
// pretty straightforward; we just reverse the formulas for U and V above. But
// deriving the formula for G is a bit more complicated:
//
// Start with equation for luminance:
//   Y = kBt2100R * R + kBt2100G * G + kBt2100B * B
// Solve for G:
//   G = (Y - kBt2100R * R - kBt2100B * B) / kBt2100G
// Substitute equations for R and B in terms of YUV:
//   G = (Y - kBt2100R * (Y + kBt2100Cr * V) - kBt2100B * (Y + kBt2100Cb * U)) / kBt2100G
// Simplify:
//   G = Y * ((1 - kBt2100R - kBt2100B) / kBt2100G)
//     - U * (kBt2100B * kBt2100Cb / kBt2100G)
//     - V * (kBt2100R * kBt2100Cr / kBt2100G)
//
// Since kBt2100R + kBt2100G + kBt2100B = 1, the Y coefficient reduces to 1.
// We then get the following coefficients for calculating G from YUV:
//
// Coef for Y = (1 - kBt2100R - kBt2100B) / kBt2100G = 1
// Coef for U = kBt2100B * kBt2100Cb / kBt2100G = kBt2100GCb = ~0.1645
// Coef for V = kBt2100R * kBt2100Cr / kBt2100G = kBt2100GCr = ~0.5713

static const float kBt2100GCb = kBt2100B * kBt2100Cb / kBt2100G;
static const float kBt2100GCr = kBt2100R * kBt2100Cr / kBt2100G;

// Convert gamma-encoded BT.2100 Y'CbCr back to R'G'B', using the derivation
// above for the G channel (note U and V are subtracted, per the signs derived).
Color bt2100YuvToRgb(Color e_gamma) {
  return {{{ e_gamma.y + kBt2100Cr * e_gamma.v,
             e_gamma.y - kBt2100GCb * e_gamma.u - kBt2100GCr * e_gamma.v,
             e_gamma.y + kBt2100Cb * e_gamma.u }}};
}

static const float kHlgA = 0.17883277f, kHlgB = 0.28466892f, kHlgC = 0.55991073;

static float hlgOetf(float e) {
  if (e <= 1.0f/12.0f) {
    return sqrt(3.0f * e);
@@ -68,6 +128,28 @@ Color hlgOetf(Color e) {
  return {{{ hlgOetf(e.r), hlgOetf(e.g), hlgOetf(e.b) }}};
}

// HLG inverse OETF per ITU-R BT.2100: maps a gamma-encoded signal in
// [0.0, 1.0] back to linear scene light in [0.0, 1.0].
float hlgInvOetf(float e_gamma) {
  if (e_gamma <= 0.5f) {
    // Square directly instead of calling pow(): pow is a general
    // transcendental routine and is far slower for an exponent of 2.
    return e_gamma * e_gamma / 3.0f;
  } else {
    return (exp((e_gamma - kHlgC) / kHlgA) + kHlgB) / 12.0f;
  }
}

// Component-wise HLG inverse OETF: applies the scalar hlgInvOetf to each of
// the R, G, and B channels independently.
Color hlgInvOetf(Color e_gamma) {
  return {{{ hlgInvOetf(e_gamma.r),
             hlgInvOetf(e_gamma.g),
             hlgInvOetf(e_gamma.b) }}};
}


////////////////////////////////////////////////////////////////////////////////
// Color conversions


////////////////////////////////////////////////////////////////////////////////
// Recovery map calculations

uint8_t encodeRecovery(float y_sdr, float y_hdr, float hdr_ratio) {
  float gain = 1.0f;
  if (y_sdr > 0.0f) {
@@ -137,40 +219,42 @@ Color getYuv420Pixel(jr_uncompressed_ptr image, size_t x, size_t y) {
             (static_cast<float>(v_uint) - 128.0f) / 255.0f }}};
}

typedef float (*sampleComponentFn)(jr_uncompressed_ptr, size_t, size_t);
// Fetch the (y, u, v) sample at (x, y) from a semi-planar P010 image (full-res
// Y plane followed by interleaved UV at half resolution) and normalize the
// narrow-range values to float.
// NOTE(review): indexing treats the samples as 10-bit values in the low bits
// of each 16-bit word; standard P010 stores them in the high 10 bits — confirm
// the producer's layout.
Color getP010Pixel(jr_uncompressed_ptr image, size_t x, size_t y) {
  size_t pixel_count = image->width * image->height;

  size_t pixel_y_idx = x + y * image->width;
  // UV plane starts after the Y plane; one interleaved UV pair per 2x2 block.
  size_t pixel_uv_idx = x / 2 + (y / 2) * (image->width / 2);

  uint16_t y_uint = reinterpret_cast<uint16_t*>(image->data)[pixel_y_idx];
  uint16_t u_uint = reinterpret_cast<uint16_t*>(image->data)[pixel_count + pixel_uv_idx * 2];
  uint16_t v_uint = reinterpret_cast<uint16_t*>(image->data)[pixel_count + pixel_uv_idx * 2 + 1];

  // Conversions include taking narrow-range into account.
  // NOTE(review): luma is divided by 940 without subtracting the 64 narrow-range
  // offset, while chroma subtracts 64 — verify this asymmetry is intentional.
  return {{{ static_cast<float>(y_uint) / 940.0f,
             (static_cast<float>(u_uint) - 64.0f) / 940.0f - 0.5f,
             (static_cast<float>(v_uint) - 64.0f) / 940.0f - 0.5f }}};
}

typedef Color (*getPixelFn)(jr_uncompressed_ptr, size_t, size_t);

static float sampleComponent(jr_uncompressed_ptr image, size_t map_scale_factor, size_t x, size_t y,
                             sampleComponentFn sample_fn) {
  float e = 0.0f;
static Color samplePixels(jr_uncompressed_ptr image, size_t map_scale_factor, size_t x, size_t y,
                          getPixelFn get_pixel_fn) {
  Color e = {{{ 0.0f, 0.0f, 0.0f }}};
  for (size_t dy = 0; dy < map_scale_factor; ++dy) {
    for (size_t dx = 0; dx < map_scale_factor; ++dx) {
      e += sample_fn(image, x * map_scale_factor + dx, y * map_scale_factor + dy);
      e += get_pixel_fn(image, x * map_scale_factor + dx, y * map_scale_factor + dy);
    }
  }

  return e / static_cast<float>(map_scale_factor * map_scale_factor);
}

// Fetch the 8-bit luma sample at (x, y) from a YUV420 image and normalize it
// to [0.0, 1.0].
static float getYuv420Y(jr_uncompressed_ptr image, size_t x, size_t y) {
  const size_t idx = y * image->width + x;
  const uint8_t* luma_plane = reinterpret_cast<uint8_t*>(image->data);
  return static_cast<float>(luma_plane[idx]) / 255.0f;
}


// Sample the YUV420 luma at map coordinate (x, y): averages the Y values of
// the map_scale_factor x map_scale_factor block of image pixels it covers.
float sampleYuv420Y(jr_uncompressed_ptr image, size_t map_scale_factor, size_t x, size_t y) {
  return sampleComponent(image, map_scale_factor, x, y, getYuv420Y);
}

static float getP010Y(jr_uncompressed_ptr image, size_t x, size_t y) {
  size_t pixel_idx = x + y * image->width;
  uint8_t y_uint = reinterpret_cast<uint16_t*>(image->data)[pixel_idx];
  // Expecting narrow range input
  return (static_cast<float>(y_uint) - 64.0f) / 960.0f;
Color sampleYuv420(jr_uncompressed_ptr image, size_t map_scale_factor, size_t x, size_t y) {
  return samplePixels(image, map_scale_factor, x, y, getYuv420Pixel);
}

float sampleP010Y(jr_uncompressed_ptr image, size_t map_scale_factor, size_t x, size_t y) {
  return sampleComponent(image, map_scale_factor, x, y, getP010Y);
Color sampleP010(jr_uncompressed_ptr image, size_t map_scale_factor, size_t x, size_t y) {
  return samplePixels(image, map_scale_factor, x, y, getP010Pixel);
}

} // namespace android::recoverymap
+8 −0
Original line number Diff line number Diff line
@@ -27,7 +27,15 @@ cc_test {
    srcs: [
        "recoverymap_test.cpp",
    ],
    shared_libs: [
        "libimage_io",
        "libjpeg",
        "liblog",
    ],
    static_libs: [
        "libgtest",
        "libjpegdecoder",
        "libjpegencoder",
        "libjpegrecoverymap",
    ],
}
+27 −3
Original line number Diff line number Diff line
@@ -14,9 +14,33 @@
 * limitations under the License.
 */

#include <gtest/gtest.h>
#include <jpegrecoverymap/recoverymap.h>

namespace android {
namespace android::recoverymap {

// Add new tests here.
} // namespace android
class RecoveryMapTest : public testing::Test {
public:
  RecoveryMapTest();
  ~RecoveryMapTest();
protected:
  virtual void SetUp();
  virtual void TearDown();
};

RecoveryMapTest::RecoveryMapTest() {}
RecoveryMapTest::~RecoveryMapTest() {}

void RecoveryMapTest::SetUp() {}
void RecoveryMapTest::TearDown() {}

TEST_F(RecoveryMapTest, build) {
  // Force all of the recovery map lib to be linked by calling all public functions.
  // Return statuses are deliberately ignored: this test only verifies that the
  // library links, not that the calls succeed with null arguments.
  RecoveryMap recovery_map;
  recovery_map.encodeJPEGR(nullptr, nullptr, nullptr, 0, nullptr);
  recovery_map.encodeJPEGR(nullptr, nullptr, nullptr, nullptr);
  recovery_map.encodeJPEGR(nullptr, nullptr, nullptr);
  recovery_map.decodeJPEGR(nullptr, nullptr, nullptr, false);
}

} // namespace android::recoverymap