Commit 32c4161373 for aom

commit 32c416137354019828c0f07f6de6208aa012337f
Author: Ram Mohan M <ram.mohan@ittiam.com>
Date:   Mon May 4 15:03:28 2026 +0530

    Use g_bit_depth during input validation

    The libaom encoder requires that the input frames passed to it by the
    application for encoding have the same bit-depth as the codec
    bit-depth. If the input bit-depth is less than the codec bit-depth,
    then the application
    must upshift the frame before passing it. The application may
    communicate the actual input bit-depth via g_input_bit_depth so that
    quality metrics are computed with reference to actual input.

    As the input is expected to have the same precision as the codec
    bit-depth,
    this commit modifies the input validator to use codec bit-depth.

    Also updated API documentation to reflect these changes.

    Bug: 503993976
    Bug: 503987489

    Change-Id: Id7d7aa60681462d58a905a63df9bc18c52afe49a

diff --git a/aom/aom_encoder.h b/aom/aom_encoder.h
index 9d412af331..2e7f6bbe73 100644
--- a/aom/aom_encoder.h
+++ b/aom/aom_encoder.h
@@ -471,11 +471,15 @@ typedef struct aom_codec_enc_cfg {
    */
   aom_bit_depth_t g_bit_depth;

-  /*!\brief Bit-depth of the input frames
-   *
-   * This value identifies the bit_depth of the input frames in bits.
-   * Note that the frames passed as input to the encoder must have
-   * this bit-depth.
+  /*!\brief Bit-depth of the input source
+   *
+   * This value identifies the actual bit-depth of the input source in bits.
+   * Note that the frames passed as input to the encoder must match codec
+   * bit-depth. If there is a mismatch between source bit-depth and codec
+   * bit-depth, then the application is required to upshift the frame to the
+   * codec bit-depth before passing it for encoding. Additionally, this variable
+   * is used by the library to compute quality metrics at source bit-depth. So,
+   * source bit-depth must not exceed codec bit-depth.
    */
   unsigned int g_input_bit_depth;

diff --git a/av1/arg_defs.c b/av1/arg_defs.c
index b4025ee2f4..2eb3b99291 100644
--- a/av1/arg_defs.c
+++ b/av1/arg_defs.c
@@ -194,7 +194,8 @@ const av1_codec_arg_definitions_t g_av1_codec_arg_defs = {
               "Display warnings, but do not prompt user to continue"),
   .bitdeptharg =
       ARG_DEF_ENUM("b", "bit-depth", 1, "Bit depth for codec", bitdepth_enum),
-  .inbitdeptharg = ARG_DEF(NULL, "input-bit-depth", 1, "Bit depth of input"),
+  .inbitdeptharg =
+      ARG_DEF(NULL, "input-bit-depth", 1, "Actual bit depth of input source"),

   .input_chroma_subsampling_x = ARG_DEF(NULL, "input-chroma-subsampling-x", 1,
                                         "Chroma subsampling x value"),
diff --git a/av1/av1_cx_iface.c b/av1/av1_cx_iface.c
index 99402f6b56..324cecb906 100644
--- a/av1/av1_cx_iface.c
+++ b/av1/av1_cx_iface.c
@@ -802,6 +802,8 @@ static aom_codec_err_t validate_config(aom_codec_alg_priv_t *ctx,
   RANGE_CHECK_HI(extra_cfg, cq_level, 63);
   RANGE_CHECK(cfg, g_bit_depth, AOM_BITS_8, AOM_BITS_12);
   RANGE_CHECK(cfg, g_input_bit_depth, 8, 12);
+  if (cfg->g_input_bit_depth > cfg->g_bit_depth)
+    ERROR("Input bit-depth must not exceed codec bit-depth");
   RANGE_CHECK(extra_cfg, content, AOM_CONTENT_DEFAULT, AOM_CONTENT_INVALID - 1);

   if (cfg->g_pass >= AOM_RC_SECOND_PASS) {
@@ -842,10 +844,6 @@ static aom_codec_err_t validate_config(aom_codec_alg_priv_t *ctx,
       cfg->g_bit_depth > AOM_BITS_10) {
     ERROR("Codec bit-depth 12 not supported in profile < 2");
   }
-  if (cfg->g_profile <= (unsigned int)PROFILE_1 &&
-      cfg->g_input_bit_depth > 10) {
-    ERROR("Source bit-depth 12 not supported in profile < 2");
-  }

   if (cfg->rc_end_usage == AOM_Q) {
     RANGE_CHECK_HI(cfg, use_fixed_qp_offsets, 2);
@@ -1023,7 +1021,7 @@ static aom_codec_err_t validate_img(aom_codec_alg_priv_t *ctx,
 #if CONFIG_AV1_HIGHBITDEPTH
   if (ctx->extra_cfg.validate_hbd_input &&
       (img->fmt & AOM_IMG_FMT_HIGHBITDEPTH)) {
-    const unsigned int bit_depth = ctx->oxcf.input_cfg.input_bit_depth;
+    const unsigned int bit_depth = ctx->cfg.g_bit_depth;
     const int max_val = 1 << bit_depth;
     // Note there is no high bitdepth version of NV12 defined. If one is
     // added, `num_planes` should be 2 in that case.
diff --git a/av1/encoder/encoder.h b/av1/encoder/encoder.h
index 0e38bb8a9b..36510f05b4 100644
--- a/av1/encoder/encoder.h
+++ b/av1/encoder/encoder.h
@@ -768,7 +768,7 @@ typedef struct {
 typedef struct {
   // Indicates the framerate of the input video.
   double init_framerate;
-  // Indicates the bit-depth of the input video.
+  // Indicates the actual bit-depth of the input video.
   unsigned int input_bit_depth;
   // Indicates the maximum number of frames to be encoded.
   unsigned int limit;
diff --git a/test/postproc_filters_test.cc b/test/postproc_filters_test.cc
index f907aef278..4a61d2e35e 100644
--- a/test/postproc_filters_test.cc
+++ b/test/postproc_filters_test.cc
@@ -22,18 +22,14 @@

 namespace {

-class PostprocFiltersTest
-    : public ::libaom_test::CodecTestWith2Params<int, unsigned int>,
-      public ::libaom_test::EncoderTest {
+class PostprocFiltersTest : public ::libaom_test::CodecTestWithParam<int>,
+                            public ::libaom_test::EncoderTest {
  protected:
   PostprocFiltersTest()
       : EncoderTest(GET_PARAM(0)), set_skip_postproc_filtering_(false),
-        frame_number_(0), cpu_used_(GET_PARAM(1)), bd_(GET_PARAM(2)) {}
+        frame_number_(0), cpu_used_(GET_PARAM(1)) {}

-  void SetUp() override {
-    InitializeConfig(::libaom_test::kAllIntra);
-    cfg_.g_input_bit_depth = bd_;
-  }
+  void SetUp() override { InitializeConfig(::libaom_test::kAllIntra); }

   void PreEncodeFrameHook(::libaom_test::VideoSource *video,
                           ::libaom_test::Encoder *encoder) override {
@@ -121,7 +117,6 @@ class PostprocFiltersTest
   static constexpr int kFrames = 30;
   static constexpr unsigned int kCqLevel = 18;
   int cpu_used_;
-  unsigned int bd_;
 };

 class PostprocFiltersTestLarge : public PostprocFiltersTest {};
@@ -130,11 +125,9 @@ TEST_P(PostprocFiltersTest, MD5Match) { DoTest(); }

 TEST_P(PostprocFiltersTestLarge, MD5Match) { DoTest(); }

-AV1_INSTANTIATE_TEST_SUITE(PostprocFiltersTest, ::testing::Values(9),
-                           ::testing::Values(8, 10));
+AV1_INSTANTIATE_TEST_SUITE(PostprocFiltersTest, ::testing::Values(9));

 // Test cpu_used 3 and 6.
-AV1_INSTANTIATE_TEST_SUITE(PostprocFiltersTestLarge, ::testing::Values(3, 6),
-                           ::testing::Values(8, 10));
+AV1_INSTANTIATE_TEST_SUITE(PostprocFiltersTestLarge, ::testing::Values(3, 6));

 }  // namespace