aom: Fix unsigned int overflow in init_rate_histogram()

From 9759ad181d0b67450c8ed25fcbca9a20da5508e5 Mon Sep 17 00:00:00 2001
From: Wan-Teh Chang <[EMAIL REDACTED]>
Date: Tue, 25 Jun 2024 16:22:51 -0700
Subject: [PATCH] Fix unsigned int overflow in init_rate_histogram()

Tested: Build libaom with -DSANITIZE=integer and then run
./aomenc husky.yuv -o AV1_husky_2000000_1000000_1000000.webm --good \
  --cpu-used=2 -v -t 0 -w 352 -h 288 --fps=1000000/1000000 \
  --target-bitrate=2000000 --limit=150 --test-decode=fatal --passes=2 \
  --lag-in-frames=35 --min-q=0 --max-q=63 --min-gf-interval=4 \
  --max-gf-interval=32 --arnr-maxframes=7 --arnr-strength=5 \
  --kf-max-dist=9999 --aq-mode=0 --undershoot-pct=100 \
  --overshoot-pct=100 --bias-pct=50

This unsigned integer overflow seems to be caused by
g_timebase.num=1000000.
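
For context, a minimal standalone sketch of the overflow under assumed
values (the 6000 ms rc_buf_sz is an example value, not taken from this
run; the 1000000/1000000 rate comes from the --fps flag above). It is
illustrative only and not part of the patch:

  #include <stdint.h>
  #include <stdio.h>

  int main(void) {
    unsigned int rc_buf_sz = 6000;    /* assumed buffer size in milliseconds */
    int num = 1000000, den = 1000000; /* fps 1000000/1000000 from the test command */
    /* Old expression: 6000u * 5 / 4 = 7500, then 7500u * 1000000 =
       7,500,000,000, which wraps past UINT_MAX and yields 3 instead of 7. */
    unsigned int before = rc_buf_sz * 5 / 4 * num / den / 1000;
    /* Patched expression: widen to int64_t before multiplying by fps->num. */
    int after = (int)((int64_t)rc_buf_sz * 5 / 4 * num / den / 1000);
    printf("before=%u after=%d\n", before, after);
    return 0;
  }

Widening to int64_t before the multiply keeps the intermediate product in
range for realistic configurations; the result is then narrowed back to
int for hist->samples.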

Bug: 349440066
Change-Id: I0339674d6283b8875087ba6dc6564b79ffeb090b
---
 stats/rate_hist.c | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/stats/rate_hist.c b/stats/rate_hist.c
index 1b4f2ee00..32872465f 100644
--- a/stats/rate_hist.c
+++ b/stats/rate_hist.c
@@ -12,10 +12,11 @@
 #include "stats/rate_hist.h"
 
 #include <assert.h>
-#include <stdlib.h>
+#include <math.h>
 #include <limits.h>
+#include <stdint.h>
 #include <stdio.h>
-#include <math.h>
+#include <stdlib.h>
 
 #define RATE_BINS 100
 #define HIST_BAR_MAX 40
@@ -48,7 +49,8 @@ struct rate_hist *init_rate_histogram(const aom_codec_enc_cfg_t *cfg,
   // Determine the number of samples in the buffer. Use the file's framerate
   // to determine the number of frames in rc_buf_sz milliseconds, with an
   // adjustment (5/4) to account for alt-refs
-  hist->samples = cfg->rc_buf_sz * 5 / 4 * fps->num / fps->den / 1000;
+  hist->samples =
+      (int)((int64_t)cfg->rc_buf_sz * 5 / 4 * fps->num / fps->den / 1000);
 
   // prevent division by zero
   if (hist->samples == 0) hist->samples = 1;