diff --git a/library/effect/src/main/java/com/google/android/exoplayer2/effect/DefaultVideoFrameProcessor.java b/library/effect/src/main/java/com/google/android/exoplayer2/effect/DefaultVideoFrameProcessor.java index 2301e7d570c..085dacebc8e 100644 --- a/library/effect/src/main/java/com/google/android/exoplayer2/effect/DefaultVideoFrameProcessor.java +++ b/library/effect/src/main/java/com/google/android/exoplayer2/effect/DefaultVideoFrameProcessor.java @@ -97,7 +97,8 @@ public Builder setEnableColorTransfers(boolean enableColorTransfers) { /** * Sets the {@link TextureOutputListener}. * - *
<p>
If set, the {@link VideoFrameProcessor} will output to an OpenGL texture. + *
<p>
If set, the {@link VideoFrameProcessor} will output to an OpenGL texture, accessible via + * {@link TextureOutputListener#onTextureRendered}. Otherwise, no texture will be rendered to. */ @VisibleForTesting @CanIgnoreReturnValue diff --git a/library/effect/src/main/java/com/google/android/exoplayer2/effect/FinalShaderProgramWrapper.java b/library/effect/src/main/java/com/google/android/exoplayer2/effect/FinalShaderProgramWrapper.java index ed6319ed9cb..2a6c0dbd5fb 100644 --- a/library/effect/src/main/java/com/google/android/exoplayer2/effect/FinalShaderProgramWrapper.java +++ b/library/effect/src/main/java/com/google/android/exoplayer2/effect/FinalShaderProgramWrapper.java @@ -458,7 +458,9 @@ private void configureOutputTexture(int outputWidth, int outputHeight) throws Gl } int outputTexId = GlUtil.createTexture( - outputWidth, outputHeight, /* useHighPrecisionColorComponents= */ false); + outputWidth, + outputHeight, + /* useHighPrecisionColorComponents= */ ColorInfo.isTransferHdr(outputColorInfo)); outputTexture = glObjectsProvider.createBuffersForTexture(outputTexId, outputWidth, outputHeight); } diff --git a/library/transformer/src/androidTest/java/com/google/android/exoplayer2/transformer/mh/DefaultVideoFrameProcessorTextureOutputPixelTest.java b/library/transformer/src/androidTest/java/com/google/android/exoplayer2/transformer/mh/DefaultVideoFrameProcessorTextureOutputPixelTest.java index 0693916380b..62d9037537b 100644 --- a/library/transformer/src/androidTest/java/com/google/android/exoplayer2/transformer/mh/DefaultVideoFrameProcessorTextureOutputPixelTest.java +++ b/library/transformer/src/androidTest/java/com/google/android/exoplayer2/transformer/mh/DefaultVideoFrameProcessorTextureOutputPixelTest.java @@ -15,24 +15,38 @@ */ package com.google.android.exoplayer2.transformer.mh; +import static androidx.test.core.app.ApplicationProvider.getApplicationContext; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE; +import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceArgb8888; import static com.google.android.exoplayer2.testutil.BitmapPixelTestUtil.readBitmap; +import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT; +import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_720P_4_SECOND_HDR10_FORMAT; +import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_FORMAT; +import static com.google.android.exoplayer2.transformer.AndroidTestUtil.recordTestSkipped; import static com.google.android.exoplayer2.util.Assertions.checkNotNull; +import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull; import static com.google.common.truth.Truth.assertThat; +import android.content.Context; import android.graphics.Bitmap; import android.graphics.SurfaceTexture; import android.view.Surface; import androidx.test.ext.junit.runners.AndroidJUnit4; +import com.google.android.exoplayer2.C; +import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.effect.BitmapOverlay; import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor; import com.google.android.exoplayer2.effect.OverlayEffect; import 
com.google.android.exoplayer2.testutil.BitmapPixelTestUtil; import com.google.android.exoplayer2.testutil.VideoFrameProcessorTestRunner; +import com.google.android.exoplayer2.transformer.AndroidTestUtil; +import com.google.android.exoplayer2.transformer.EncoderUtil; import com.google.android.exoplayer2.util.GlTextureInfo; import com.google.android.exoplayer2.util.GlUtil; +import com.google.android.exoplayer2.util.Util; +import com.google.android.exoplayer2.video.ColorInfo; import com.google.common.collect.ImmutableList; import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.junit.After; @@ -54,19 +68,38 @@ public final class DefaultVideoFrameProcessorTextureOutputPixelTest { private static final String BITMAP_OVERLAY_PNG_ASSET_PATH = "media/bitmap/sample_mp4_first_frame/electrical_colors/overlay_bitmap_FrameProcessor.png"; private static final String OVERLAY_PNG_ASSET_PATH = "media/bitmap/input_images/media3test.png"; - /** Input video of which we only use the first frame. */ + + private static final String ORIGINAL_HLG10_PNG_ASSET_PATH = + "media/bitmap/sample_mp4_first_frame/electrical_colors/original_hlg10.png"; + private static final String ORIGINAL_HDR10_PNG_ASSET_PATH = + "media/bitmap/sample_mp4_first_frame/electrical_colors/original_hdr10.png"; + + /** Input SDR video of which we only use the first frame. */ private static final String INPUT_SDR_MP4_ASSET_STRING = "media/mp4/sample.mp4"; + /** Input PQ video of which we only use the first frame. */ + private static final String INPUT_PQ_MP4_ASSET_STRING = "media/mp4/hdr10-720p.mp4"; + /** Input HLG video of which we only use the first frame. */ + private static final String INPUT_HLG10_MP4_ASSET_STRING = "media/mp4/hlg-1080p.mp4"; private @MonotonicNonNull VideoFrameProcessorTestRunner videoFrameProcessorTestRunner; @After public void release() { - checkNotNull(videoFrameProcessorTestRunner).release(); + if (videoFrameProcessorTestRunner != null) { + videoFrameProcessorTestRunner.release(); + } } @Test public void noEffects_matchesGoldenFile() throws Exception { String testId = "noEffects_matchesGoldenFile"; + if (AndroidTestUtil.skipAndLogIfFormatsUnsupported( + getApplicationContext(), + testId, + /* inputFormat= */ MP4_ASSET_FORMAT, + /* outputFormat= */ null)) { + return; + } videoFrameProcessorTestRunner = getDefaultFrameProcessorTestRunnerBuilder(testId).build(); Bitmap expectedBitmap = readBitmap(ORIGINAL_PNG_ASSET_PATH); @@ -82,6 +115,13 @@ public void noEffects_matchesGoldenFile() throws Exception { @Test public void bitmapOverlay_matchesGoldenFile() throws Exception { String testId = "bitmapOverlay_matchesGoldenFile"; + if (AndroidTestUtil.skipAndLogIfFormatsUnsupported( + getApplicationContext(), + testId, + /* inputFormat= */ MP4_ASSET_FORMAT, + /* outputFormat= */ null)) { + return; + } Bitmap overlayBitmap = readBitmap(OVERLAY_PNG_ASSET_PATH); BitmapOverlay bitmapOverlay = BitmapOverlay.createStaticBitmapOverlay(overlayBitmap); videoFrameProcessorTestRunner = @@ -99,8 +139,79 @@ public void bitmapOverlay_matchesGoldenFile() throws Exception { .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE); } - // TODO(b/227624622): Add a test for HDR input after BitmapPixelTestUtil can read HDR bitmaps, - // using GlEffectWrapper to ensure usage of intermediate textures. 
+ @Test + public void noEffects_hlg10Input_matchesGoldenFile() throws Exception { + String testId = "noEffects_hlg10Input_matchesGoldenFile"; + Context context = getApplicationContext(); + Format format = MP4_ASSET_1080P_5_SECOND_HLG10_FORMAT; + if (!deviceSupportsHdrEditing(format)) { + recordTestSkipped(context, testId, "No HLG editing support"); + return; + } + if (AndroidTestUtil.skipAndLogIfFormatsUnsupported( + context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) { + return; + } + ColorInfo hlg10ColorInfo = + new ColorInfo.Builder() + .setColorSpace(C.COLOR_SPACE_BT2020) + .setColorRange(C.COLOR_RANGE_LIMITED) + .setColorTransfer(C.COLOR_TRANSFER_HLG) + .build(); + videoFrameProcessorTestRunner = + getDefaultFrameProcessorTestRunnerBuilder(testId) + .setInputColorInfo(hlg10ColorInfo) + .setOutputColorInfo(hlg10ColorInfo) + .setVideoAssetPath(INPUT_HLG10_MP4_ASSET_STRING) + .build(); + Bitmap expectedBitmap = readBitmap(ORIGINAL_HLG10_PNG_ASSET_PATH); + + Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd(); + + // TODO(b/207848601): Switch to using proper tooling for testing against golden data. + float averagePixelAbsoluteDifference = + BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16( + expectedBitmap, actualBitmap); + assertThat(averagePixelAbsoluteDifference) + .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16); + } + + @Test + public void noEffects_hdr10Input_matchesGoldenFile() throws Exception { + String testId = "noEffects_hdr10Input_matchesGoldenFile"; + Context context = getApplicationContext(); + Format format = MP4_ASSET_720P_4_SECOND_HDR10_FORMAT; + if (!deviceSupportsHdrEditing(format)) { + recordTestSkipped(context, testId, "No HLG editing support"); + return; + } + if (AndroidTestUtil.skipAndLogIfFormatsUnsupported( + context, testId, /* inputFormat= */ format, /* outputFormat= */ null)) { + return; + } + ColorInfo hdr10ColorInfo = + new ColorInfo.Builder() + .setColorSpace(C.COLOR_SPACE_BT2020) + .setColorRange(C.COLOR_RANGE_LIMITED) + .setColorTransfer(C.COLOR_TRANSFER_ST2084) + .build(); + videoFrameProcessorTestRunner = + getDefaultFrameProcessorTestRunnerBuilder(testId) + .setInputColorInfo(hdr10ColorInfo) + .setOutputColorInfo(hdr10ColorInfo) + .setVideoAssetPath(INPUT_PQ_MP4_ASSET_STRING) + .build(); + Bitmap expectedBitmap = readBitmap(ORIGINAL_HDR10_PNG_ASSET_PATH); + + Bitmap actualBitmap = videoFrameProcessorTestRunner.processFirstFrameAndEnd(); + + // TODO(b/207848601): Switch to using proper tooling for testing against golden data. + float averagePixelAbsoluteDifference = + BitmapPixelTestUtil.getBitmapAveragePixelAbsoluteDifferenceFp16( + expectedBitmap, actualBitmap); + assertThat(averagePixelAbsoluteDifference) + .isAtMost(MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16); + } private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunnerBuilder( String testId) { @@ -124,11 +235,13 @@ private VideoFrameProcessorTestRunner.Builder getDefaultFrameProcessorTestRunner private static final class TextureBitmapReader implements VideoFrameProcessorTestRunner.BitmapReader { // TODO(b/239172735): This outputs an incorrect black output image on emulators. 
+ private boolean useHighPrecisionColorComponents; private @MonotonicNonNull Bitmap outputBitmap; @Override - public Surface getSurface(int width, int height) { + public Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents) { + this.useHighPrecisionColorComponents = useHighPrecisionColorComponents; int texId; try { texId = GlUtil.createExternalTexture(); @@ -149,8 +262,23 @@ public void readBitmapFromTexture(GlTextureInfo outputTexture, long presentation GlUtil.focusFramebufferUsingCurrentContext( outputTexture.fboId, outputTexture.width, outputTexture.height); outputBitmap = - BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer( - outputTexture.width, outputTexture.height); + createBitmapFromCurrentGlFrameBuffer( + outputTexture.width, outputTexture.height, useHighPrecisionColorComponents); } + + private static Bitmap createBitmapFromCurrentGlFrameBuffer( + int width, int height, boolean useHighPrecisionColorComponents) throws GlUtil.GlException { + if (!useHighPrecisionColorComponents) { + return BitmapPixelTestUtil.createArgb8888BitmapFromCurrentGlFramebuffer(width, height); + } + checkState(Util.SDK_INT > 26, "useHighPrecisionColorComponents only supported on API 26+"); + return BitmapPixelTestUtil.createFp16BitmapFromCurrentGlFramebuffer(width, height); + } + } + + private static boolean deviceSupportsHdrEditing(Format format) { + return !EncoderUtil.getSupportedEncodersForHdrEditing( + checkNotNull(checkNotNull(format).sampleMimeType), format.colorInfo) + .isEmpty(); } } diff --git a/testdata/src/test/assets/media/bitmap/sample_mp4_first_frame/electrical_colors/original_hdr10.png b/testdata/src/test/assets/media/bitmap/sample_mp4_first_frame/electrical_colors/original_hdr10.png new file mode 100644 index 00000000000..81585215f46 Binary files /dev/null and b/testdata/src/test/assets/media/bitmap/sample_mp4_first_frame/electrical_colors/original_hdr10.png differ diff --git a/testdata/src/test/assets/media/bitmap/sample_mp4_first_frame/electrical_colors/original_hlg10.png b/testdata/src/test/assets/media/bitmap/sample_mp4_first_frame/electrical_colors/original_hlg10.png new file mode 100644 index 00000000000..a48c469d7f1 Binary files /dev/null and b/testdata/src/test/assets/media/bitmap/sample_mp4_first_frame/electrical_colors/original_hlg10.png differ diff --git a/testutils/src/main/java/com/google/android/exoplayer2/testutil/BitmapPixelTestUtil.java b/testutils/src/main/java/com/google/android/exoplayer2/testutil/BitmapPixelTestUtil.java index 08de3d45a3d..d84557dbf58 100644 --- a/testutils/src/main/java/com/google/android/exoplayer2/testutil/BitmapPixelTestUtil.java +++ b/testutils/src/main/java/com/google/android/exoplayer2/testutil/BitmapPixelTestUtil.java @@ -29,6 +29,7 @@ import android.graphics.PixelFormat; import android.media.Image; import android.opengl.GLES20; +import android.opengl.GLES30; import android.opengl.GLUtils; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; @@ -49,8 +50,9 @@ public class BitmapPixelTestUtil { private static final String TAG = "BitmapPixelTestUtil"; /** - * Maximum allowed average pixel difference between the expected and actual edited images in pixel - * difference-based tests, between emulators. + * Maximum allowed average pixel difference between bitmaps generated using emulators. + * + *
<p>
This value is for 8-bit primaries in pixel difference-based tests. * *
<p>
The value is chosen so that differences in decoder behavior across emulator versions don't * affect whether the test passes, but substantial distortions introduced by changes in tested @@ -63,8 +65,9 @@ public class BitmapPixelTestUtil { public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE = 1.f; /** - * Maximum allowed average pixel difference between the expected and actual edited images in pixel - * difference-based tests, between devices, or devices and emulators. + * Maximum allowed average pixel difference between bitmaps generated using devices. + * + *
<p>
This value is for 8-bit primaries in pixel difference-based tests. * *
<p>
The value is chosen so that differences in decoder behavior across devices don't affect * whether the test passes, but substantial distortions introduced by changes in tested components @@ -79,6 +82,23 @@ public class BitmapPixelTestUtil { */ public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE = 5.f; + /** + * Maximum allowed average pixel difference between bitmaps with 16-bit primaries generated using + * devices. + * + *
<p>
The value is chosen so that differences in decoder behavior across devices in pixel + * difference-based tests don't affect whether the test passes, but substantial distortions + * introduced by changes in tested components will cause the test to fail. + * + *
<p>
When the difference is close to the threshold, manually inspect expected/actual bitmaps to + * confirm failure, as it's possible this is caused by a difference in the codec or graphics + * implementation as opposed to an issue in the tested component. + * + *
<p>
This value is larger than {@link #MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE} to support the + * larger variance in decoder outputs between different physical devices and emulators. + */ + public static final float MAXIMUM_AVERAGE_PIXEL_ABSOLUTE_DIFFERENCE_DIFFERENT_DEVICE_FP16 = .01f; + /** * Reads a bitmap from the specified asset location. * @@ -134,10 +154,11 @@ public static Bitmap createArgb8888BitmapWithSolidColor(int width, int height, i } /** - * Returns the average difference between the expected and actual bitmaps, calculated using the - * maximum difference across all color channels for each pixel, then divided by the total number - * of pixels in the image. The bitmap resolutions must match and they must use configuration - * {@link Bitmap.Config#ARGB_8888}. + * Returns the average difference between the expected and actual bitmaps. + * + *
<p>
Calculated using the maximum difference across all color channels for each pixel, then + * divided by the total number of pixels in the image. Bitmap resolutions must match and must use + * configuration {@link Bitmap.Config#ARGB_8888}. * *
<p>
Tries to save a difference bitmap between expected and actual bitmaps. * @@ -155,11 +176,9 @@ public static float getBitmapAveragePixelAbsoluteDifferenceArgb8888( Bitmap actual, @Nullable String testId, @Nullable String differencesBitmapPath) { + assertBitmapsMatch(expected, actual); int width = actual.getWidth(); int height = actual.getHeight(); - assertThat(width).isEqualTo(expected.getWidth()); - assertThat(height).isEqualTo(expected.getHeight()); - assertThat(actual.getConfig()).isEqualTo(Bitmap.Config.ARGB_8888); long sumMaximumAbsoluteDifferences = 0; // Debug-only image diff without alpha. To use, set a breakpoint right before the method return // to view the difference between the expected and actual bitmaps. A passing test should show @@ -192,6 +211,53 @@ public static float getBitmapAveragePixelAbsoluteDifferenceArgb8888( return (float) sumMaximumAbsoluteDifferences / (width * height); } + /** + * Returns the average difference between the expected and actual bitmaps. + * + *
<p>
Calculated using the maximum difference across all color channels for each pixel, then + * divided by the total number of pixels in the image. Bitmap resolutions must match and must use + * configuration {@link Bitmap.Config#RGBA_F16}. + * + * @param expected The expected {@link Bitmap}. + * @param actual The actual {@link Bitmap} produced by the test. + * @return The average of the maximum absolute pixel-wise differences between the expected and + * actual bitmaps. + */ + @RequiresApi(29) // Bitmap#getColor() + public static float getBitmapAveragePixelAbsoluteDifferenceFp16(Bitmap expected, Bitmap actual) { + assertBitmapsMatch(expected, actual); + int width = actual.getWidth(); + int height = actual.getHeight(); + float sumMaximumAbsoluteDifferences = 0; + + for (int y = 0; y < height; y++) { + for (int x = 0; x < width; x++) { + Color actualColor = actual.getColor(x, y); + Color expectedColor = expected.getColor(x, y); + + float alphaDifference = abs(actualColor.alpha() - expectedColor.alpha()); + float redDifference = abs(actualColor.red() - expectedColor.red()); + float blueDifference = abs(actualColor.blue() - expectedColor.blue()); + float greenDifference = abs(actualColor.green() - expectedColor.green()); + + float maximumAbsoluteDifference = 0; + maximumAbsoluteDifference = max(maximumAbsoluteDifference, alphaDifference); + maximumAbsoluteDifference = max(maximumAbsoluteDifference, redDifference); + maximumAbsoluteDifference = max(maximumAbsoluteDifference, blueDifference); + maximumAbsoluteDifference = max(maximumAbsoluteDifference, greenDifference); + + sumMaximumAbsoluteDifferences += maximumAbsoluteDifference; + } + } + return sumMaximumAbsoluteDifferences / (width * height); + } + + private static void assertBitmapsMatch(Bitmap expected, Bitmap actual) { + assertThat(actual.getWidth()).isEqualTo(expected.getWidth()); + assertThat(actual.getHeight()).isEqualTo(expected.getHeight()); + assertThat(actual.getConfig()).isEqualTo(expected.getConfig()); + } + /** * Returns the average difference between the expected and actual bitmaps, calculated using the * maximum difference across all color channels for each pixel, then divided by the total number @@ -244,7 +310,8 @@ public static void maybeSaveTestBitmap( } /** - * Creates a bitmap with the values of the current OpenGL framebuffer. + * Creates a {@link Bitmap.Config#ARGB_8888} bitmap with the values of the current OpenGL + * framebuffer. * *
<p>
This method may block until any previously called OpenGL commands are complete. * @@ -254,16 +321,39 @@ public static void maybeSaveTestBitmap( */ public static Bitmap createArgb8888BitmapFromCurrentGlFramebuffer(int width, int height) throws GlUtil.GlException { - ByteBuffer rgba8888Buffer = ByteBuffer.allocateDirect(width * height * 4); - // TODO(b/227624622): Add support for reading HDR bitmaps. + return createBitmapFromCurrentGlFrameBuffer( + width, height, /* pixelSize= */ 4, GLES20.GL_UNSIGNED_BYTE, Bitmap.Config.ARGB_8888); + } + + /** + * Creates a {@link Bitmap.Config#RGBA_F16} bitmap with the values of the current OpenGL + * framebuffer. + * + *
<p>
This method may block until any previously called OpenGL commands are complete. + * + * @param width The width of the pixel rectangle to read. + * @param height The height of the pixel rectangle to read. + * @return A {@link Bitmap} with the framebuffer's values. + */ + @RequiresApi(26) // Bitmap.Config.RGBA_F16 + public static Bitmap createFp16BitmapFromCurrentGlFramebuffer(int width, int height) + throws GlUtil.GlException { + return createBitmapFromCurrentGlFrameBuffer( + width, height, /* pixelSize= */ 8, GLES30.GL_HALF_FLOAT, Bitmap.Config.RGBA_F16); + } + + private static Bitmap createBitmapFromCurrentGlFrameBuffer( + int width, int height, int pixelSize, int glReadPixelsFormat, Bitmap.Config bitmapConfig) + throws GlUtil.GlException { + ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(width * height * pixelSize); GLES20.glReadPixels( - 0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgba8888Buffer); + /* x= */ 0, /* y= */ 0, width, height, GLES20.GL_RGBA, glReadPixelsFormat, pixelBuffer); GlUtil.checkGlError(); - Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + Bitmap bitmap = Bitmap.createBitmap(width, height, bitmapConfig); // According to https://www.khronos.org/opengl/wiki/Pixel_Transfer#Endian_issues, // the colors will have the order RGBA in client memory. This is what the bitmap expects: - // https://developer.android.com/reference/android/graphics/Bitmap.Config#ARGB_8888. - bitmap.copyPixelsFromBuffer(rgba8888Buffer); + // https://developer.android.com/reference/android/graphics/Bitmap.Config. + bitmap.copyPixelsFromBuffer(pixelBuffer); // Flip the bitmap as its positive y-axis points down while OpenGL's positive y-axis points up. return flipBitmapVertically(bitmap); } @@ -275,7 +365,6 @@ public static Bitmap createArgb8888BitmapFromCurrentGlFramebuffer(int width, int * @return The identifier of the newly created texture. */ public static int createGlTextureFromBitmap(Bitmap bitmap) throws GlUtil.GlException { - // TODO(b/227624622): Add support for reading HDR bitmaps. int texId = GlUtil.createTexture( bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false); diff --git a/testutils/src/main/java/com/google/android/exoplayer2/testutil/VideoFrameProcessorTestRunner.java b/testutils/src/main/java/com/google/android/exoplayer2/testutil/VideoFrameProcessorTestRunner.java index acc8760cd8d..654f000c3df 100644 --- a/testutils/src/main/java/com/google/android/exoplayer2/testutil/VideoFrameProcessorTestRunner.java +++ b/testutils/src/main/java/com/google/android/exoplayer2/testutil/VideoFrameProcessorTestRunner.java @@ -278,7 +278,12 @@ private VideoFrameProcessorTestRunner( new VideoFrameProcessor.Listener() { @Override public void onOutputSizeChanged(int width, int height) { - Surface outputSurface = bitmapReader.getSurface(width, height); + Surface outputSurface = + bitmapReader.getSurface( + width, + height, + /* useHighPrecisionColorComponents= */ ColorInfo.isTransferHdr( + outputColorInfo)); checkNotNull(videoFrameProcessor) .setOutputSurfaceInfo(new SurfaceInfo(outputSurface, width, height)); } @@ -359,7 +364,7 @@ public interface OnOutputFrameAvailableListener { public interface BitmapReader { /** Returns the {@link VideoFrameProcessor} output {@link Surface}. */ - Surface getSurface(int width, int height); + Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents); /** Returns the output {@link Bitmap}. 
*/ Bitmap getBitmap(); @@ -378,7 +383,7 @@ public static final class SurfaceBitmapReader @Override @SuppressLint("WrongConstant") - public Surface getSurface(int width, int height) { + public Surface getSurface(int width, int height, boolean useHighPrecisionColorComponents) { imageReader = ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, /* maxImages= */ 1); return imageReader.getSurface();