Skip to content

Commit

Permalink
Transformer: Support Texture asset loading
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 530888319
  • Loading branch information
tof-tof committed May 11, 2023
1 parent f20ed11 commit 2b79dee
Show file tree
Hide file tree
Showing 7 changed files with 566 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,14 +22,22 @@
import static com.google.android.exoplayer2.util.MimeTypes.VIDEO_H265;

import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.os.Build;
import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.effect.DefaultGlObjectsProvider;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.util.GlObjectsProvider;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MediaFormatUtil;
import com.google.android.exoplayer2.util.MimeTypes;
Expand Down Expand Up @@ -507,6 +515,37 @@ public final class AndroidTestUtil {

public static final String MP3_ASSET_URI_STRING = "asset:///media/mp3/test.mp3";

/**
 * Sets up a GL environment — an {@link EGLDisplay}, an {@link EGLContext} and a focused
 * placeholder EGL surface — and returns the created {@link EGLContext}.
 */
public static EGLContext createOpenGlObjects() throws GlUtil.GlException {
  GlObjectsProvider provider = new DefaultGlObjectsProvider(/* sharedEglContext= */ null);
  EGLDisplay display = GlUtil.createEglDisplay();
  int[] eglConfigAttributes = GlUtil.EGL_CONFIG_ATTRIBUTES_RGBA_8888;
  EGLContext context =
      provider.createEglContext(display, /* openGlVersion= */ 2, eglConfigAttributes);
  // Making a placeholder surface current gives subsequent GL calls a valid draw/read target.
  provider.createFocusedPlaceholderEglSurface(context, display, eglConfigAttributes);
  return context;
}

/**
 * Creates a {@linkplain android.opengl.GLES10#GL_TEXTURE_2D traditional GLES texture} sized to
 * the given bitmap and uploads the bitmap's pixels into it.
 *
 * <p>A GL context must be current on the calling thread.
 *
 * @param bitmap The bitmap whose pixels populate the texture.
 * @return The ID of the newly created texture.
 * @throws GlUtil.GlException If a GL error occurs while creating or filling the texture.
 */
public static int generateTextureFromBitmap(Bitmap bitmap) throws GlUtil.GlException {
  int textureId =
      GlUtil.createTexture(
          bitmap.getWidth(), bitmap.getHeight(), /* useHighPrecisionColorComponents= */ false);
  GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
  GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /* level= */ 0, bitmap, /* border= */ 0);
  // Surface any error from the bind/upload above before returning the texture ID.
  GlUtil.checkGlError();
  return textureId;
}

/**
* Log in logcat and in an analysis file that this test was skipped.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,20 @@
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.MP4_ASSET_WITH_INCREASING_TIMESTAMPS_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.PNG_ASSET_URI_STRING;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.createOpenGlObjects;
import static com.google.android.exoplayer2.transformer.AndroidTestUtil.generateTextureFromBitmap;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;

import android.content.Context;
import android.graphics.Bitmap;
import android.net.Uri;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
Expand All @@ -33,12 +42,19 @@
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;
import com.google.android.exoplayer2.effect.Contrast;
import com.google.android.exoplayer2.effect.DefaultGlObjectsProvider;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.FrameCache;
import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.effect.RgbFilter;
import com.google.android.exoplayer2.effect.TimestampWrapper;
import com.google.android.exoplayer2.upstream.DataSourceBitmapLoader;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.GlUtil;
import com.google.android.exoplayer2.util.VideoFrameProcessingException;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.common.collect.ImmutableList;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.junit.Test;
import org.junit.runner.RunWith;

Expand All @@ -50,6 +66,7 @@
public class TransformerEndToEndTest {

private final Context context = ApplicationProvider.getApplicationContext();
private volatile @MonotonicNonNull TextureAssetLoader textureAssetLoader;

@Test
public void videoEditing_withImageInput_completesWithCorrectFrameCountAndDuration()
Expand Down Expand Up @@ -98,6 +115,118 @@ public void videoTranscoding_withImageInput_completesWithCorrectFrameCountAndDur
.isEqualTo((C.MILLIS_PER_SECOND / expectedFrameCount) * (expectedFrameCount - 1));
}

@Test
public void videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration()
    throws Exception {
  String testId = "videoEditing_withTextureInput_completesWithCorrectFrameCountAndDuration";
  Bitmap bitmap =
      new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
  Transformer transformer =
      new Transformer.Builder(context)
          .setAssetLoaderFactory(
              new TestTextureAssetLoaderFactory(bitmap.getWidth(), bitmap.getHeight()))
          .build();
  int expectedFrameCount = 2;
  // A GL context must be current before generating the input texture below, and the frame
  // processor must share it to be able to sample that texture.
  EGLContext currentContext = createOpenGlObjects();
  DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
      new DefaultVideoFrameProcessor.Factory.Builder()
          .setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
          .build();
  ImmutableList<Effect> videoEffects = ImmutableList.of(Presentation.createForHeight(480));
  EditedMediaItem editedMediaItem =
      new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
          .setDurationUs(C.MICROS_PER_SECOND)
          .setEffects(
              new Effects(
                  /* audioProcessors= */ ImmutableList.of(),
                  videoEffects,
                  videoFrameProcessorFactory))
          .build();
  int texId = generateTextureFromBitmap(bitmap);
  HandlerThread textureQueuingThread = new HandlerThread("textureQueuingThread");
  textureQueuingThread.start();
  Looper looper = checkNotNull(textureQueuingThread.getLooper());
  Handler textureHandler =
      new Handler(looper) {
        @Override
        public void handleMessage(Message msg) {
          // Poll until the asset loader exists and accepts the first texture, then queue the
          // second frame and signal end of input.
          if (textureAssetLoader != null
              && textureAssetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
            textureAssetLoader.queueInputTexture(
                texId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2);
            textureAssetLoader.signalEndOfVideoInput();
            return;
          }
          sendEmptyMessage(0);
        }
      };

  textureHandler.sendEmptyMessage(0);
  ExportTestResult result;
  try {
    result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(testId, editedMediaItem);
  } finally {
    // Quit the queuing thread so the test doesn't leak a live HandlerThread, even on failure.
    textureQueuingThread.quitSafely();
  }

  assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
  // Expected timestamp of the last frame.
  assertThat(result.exportResult.durationMs).isEqualTo(C.MILLIS_PER_SECOND / 2);
}

@Test
public void videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration()
    throws Exception {
  String testId = "videoTranscoding_withTextureInput_completesWithCorrectFrameCountAndDuration";
  Bitmap bitmap =
      new DataSourceBitmapLoader(context).loadBitmap(Uri.parse(PNG_ASSET_URI_STRING)).get();
  Transformer transformer =
      new Transformer.Builder(context)
          .setAssetLoaderFactory(
              new TestTextureAssetLoaderFactory(bitmap.getWidth(), bitmap.getHeight()))
          .build();
  int expectedFrameCount = 2;
  // A GL context must be current before generating the input texture below, and the frame
  // processor must share it to be able to sample that texture.
  EGLContext currentContext = createOpenGlObjects();
  DefaultVideoFrameProcessor.Factory videoFrameProcessorFactory =
      new DefaultVideoFrameProcessor.Factory.Builder()
          .setGlObjectsProvider(new DefaultGlObjectsProvider(currentContext))
          .build();
  EditedMediaItem editedMediaItem =
      new EditedMediaItem.Builder(MediaItem.fromUri(Uri.EMPTY))
          .setDurationUs(C.MICROS_PER_SECOND)
          .setEffects(
              new Effects(
                  /* audioProcessors= */ ImmutableList.of(),
                  /* videoEffects= */ ImmutableList.of(),
                  videoFrameProcessorFactory))
          .build();
  int texId = generateTextureFromBitmap(bitmap);
  HandlerThread textureQueuingThread = new HandlerThread("textureQueuingThread");
  textureQueuingThread.start();
  Looper looper = checkNotNull(textureQueuingThread.getLooper());
  Handler textureHandler =
      new Handler(looper) {
        @Override
        public void handleMessage(Message msg) {
          // Poll until the asset loader exists and accepts the first texture, then queue the
          // second frame and signal end of input.
          if (textureAssetLoader != null
              && textureAssetLoader.queueInputTexture(texId, /* presentationTimeUs= */ 0)) {
            textureAssetLoader.queueInputTexture(
                texId, /* presentationTimeUs= */ C.MICROS_PER_SECOND / 2);
            textureAssetLoader.signalEndOfVideoInput();
            return;
          }
          sendEmptyMessage(0);
        }
      };
  textureHandler.sendEmptyMessage(0);
  ExportTestResult result;
  try {
    result =
        new TransformerAndroidTestRunner.Builder(context, transformer)
            .build()
            .run(testId, editedMediaItem);
  } finally {
    // Quit the queuing thread so the test doesn't leak a live HandlerThread, even on failure.
    textureQueuingThread.quitSafely();
  }

  assertThat(result.exportResult.videoFrameCount).isEqualTo(expectedFrameCount);
  // Expected timestamp of the last frame.
  assertThat(result.exportResult.durationMs).isEqualTo(C.MILLIS_PER_SECOND / 2);
}

@Test
public void videoEditing_completesWithConsistentFrameCount() throws Exception {
Transformer transformer =
Expand Down Expand Up @@ -366,6 +495,34 @@ public void loopingImage_producesExpectedResult() throws Exception {
assertThat(result.exportResult.durationMs).isEqualTo(3100);
}

/**
 * {@link AssetLoader.Factory} that creates {@link TextureAssetLoader} instances for a fixed
 * frame size, recording the most recently created loader in {@code textureAssetLoader}.
 */
private final class TestTextureAssetLoaderFactory implements AssetLoader.Factory {

  private final int width;
  private final int height;

  TestTextureAssetLoaderFactory(int width, int height) {
    this.width = width;
    this.height = height;
  }

  @Override
  public TextureAssetLoader createAssetLoader(
      EditedMediaItem editedMediaItem, Looper looper, AssetLoader.Listener listener) {
    // Release each texture once the frame processor is done with it.
    OnInputFrameProcessedListener releaseTextureListener =
        texId -> {
          try {
            GlUtil.deleteTexture(texId);
          } catch (GlUtil.GlException e) {
            throw new VideoFrameProcessingException(e);
          }
        };
    Format format = new Format.Builder().setWidth(width).setHeight(height).build();
    TextureAssetLoader assetLoader =
        new TextureAssetLoader(editedMediaItem, listener, format, releaseTextureListener);
    textureAssetLoader = assetLoader;
    return assetLoader;
  }
}

private static final class VideoUnsupportedEncoderFactory implements Codec.EncoderFactory {

private final Codec.EncoderFactory encoderFactory;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import android.view.Surface;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.android.exoplayer2.video.ColorInfo;

/** Consumer of encoded media samples, raw audio or raw video frames. */
Expand Down Expand Up @@ -80,6 +81,31 @@ default boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameR

// Methods to pass raw video input.

/**
 * Provides a {@link OnInputFrameProcessedListener} to the consumer.
 *
 * <p>Should only be used for raw video data when input is provided by texture ID.
 *
 * <p>The default implementation throws, so consumers that accept texture input must override
 * this method.
 *
 * @param listener The {@link OnInputFrameProcessedListener}.
 * @throws UnsupportedOperationException If the consumer does not support texture input.
 */
default void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
throw new UnsupportedOperationException();
}

/**
 * Attempts to provide an input texture to the consumer.
 *
 * <p>Should only be used for raw video data.
 *
 * <p>The default implementation throws, so consumers that accept texture input must override
 * this method.
 *
 * @param texId The ID of the texture to queue to the consumer.
 * @param presentationTimeUs The presentation time for the texture, in microseconds.
 * @return Whether the texture was successfully queued. If {@code false}, the caller should try
 *     again later.
 * @throws UnsupportedOperationException If the consumer does not support texture input.
 */
default boolean queueInputTexture(int texId, long presentationTimeUs) {
throw new UnsupportedOperationException();
}

/**
* Returns the input {@link Surface}, where the consumer reads input frames from.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.HandlerWrapper;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.VideoFrameProcessor.OnInputFrameProcessedListener;
import com.google.android.exoplayer2.video.ColorInfo;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
Expand Down Expand Up @@ -422,6 +423,24 @@ public boolean queueInputBitmap(Bitmap inputBitmap, long durationUs, int frameRa
return sampleConsumer.queueInputBitmap(inputBitmap, durationUs, frameRate);
}

@Override
public boolean queueInputTexture(int texId, long presentationTimeUs) {
  // Timestamps are offset by the duration of the items already played in this sequence.
  long globalTimestampUs = totalDurationUs + presentationTimeUs;
  if (!isLooping || globalTimestampUs < maxSequenceDurationUs) {
    return sampleConsumer.queueInputTexture(texId, presentationTimeUs);
  }
  // A looping sequence has reached its maximum duration: signal end of video input exactly once
  // (and only when the maximum duration is final), and reject the texture.
  if (isMaxSequenceDurationUsFinal && !videoLoopingEnded) {
    videoLoopingEnded = true;
    signalEndOfVideoInput();
  }
  return false;
}

@Override
public void setOnInputFrameProcessedListener(OnInputFrameProcessedListener listener) {
// Forward directly to the wrapped sample consumer; no bookkeeping is needed here.
sampleConsumer.setOnInputFrameProcessedListener(listener);
}

@Override
public Surface getInputSurface() {
return sampleConsumer.getInputSurface();
Expand Down
Loading

0 comments on commit 2b79dee

Please sign in to comment.