Video transcoding.

master
Alan Evans 2019-07-25 15:23:59 -04:00
parent 453f93a84f
commit f9946083dd
10 changed files with 2149 additions and 0 deletions

View File: AndroidMuxer.java

@@ -0,0 +1,54 @@
package org.thoughtcrime.securesms.video.videoconverter;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
public final class AndroidMuxer implements Muxer {
private final MediaMuxer muxer;
AndroidMuxer(final @NonNull File file) throws IOException {
muxer = new MediaMuxer(file.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
@RequiresApi(26)
AndroidMuxer(final @NonNull FileDescriptor fileDescriptor) throws IOException {
muxer = new MediaMuxer(fileDescriptor, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
@Override
public void start() {
muxer.start();
}
@Override
public void stop() {
muxer.stop();
}
@Override
public int addTrack(final @NonNull MediaFormat format) {
return muxer.addTrack(format);
}
@Override
public void writeSampleData(final int trackIndex, final @NonNull ByteBuffer byteBuf, final @NonNull MediaCodec.BufferInfo bufferInfo) {
muxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
}
@Override
public void release() {
muxer.release();
}
}
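
Note: MediaMuxer is strict about call order, and the Muxer interface mirrors it: add every track and call start() before the first writeSampleData(), and call stop() before release(). A minimal usage sketch under that constraint (the output path and the format/buffer variables are illustrative):

Muxer muxer = new AndroidMuxer(new File("/path/to/out.mp4"));
int track = muxer.addTrack(format);         // format from INFO_OUTPUT_FORMAT_CHANGED
muxer.start();                              // no addTrack() calls after this point
muxer.writeSampleData(track, buffer, info); // repeated once per encoded sample
muxer.stop();
muxer.release();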

View File: AudioTrackConverter.java

@@ -0,0 +1,418 @@
package org.thoughtcrime.securesms.video.videoconverter;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.thoughtcrime.securesms.logging.Log;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Locale;
final class AudioTrackConverter {
private static final String TAG = "media-converter";
private static final boolean VERBOSE = false; // lots of logging
private static final String OUTPUT_AUDIO_MIME_TYPE = "audio/mp4a-latm"; // Advanced Audio Coding
private static final int OUTPUT_AUDIO_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; //MediaCodecInfo.CodecProfileLevel.AACObjectHE;
private static final int TIMEOUT_USEC = 10000;
private final long mTimeFrom;
private final long mTimeTo;
private final int mAudioBitrate;
final long mInputDuration;
private final MediaExtractor mAudioExtractor;
private final MediaCodec mAudioDecoder;
private final MediaCodec mAudioEncoder;
private final ByteBuffer[] mAudioDecoderInputBuffers;
private ByteBuffer[] mAudioDecoderOutputBuffers;
private final ByteBuffer[] mAudioEncoderInputBuffers;
private ByteBuffer[] mAudioEncoderOutputBuffers;
private final MediaCodec.BufferInfo mAudioDecoderOutputBufferInfo;
private final MediaCodec.BufferInfo mAudioEncoderOutputBufferInfo;
MediaFormat mEncoderOutputAudioFormat;
boolean mAudioExtractorDone;
private boolean mAudioDecoderDone;
boolean mAudioEncoderDone;
private int mOutputAudioTrack = -1;
private int mPendingAudioDecoderOutputBufferIndex = -1;
long mMuxingAudioPresentationTime;
private int mAudioExtractedFrameCount;
private int mAudioDecodedFrameCount;
private int mAudioEncodedFrameCount;
private Muxer mMuxer;
static @Nullable AudioTrackConverter create(
final @NonNull MediaConverter.Input input,
final long timeFrom,
final long timeTo,
final int audioBitrate) throws IOException {
final MediaExtractor audioExtractor = input.createExtractor();
final int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
if (audioInputTrack == -1) {
audioExtractor.release();
return null;
}
return new AudioTrackConverter(audioExtractor, audioInputTrack, timeFrom, timeTo, audioBitrate);
}
private AudioTrackConverter(
final @NonNull MediaExtractor audioExtractor,
final int audioInputTrack,
long timeFrom,
long timeTo,
int audioBitrate) throws IOException {
mTimeFrom = timeFrom;
mTimeTo = timeTo;
mAudioExtractor = audioExtractor;
mAudioBitrate = audioBitrate;
final MediaCodecInfo audioCodecInfo = MediaConverter.selectCodec(OUTPUT_AUDIO_MIME_TYPE);
if (audioCodecInfo == null) {
// Don't fail CTS if they don't have an AAC codec (not here, anyway).
Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE);
throw new FileNotFoundException();
}
if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName());
final MediaFormat inputAudioFormat = mAudioExtractor.getTrackFormat(audioInputTrack);
mInputDuration = inputAudioFormat.containsKey(MediaFormat.KEY_DURATION) ? inputAudioFormat.getLong(MediaFormat.KEY_DURATION) : 0;
final MediaFormat outputAudioFormat =
MediaFormat.createAudioFormat(
OUTPUT_AUDIO_MIME_TYPE,
inputAudioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
inputAudioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, audioBitrate);
outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
// Create a MediaCodec for the desired codec, then configure it as an encoder with
// our desired properties. Request a Surface to use for input.
mAudioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
// Create a MediaCodec for the decoder, based on the extractor's format.
mAudioDecoder = createAudioDecoder(inputAudioFormat);
mAudioDecoderInputBuffers = mAudioDecoder.getInputBuffers();
mAudioDecoderOutputBuffers = mAudioDecoder.getOutputBuffers();
mAudioEncoderInputBuffers = mAudioEncoder.getInputBuffers();
mAudioEncoderOutputBuffers = mAudioEncoder.getOutputBuffers();
mAudioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
mAudioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
if (mTimeFrom > 0) {
mAudioExtractor.seekTo(mTimeFrom * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
Log.i(TAG, "Seek audio:" + mTimeFrom + " " + mAudioExtractor.getSampleTime());
}
}
void setMuxer(final @NonNull Muxer muxer) throws IOException {
mMuxer = muxer;
if (mEncoderOutputAudioFormat != null) {
Log.d(TAG, "muxer: adding audio track.");
if (!mEncoderOutputAudioFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
mEncoderOutputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
}
if (!mEncoderOutputAudioFormat.containsKey(MediaFormat.KEY_AAC_PROFILE)) {
mEncoderOutputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
}
mOutputAudioTrack = muxer.addTrack(mEncoderOutputAudioFormat);
}
}
void step() throws IOException {
// Extract audio from file and feed to decoder.
// Do not extract audio if we have determined the output format but we are not yet
// ready to mux the frames.
while (!mAudioExtractorDone && (mEncoderOutputAudioFormat == null || mMuxer != null)) {
int decoderInputBufferIndex = mAudioDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no audio decoder input buffer");
break;
}
if (VERBOSE) {
Log.d(TAG, "audio decoder: returned input buffer: " + decoderInputBufferIndex);
}
final ByteBuffer decoderInputBuffer = mAudioDecoderInputBuffers[decoderInputBufferIndex];
final int size = mAudioExtractor.readSampleData(decoderInputBuffer, 0);
final long presentationTime = mAudioExtractor.getSampleTime();
if (VERBOSE) {
Log.d(TAG, "audio extractor: returned buffer of size " + size);
Log.d(TAG, "audio extractor: returned buffer for time " + presentationTime);
}
mAudioExtractorDone = size < 0 || (mTimeTo > 0 && presentationTime > mTimeTo * 1000);
if (mAudioExtractorDone) {
if (VERBOSE) Log.d(TAG, "audio extractor: EOS");
mAudioDecoder.queueInputBuffer(
decoderInputBufferIndex,
0,
0,
0,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
mAudioDecoder.queueInputBuffer(
decoderInputBufferIndex,
0,
size,
presentationTime,
mAudioExtractor.getSampleFlags());
}
mAudioExtractor.advance();
mAudioExtractedFrameCount++;
// We extracted a frame, let's try something else next.
break;
}
// Poll output frames from the audio decoder.
// Do not poll if we already have a pending buffer to feed to the encoder.
while (!mAudioDecoderDone && mPendingAudioDecoderOutputBufferIndex == -1
&& (mEncoderOutputAudioFormat == null || mMuxer != null)) {
final int decoderOutputBufferIndex =
mAudioDecoder.dequeueOutputBuffer(
mAudioDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no audio decoder output buffer");
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "audio decoder: output buffers changed");
mAudioDecoderOutputBuffers = mAudioDecoder.getOutputBuffers();
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (VERBOSE) {
MediaFormat decoderOutputAudioFormat = mAudioDecoder.getOutputFormat();
Log.d(TAG, "audio decoder: output format changed: " + decoderOutputAudioFormat);
}
break;
}
if (VERBOSE) {
Log.d(TAG, "audio decoder: returned output buffer: " + decoderOutputBufferIndex);
Log.d(TAG, "audio decoder: returned buffer of size " + mAudioDecoderOutputBufferInfo.size);
}
if ((mAudioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "audio decoder: codec config buffer");
mAudioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
if (mAudioDecoderOutputBufferInfo.presentationTimeUs < mTimeFrom * 1000 &&
(mAudioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
if (VERBOSE)
Log.d(TAG, "audio decoder: frame prior to " + mAudioDecoderOutputBufferInfo.presentationTimeUs);
mAudioDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
if (VERBOSE) {
Log.d(TAG, "audio decoder: returned buffer for time " + mAudioDecoderOutputBufferInfo.presentationTimeUs);
Log.d(TAG, "audio decoder: output buffer is now pending: " + mPendingAudioDecoderOutputBufferIndex);
}
mPendingAudioDecoderOutputBufferIndex = decoderOutputBufferIndex;
mAudioDecodedFrameCount++;
// We extracted a pending frame, let's try something else next.
break;
}
// Feed the pending decoded audio buffer to the audio encoder.
while (mPendingAudioDecoderOutputBufferIndex != -1) {
if (VERBOSE) {
Log.d(TAG, "audio decoder: attempting to process pending buffer: " + mPendingAudioDecoderOutputBufferIndex);
}
final int encoderInputBufferIndex = mAudioEncoder.dequeueInputBuffer(TIMEOUT_USEC);
if (encoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no audio encoder input buffer");
break;
}
if (VERBOSE) {
Log.d(TAG, "audio encoder: returned input buffer: " + encoderInputBufferIndex);
}
final ByteBuffer encoderInputBuffer = mAudioEncoderInputBuffers[encoderInputBufferIndex];
final int size = mAudioDecoderOutputBufferInfo.size;
final long presentationTime = mAudioDecoderOutputBufferInfo.presentationTimeUs;
if (VERBOSE) {
Log.d(TAG, "audio decoder: processing pending buffer: " + mPendingAudioDecoderOutputBufferIndex);
}
if (VERBOSE) {
Log.d(TAG, "audio decoder: pending buffer of size " + size);
Log.d(TAG, "audio decoder: pending buffer for time " + presentationTime);
}
if (size >= 0) {
final ByteBuffer decoderOutputBuffer = mAudioDecoderOutputBuffers[mPendingAudioDecoderOutputBufferIndex].duplicate();
decoderOutputBuffer.position(mAudioDecoderOutputBufferInfo.offset);
decoderOutputBuffer.limit(mAudioDecoderOutputBufferInfo.offset + size);
encoderInputBuffer.position(0);
encoderInputBuffer.put(decoderOutputBuffer);
mAudioEncoder.queueInputBuffer(
encoderInputBufferIndex,
0,
size,
presentationTime,
mAudioDecoderOutputBufferInfo.flags);
}
mAudioDecoder.releaseOutputBuffer(mPendingAudioDecoderOutputBufferIndex, false);
mPendingAudioDecoderOutputBufferIndex = -1;
if ((mAudioDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "audio decoder: EOS");
mAudioDecoderDone = true;
}
// We enqueued a pending frame, let's try something else next.
break;
}
// Poll frames from the audio encoder and send them to the muxer.
while (!mAudioEncoderDone && (mEncoderOutputAudioFormat == null || mMuxer != null)) {
final int encoderOutputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mAudioEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no audio encoder output buffer");
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "audio encoder: output buffers changed");
mAudioEncoderOutputBuffers = mAudioEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (VERBOSE) Log.d(TAG, "audio encoder: output format changed");
Preconditions.checkState("audio encoder changed its output format again?", mOutputAudioTrack < 0);
mEncoderOutputAudioFormat = mAudioEncoder.getOutputFormat();
break;
}
Preconditions.checkState("should have added track before processing output", mMuxer != null);
if (VERBOSE) {
Log.d(TAG, "audio encoder: returned output buffer: " + encoderOutputBufferIndex);
Log.d(TAG, "audio encoder: returned buffer of size " + mAudioEncoderOutputBufferInfo.size);
}
final ByteBuffer encoderOutputBuffer = mAudioEncoderOutputBuffers[encoderOutputBufferIndex];
if ((mAudioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "audio encoder: codec config buffer");
// Simply ignore codec config buffers.
mAudioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (VERBOSE) {
Log.d(TAG, "audio encoder: returned buffer for time " + mAudioEncoderOutputBufferInfo.presentationTimeUs);
}
if (mAudioEncoderOutputBufferInfo.size != 0) {
mMuxer.writeSampleData(mOutputAudioTrack, encoderOutputBuffer, mAudioEncoderOutputBufferInfo);
mMuxingAudioPresentationTime = Math.max(mMuxingAudioPresentationTime, mAudioEncoderOutputBufferInfo.presentationTimeUs);
}
if ((mAudioEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "audio encoder: EOS");
mAudioEncoderDone = true;
}
mAudioEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
mAudioEncodedFrameCount++;
// We enqueued an encoded frame, let's try something else next.
break;
}
}
void release() throws Exception {
Exception exception = null;
try {
if (mAudioExtractor != null) {
mAudioExtractor.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mAudioExtractor", e);
exception = e;
}
try {
if (mAudioDecoder != null) {
mAudioDecoder.stop();
mAudioDecoder.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mAudioDecoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (mAudioEncoder != null) {
mAudioEncoder.stop();
mAudioEncoder.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mAudioEncoder", e);
if (exception == null) {
exception = e;
}
}
if (exception != null) {
throw exception;
}
}
String dumpState() {
return String.format(Locale.US,
"A{"
+ "extracted:%d(done:%b) "
+ "decoded:%d(done:%b) "
+ "encoded:%d(done:%b) "
+ "pending:%d "
+ "muxing:%b(track:%d} )",
mAudioExtractedFrameCount, mAudioExtractorDone,
mAudioDecodedFrameCount, mAudioDecoderDone,
mAudioEncodedFrameCount, mAudioEncoderDone,
mPendingAudioDecoderOutputBufferIndex,
mMuxer != null, mOutputAudioTrack);
}
void verifyEndState() {
Preconditions.checkState("no frame should be pending", -1 == mPendingAudioDecoderOutputBufferIndex);
}
private static @NonNull MediaCodec createAudioDecoder(final @NonNull MediaFormat inputFormat) throws IOException {
final MediaCodec decoder = MediaCodec.createDecoderByType(MediaConverter.getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, null, null, 0);
decoder.start();
return decoder;
}
private static @NonNull MediaCodec createAudioEncoder(final @NonNull MediaCodecInfo codecInfo, final @NonNull MediaFormat format) throws IOException {
final MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
return encoder;
}
private static int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (VERBOSE) {
Log.d(TAG, "format for track " + index + " is " + MediaConverter.getMimeTypeFor(extractor.getTrackFormat(index)));
}
if (isAudioFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private static boolean isAudioFormat(final @NonNull MediaFormat format) {
return MediaConverter.getMimeTypeFor(format).startsWith("audio/");
}
}
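
How this class is driven: each step() call advances at most one hop of the extract, decode, encode, mux pipeline and then returns, so the caller can interleave audio work with video work by presentation time. A hedged sketch of a standalone driver, mirroring the loop in MediaConverter (the muxer handling is simplified; the real converter also coordinates the video track before starting the muxer):

AudioTrackConverter audio = AudioTrackConverter.create(input, 0, 0, 128_000);
boolean muxing = false;
while (audio != null && !audio.mAudioEncoderDone) {
    audio.step();                                        // one pipeline hop
    if (!muxing && audio.mEncoderOutputAudioFormat != null) {
        audio.setMuxer(muxer);                           // adds the AAC track
        muxer.start();                                   // unblocks sample writing
        muxing = true;
    }
}
if (audio != null) {
    audio.verifyEndState();
    audio.release();
}

Note that step() deliberately stalls once the encoder reports its output format until a muxer is attached; that is what the (mEncoderOutputAudioFormat == null || mMuxer != null) guards implement.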

View File: BadVideoException.java

@@ -0,0 +1,6 @@
package org.thoughtcrime.securesms.video.videoconverter;
/**
 * Thrown by MediaConverter when the input contains neither a usable video track nor a usable audio track.
 */
public final class BadVideoException extends Exception {
BadVideoException() {
}
}

View File: InputSurface.java

@@ -0,0 +1,187 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file has been modified by Signal.
*/
package org.thoughtcrime.securesms.video.videoconverter;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;
import org.thoughtcrime.securesms.logging.Log;
/**
* Holds state associated with a Surface used for MediaCodec encoder input.
* <p>
* The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
* to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
* to the video encoder.
*/
final class InputSurface {
private static final String TAG = "InputSurface";
private static final boolean VERBOSE = false;
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private static final int EGL_OPENGL_ES2_BIT = 4;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private Surface mSurface;
/**
* Creates an InputSurface from a Surface.
*/
public InputSurface(Surface surface) {
if (surface == null) {
throw new NullPointerException();
}
mSurface = surface;
eglSetup();
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
mEGLDisplay = null;
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
// to be able to tell if the frame is reasonable.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0)) {
throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
attrib_list, 0);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
}
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
}
}
/**
* Discard all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
// Clear the current context and surface to ensure they are discarded immediately.
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
}
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
//EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
// null everything out so future attempts to use this object will cause an NPE
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
mSurface = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}
/**
* Returns the Surface that the MediaCodec receives buffers from.
*/
public Surface getSurface() {
return mSurface;
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
}
/**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
if (failed) {
throw new RuntimeException("EGL error encountered (see log)");
}
}
}
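
For context, a hedged sketch of the encoder-side flow this class supports (variable names are illustrative): the Surface comes from MediaCodec.createInputSurface(), GL renders into it, and each swapBuffers() submits one frame to the encoder.

MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
InputSurface input = new InputSurface(encoder.createInputSurface());
encoder.start();
input.makeCurrent();
// ... render one frame with GLES ...
input.setPresentationTime(presentationTimeUs * 1000); // EGL takes nanoseconds
input.swapBuffers();                                  // sends the frame to the encoder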

View File: MediaConverter.java

@@ -0,0 +1,409 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file has been modified by Signal.
*/
package org.thoughtcrime.securesms.video.videoconverter;
import android.content.Context;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaDataSource;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.net.Uri;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.StringDef;
import androidx.annotation.WorkerThread;
import org.thoughtcrime.securesms.logging.Log;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@SuppressWarnings("WeakerAccess")
public final class MediaConverter {
private static final String TAG = "media-converter";
private static final boolean VERBOSE = false; // lots of logging
// Describes when the annotation will be discarded
@Retention(RetentionPolicy.SOURCE)
@StringDef({VIDEO_CODEC_H264, VIDEO_CODEC_H265})
public @interface VideoCodec {}
public static final String VIDEO_CODEC_H264 = "video/avc";
public static final String VIDEO_CODEC_H265 = "video/hevc";
private Input mInput;
private Output mOutput;
private long mTimeFrom;
private long mTimeTo;
private int mVideoResolution;
private int mVideoBitrate = 2000000; // 2Mbps
private @VideoCodec String mVideoCodec = VIDEO_CODEC_H264;
private int mAudioBitrate = 128000; // 128Kbps
private Listener mListener;
private boolean mCancelled;
public interface Listener {
boolean onProgress(int percent);
}
public MediaConverter() {
}
@SuppressWarnings("unused")
public void setInput(final @NonNull File file) {
mInput = new FileInput(file);
}
@SuppressWarnings("unused")
public void setInput(final @NonNull Context context, final @NonNull Uri uri) {
mInput = new UriInput(context, uri);
}
@RequiresApi(23)
@SuppressWarnings("unused")
public void setInput(final @NonNull MediaDataSource mediaDataSource) {
mInput = new MediaDataSourceInput(mediaDataSource);
}
@SuppressWarnings("unused")
public void setOutput(final @NonNull File file) {
mOutput = new FileOutput(file);
}
@SuppressWarnings("unused")
@RequiresApi(26)
public void setOutput(final @NonNull FileDescriptor fileDescriptor) {
mOutput = new FileDescriptorOutput(fileDescriptor);
}
@SuppressWarnings("unused")
public void setTimeRange(long timeFrom, long timeTo) {
mTimeFrom = timeFrom;
mTimeTo = timeTo;
if (timeTo > 0 && timeFrom >= timeTo) {
throw new IllegalArgumentException("timeFrom:" + timeFrom + " timeTo:" + timeTo);
}
}
@SuppressWarnings("unused")
public void setVideoResolution(int videoResolution) {
mVideoResolution = videoResolution;
}
@SuppressWarnings("unused")
public void setVideoCodec(final @VideoCodec String videoCodec) throws FileNotFoundException {
if (selectCodec(videoCodec) == null) {
throw new FileNotFoundException();
}
mVideoCodec = videoCodec;
}
@SuppressWarnings("unused")
public void setVideoBitrate(final int videoBitrate) {
mVideoBitrate = videoBitrate;
}
@SuppressWarnings("unused")
public void setAudioBitrate(final int audioBitrate) {
mAudioBitrate = audioBitrate;
}
@SuppressWarnings("unused")
public void setListener(final Listener listener) {
mListener = listener;
}
@WorkerThread
public void convert() throws BadVideoException, IOException {
// Exception that may be thrown during release.
Exception exception = null;
Muxer muxer = null;
VideoTrackConverter videoTrackConverter = null;
AudioTrackConverter audioTrackConverter = null;
try {
videoTrackConverter = VideoTrackConverter.create(mInput, mTimeFrom, mTimeTo, mVideoResolution, mVideoBitrate, mVideoCodec);
audioTrackConverter = AudioTrackConverter.create(mInput, mTimeFrom, mTimeTo, mAudioBitrate);
if (videoTrackConverter == null && audioTrackConverter == null) {
Log.e(TAG, "no video and audio tracks");
throw new BadVideoException();
}
muxer = mOutput.createMuxer();
doExtractDecodeEditEncodeMux(
videoTrackConverter,
audioTrackConverter,
muxer);
} catch (BadVideoException | IOException e) {
Log.e(TAG, "error converting", e);
exception = e;
throw e;
} catch (Exception e) {
Log.e(TAG, "error converting", e);
exception = e;
} finally {
if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer");
// Try to release everything we acquired, even if one of the releases fails, in which
// case we save the first exception we got and re-throw it at the end (unless some other
// exception has already been thrown). This guarantees that the first exception is
// reported as the cause of the error, that we attempt to release everything, and that
// all other exceptions appear in the logs.
try {
if (videoTrackConverter != null) {
videoTrackConverter.release();
}
} catch (Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (audioTrackConverter != null) {
audioTrackConverter.release();
}
} catch (Exception e) {
if (exception == null) {
exception = e;
}
}
try {
if (muxer != null) {
muxer.stop();
muxer.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing muxer", e);
if (exception == null) {
exception = e;
}
}
}
if (exception != null) {
throw new RuntimeException(exception);
}
}
/**
* Does the actual work for extracting, decoding, encoding and muxing.
*/
private void doExtractDecodeEditEncodeMux(
final @Nullable VideoTrackConverter videoTrackConverter,
final @Nullable AudioTrackConverter audioTrackConverter,
final @NonNull Muxer muxer) throws IOException {
boolean muxing = false;
int percentProcessed = 0;
long inputDuration = Math.max(
videoTrackConverter == null ? 0 : videoTrackConverter.mInputDuration,
audioTrackConverter == null ? 0 : audioTrackConverter.mInputDuration);
while (!mCancelled &&
((videoTrackConverter != null && !videoTrackConverter.mVideoEncoderDone) ||
(audioTrackConverter != null &&!audioTrackConverter.mAudioEncoderDone))) {
if (VERBOSE) {
Log.d(TAG, "loop: " +
(videoTrackConverter == null ? "" : videoTrackConverter.dumpState()) +
(audioTrackConverter == null ? "" : audioTrackConverter.dumpState()) +
" muxing:" + muxing);
}
if (videoTrackConverter != null && (audioTrackConverter == null || audioTrackConverter.mAudioExtractorDone || videoTrackConverter.mMuxingVideoPresentationTime <= audioTrackConverter.mMuxingAudioPresentationTime)) {
videoTrackConverter.step();
}
if (audioTrackConverter != null && (videoTrackConverter == null || videoTrackConverter.mVideoExtractorDone || videoTrackConverter.mMuxingVideoPresentationTime >= audioTrackConverter.mMuxingAudioPresentationTime)) {
audioTrackConverter.step();
}
if (inputDuration != 0 && mListener != null) {
final long timeFromUs = mTimeFrom <= 0 ? 0 : mTimeFrom * 1000;
final long timeToUs = mTimeTo <= 0 ? inputDuration : mTimeTo * 1000;
final int curPercentProcessed = (int) (100 *
(Math.max(
videoTrackConverter == null ? 0 : videoTrackConverter.mMuxingVideoPresentationTime,
audioTrackConverter == null ? 0 : audioTrackConverter.mMuxingAudioPresentationTime)
- timeFromUs) / (timeToUs - timeFromUs));
if (curPercentProcessed != percentProcessed) {
percentProcessed = curPercentProcessed;
mCancelled = mCancelled || mListener.onProgress(percentProcessed);
}
}
if (!muxing
&& (videoTrackConverter == null || videoTrackConverter.mEncoderOutputVideoFormat != null)
&& (audioTrackConverter == null || audioTrackConverter.mEncoderOutputAudioFormat != null)) {
if (videoTrackConverter != null) {
videoTrackConverter.setMuxer(muxer);
}
if (audioTrackConverter != null) {
audioTrackConverter.setMuxer(muxer);
}
Log.d(TAG, "muxer: starting");
muxer.start();
muxing = true;
}
}
// Basic sanity checks.
if (videoTrackConverter != null) {
videoTrackConverter.verifyEndState();
}
if (audioTrackConverter != null) {
audioTrackConverter.verifyEndState();
}
// TODO: Check the generated output file.
}
static String getMimeTypeFor(MediaFormat format) {
return format.getString(MediaFormat.KEY_MIME);
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no match was
* found.
*/
static MediaCodecInfo selectCodec(final String mimeType) {
final int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) {
continue;
}
final String[] types = codecInfo.getSupportedTypes();
for (String type : types) {
if (type.equalsIgnoreCase(mimeType)) {
return codecInfo;
}
}
}
return null;
}
interface Input {
@NonNull
MediaExtractor createExtractor() throws IOException;
}
private static class FileInput implements Input {
final File file;
FileInput(final @NonNull File file) {
this.file = file;
}
@Override
public @NonNull MediaExtractor createExtractor() throws IOException {
final MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(file.getAbsolutePath());
return extractor;
}
}
private static class UriInput implements Input {
final Uri uri;
final Context context;
UriInput(final @NonNull Context context, final @NonNull Uri uri) {
this.uri = uri;
this.context = context;
}
@Override
public @NonNull MediaExtractor createExtractor() throws IOException {
final MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(context, uri, null);
return extractor;
}
}
@RequiresApi(23)
private static class MediaDataSourceInput implements Input {
private final MediaDataSource mediaDataSource;
MediaDataSourceInput(final @NonNull MediaDataSource mediaDataSource) {
this.mediaDataSource = mediaDataSource;
}
@Override
public @NonNull MediaExtractor createExtractor() throws IOException {
final MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(mediaDataSource);
return extractor;
}
}
interface Output {
@NonNull
Muxer createMuxer() throws IOException;
}
private static class FileOutput implements Output {
final File file;
FileOutput(final @NonNull File file) {
this.file = file;
}
@Override
public @NonNull Muxer createMuxer() throws IOException {
return new AndroidMuxer(file);
}
}
@RequiresApi(26)
private static class FileDescriptorOutput implements Output {
final FileDescriptor fileDescriptor;
FileDescriptorOutput(final @NonNull FileDescriptor fileDescriptor) {
this.fileDescriptor = fileDescriptor;
}
@Override
public @NonNull Muxer createMuxer() throws IOException {
return new AndroidMuxer(fileDescriptor);
}
}
}
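
A hedged end-to-end sketch of the public API above (paths and settings are illustrative). convert() is annotated @WorkerThread and blocks until the output is written, so it must run off the main thread:

MediaConverter converter = new MediaConverter();
converter.setInput(new File("/path/to/in.mp4"));
converter.setOutput(new File("/path/to/out.mp4"));
converter.setVideoCodec(MediaConverter.VIDEO_CODEC_H264);
converter.setVideoResolution(480);        // applied to the shorter edge
converter.setVideoBitrate(2_000_000);     // 2 Mbps
converter.setAudioBitrate(128_000);       // 128 kbps
converter.setListener(percent -> false);  // return true to cancel
converter.convert();                      // throws BadVideoException/IOException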

View File: Muxer.java

@@ -0,0 +1,22 @@
package org.thoughtcrime.securesms.video.videoconverter;
import android.media.MediaCodec;
import android.media.MediaFormat;
import androidx.annotation.NonNull;
import java.io.IOException;
import java.nio.ByteBuffer;
public interface Muxer {
void start() throws IOException;
void stop() throws IOException;
int addTrack(@NonNull MediaFormat format) throws IOException;
void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf, @NonNull MediaCodec.BufferInfo bufferInfo) throws IOException;
void release();
}

View File: OutputSurface.java

@@ -0,0 +1,297 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file has been modified by Signal.
*/
package org.thoughtcrime.securesms.video.videoconverter;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.view.Surface;
import org.thoughtcrime.securesms.logging.Log;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
* Holds state associated with a Surface used for MediaCodec decoder output.
* <p>
* The (width,height) constructor for this class will prepare GL, create a SurfaceTexture,
* and then create a Surface for that SurfaceTexture. The Surface can be passed to
* MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
* texture with updateTexImage, then render the texture with GL to a pbuffer.
* <p>
* The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
* Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
* we just draw it on whatever surface is current.
* <p>
* By default, the Surface will be using a BufferQueue in asynchronous mode, so we
* can potentially drop frames.
*/
final class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
private static final String TAG = "OutputSurface";
private static final boolean VERBOSE = false;
private static final int EGL_OPENGL_ES2_BIT = 4;
private EGL10 mEGL;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
private SurfaceTexture mSurfaceTexture;
private Surface mSurface;
private final Object mFrameSyncObject = new Object(); // guards mFrameAvailable
private boolean mFrameAvailable;
private TextureRender mTextureRender;
/**
* Creates an OutputSurface backed by a pbuffer with the specified dimensions. The new
* EGL context and surface will be made current. Creates a Surface that can be passed
* to MediaCodec.configure().
*/
public OutputSurface(int width, int height) {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException();
}
eglSetup(width, height);
makeCurrent();
setup();
}
/**
* Creates an OutputSurface using the current EGL context. Creates a Surface that can be
* passed to MediaCodec.configure().
*/
public OutputSurface() {
setup();
}
/**
* Creates instances of TextureRender and SurfaceTexture, and a Surface associated
* with the SurfaceTexture.
*/
private void setup() {
mTextureRender = new TextureRender();
mTextureRender.surfaceCreated();
// Even if we don't access the SurfaceTexture after the constructor returns, we
// still need to keep a reference to it. The Surface doesn't retain a reference
// at the Java level, so if we don't either then the object can get GCed, which
// causes the native finalizer to run.
if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
// This doesn't work if OutputSurface is created on the thread that CTS started for
// these test cases.
//
// The CTS-created thread has a Looper, and the SurfaceTexture constructor will
// create a Handler that uses it. The "frame available" message is delivered
// there, but since we're not a Looper-based thread we'll never see it. For
// this to do anything useful, OutputSurface must be created on a thread without
// a Looper, so that SurfaceTexture uses the main application Looper instead.
//
// Java language note: passing "this" out of a constructor is generally unwise,
// but we should be able to get away with it here.
mSurfaceTexture.setOnFrameAvailableListener(this);
mSurface = new Surface(mSurfaceTexture);
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
*/
private void eglSetup(int width, int height) {
mEGL = (EGL10)EGLContext.getEGL();
mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (!mEGL.eglInitialize(mEGLDisplay, null)) {
throw new RuntimeException("unable to initialize EGL10");
}
// Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
// to be able to tell if the frame is reasonable.
int[] attribList = {
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
throw new RuntimeException("unable to find RGB888+pbuffer EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL10.EGL_NONE
};
mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
attrib_list);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
}
// Create a pbuffer surface. By using this for output, we can use glReadPixels
// to test values in the output.
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, width,
EGL10.EGL_HEIGHT, height,
EGL10.EGL_NONE
};
mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs);
checkEglError("eglCreatePbufferSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
}
}
/**
* Discard all resources held by this class, notably the EGL context.
*/
public void release() {
if (mEGL != null) {
if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
// Clear the current context and surface to ensure they are discarded immediately.
mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_CONTEXT);
}
mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
//mEGL.eglTerminate(mEGLDisplay);
}
mSurface.release();
// this causes a bunch of warnings that appear harmless but might confuse someone:
// W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
//mSurfaceTexture.release();
// null everything out so future attempts to use this object will cause an NPE
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
mEGL = null;
mTextureRender = null;
mSurface = null;
mSurfaceTexture = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
if (mEGL == null) {
throw new RuntimeException("not configured for makeCurrent");
}
checkEglError("before makeCurrent");
if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
/**
* Returns the Surface that we draw onto.
*/
public Surface getSurface() {
return mSurface;
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
mTextureRender.changeFragmentShader(fragmentShader);
}
/**
* Latches the next buffer into the texture. Must be called from the thread that created
* the OutputSurface object, after the onFrameAvailable callback has signaled that new
* data is available.
*/
public void awaitNewImage() {
final int TIMEOUT_MS = 500;
synchronized (mFrameSyncObject) {
while (!mFrameAvailable) {
try {
// Wait for onFrameAvailable() to signal us. Use a timeout to avoid
// stalling the test if it doesn't arrive.
mFrameSyncObject.wait(TIMEOUT_MS);
if (!mFrameAvailable) {
// TODO: if "spurious wakeup", continue while loop
throw new RuntimeException("Surface frame wait timed out");
}
} catch (InterruptedException ie) {
// shouldn't happen
throw new RuntimeException(ie);
}
}
mFrameAvailable = false;
}
// Latch the data.
mTextureRender.checkGlError("before updateTexImage");
mSurfaceTexture.updateTexImage();
}
/**
* Draws the data from SurfaceTexture onto the current EGL surface.
*/
public void drawImage() {
mTextureRender.drawFrame(mSurfaceTexture);
}
@Override
public void onFrameAvailable(SurfaceTexture st) {
if (VERBOSE) Log.d(TAG, "new frame available");
synchronized (mFrameSyncObject) {
if (mFrameAvailable) {
throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
}
mFrameAvailable = true;
mFrameSyncObject.notifyAll();
}
}
/**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
if (failed) {
throw new RuntimeException("EGL error encountered (see log)");
}
}
}
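
The decoder-side counterpart to InputSurface, as a hedged sketch (variable names are illustrative): the decoder renders into the SurfaceTexture's Surface, and each rendered frame is latched and redrawn with GL onto whatever EGL surface is current (in this converter, the encoder's input surface).

OutputSurface output = new OutputSurface();   // no-arg: draws on the current EGL surface
decoder.configure(inputFormat, output.getSurface(), null, 0);
decoder.start();
// per decoded frame:
decoder.releaseOutputBuffer(index, true);     // render=true sends it to the Surface
output.awaitNewImage();                       // blocks until onFrameAvailable() fires
output.drawImage();                           // redraws the latched texture with GL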

View File: Preconditions.java

@@ -0,0 +1,10 @@
package org.thoughtcrime.securesms.video.videoconverter;
final class Preconditions {
/**
 * Throws an IllegalStateException built from {@code errorMessage} when {@code expression}
 * is false. Note the argument order: message first, then the condition, which is the
 * reverse of Guava's Preconditions.checkState.
 */
static void checkState(final Object errorMessage, final boolean expression) {
if (!expression) {
throw new IllegalStateException(String.valueOf(errorMessage));
}
}
}

View File: TextureRender.java

@@ -0,0 +1,245 @@
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file has been modified by Signal.
*/
package org.thoughtcrime.securesms.video.videoconverter;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import org.thoughtcrime.securesms.logging.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Code for rendering a texture onto a surface using OpenGL ES 2.0.
*/
final class TextureRender {
private static final String TAG = "TextureRender";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private final FloatBuffer mTriangleVertices;
private static final String VERTEX_SHADER =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private final float[] mMVPMatrix = new float[16];
private final float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID = -12345;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
public TextureRender() {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public int getTextureId() {
return mTextureID;
}
public void drawFrame(SurfaceTexture st) {
checkGlError("onDrawFrame start");
st.getTransformMatrix(mSTMatrix);
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
/**
* Initializes GL state. Call this after the EGL surface has been created and made current.
*/
public void surfaceCreated() {
mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
}
/**
* Replaces the fragment shader.
*/
public void changeFragmentShader(String fragmentShader) {
GLES20.glDeleteProgram(mProgram);
mProgram = createProgram(VERTEX_SHADER, fragmentShader);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if (program == 0) {
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
public void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
}
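
changeFragmentShader() lets the caller substitute the sampling shader; VideoTrackConverter uses it to adapt sampling to the output size. A hedged example of a valid replacement, a grayscale filter, assuming the same varying/uniform names that the vertex shader and drawFrame() bind:

String grayscale =
    "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;\n" +
    "varying vec2 vTextureCoord;\n" +
    "uniform samplerExternalOES sTexture;\n" +
    "void main() {\n" +
    "  vec4 c = texture2D(sTexture, vTextureCoord);\n" +
    "  float y = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n" +
    "  gl_FragColor = vec4(y, y, y, 1.0);\n" +
    "}\n";
textureRender.changeFragmentShader(grayscale);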

View File: VideoTrackConverter.java

@@ -0,0 +1,501 @@
package org.thoughtcrime.securesms.video.videoconverter;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.view.Surface;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.thoughtcrime.securesms.logging.Log;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicReference;
final class VideoTrackConverter {
private static final String TAG = "media-converter";
private static final boolean VERBOSE = false; // lots of logging
private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 1; // 1 second between I-frames
private static final int OUTPUT_VIDEO_FRAME_RATE = 30; // needed only for MediaFormat.KEY_I_FRAME_INTERVAL to work; the actual frame rate matches the source
private static final int TIMEOUT_USEC = 10000;
private final long mTimeFrom;
private final long mTimeTo;
final long mInputDuration;
private final MediaExtractor mVideoExtractor;
private final MediaCodec mVideoDecoder;
private final MediaCodec mVideoEncoder;
private final InputSurface mInputSurface;
private final OutputSurface mOutputSurface;
private final ByteBuffer[] mVideoDecoderInputBuffers;
private ByteBuffer[] mVideoEncoderOutputBuffers;
private final MediaCodec.BufferInfo mVideoDecoderOutputBufferInfo;
private final MediaCodec.BufferInfo mVideoEncoderOutputBufferInfo;
MediaFormat mEncoderOutputVideoFormat;
boolean mVideoExtractorDone;
private boolean mVideoDecoderDone;
boolean mVideoEncoderDone;
private int mOutputVideoTrack = -1;
long mMuxingVideoPresentationTime;
private int mVideoExtractedFrameCount;
private int mVideoDecodedFrameCount;
private int mVideoEncodedFrameCount;
private Muxer mMuxer;
static @Nullable VideoTrackConverter create(
final @NonNull MediaConverter.Input input,
final long timeFrom,
final long timeTo,
final int videoResolution,
final int videoBitrate,
final @NonNull String videoCodec) throws IOException {
final MediaExtractor videoExtractor = input.createExtractor();
final int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
if (videoInputTrack == -1) {
videoExtractor.release();
return null;
}
return new VideoTrackConverter(videoExtractor, videoInputTrack, timeFrom, timeTo, videoResolution, videoBitrate, videoCodec);
}
private VideoTrackConverter(
final @NonNull MediaExtractor videoExtractor,
final int videoInputTrack,
final long timeFrom,
final long timeTo,
final int videoResolution,
final int videoBitrate,
final @NonNull String videoCodec) throws IOException {
mTimeFrom = timeFrom;
mTimeTo = timeTo;
mVideoExtractor = videoExtractor;
final MediaCodecInfo videoCodecInfo = MediaConverter.selectCodec(videoCodec);
if (videoCodecInfo == null) {
// Don't fail CTS if they don't have an AVC codec (not here, anyway).
Log.e(TAG, "Unable to find an appropriate codec for " + videoCodec);
throw new FileNotFoundException();
}
if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName());
final MediaFormat inputVideoFormat = mVideoExtractor.getTrackFormat(videoInputTrack);
mInputDuration = inputVideoFormat.containsKey(MediaFormat.KEY_DURATION) ? inputVideoFormat.getLong(MediaFormat.KEY_DURATION) : 0;
final int rotation = inputVideoFormat.containsKey(MediaFormat.KEY_ROTATION) ? inputVideoFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
final int width = inputVideoFormat.getInteger(MediaFormat.KEY_WIDTH);
final int height = inputVideoFormat.getInteger(MediaFormat.KEY_HEIGHT);
int outputWidth = width;
int outputHeight = height;
if (outputWidth < outputHeight) {
outputWidth = videoResolution;
outputHeight = height * outputWidth / width;
} else {
outputHeight = videoResolution;
outputWidth = width * outputHeight / height;
}
// many encoders do not work when height and width are not a multiple of 16 (also, some iPhones do not play some heights)
outputHeight = (outputHeight + 7) & ~0xF;
outputWidth = (outputWidth + 7) & ~0xF;
final int outputWidthRotated;
final int outputHeightRotated;
if ((rotation % 180 == 90)) {
//noinspection SuspiciousNameCombination
outputWidthRotated = outputHeight;
//noinspection SuspiciousNameCombination
outputHeightRotated = outputWidth;
} else {
outputWidthRotated = outputWidth;
outputHeightRotated = outputHeight;
}
final MediaFormat outputVideoFormat = MediaFormat.createVideoFormat(videoCodec, outputWidthRotated, outputHeightRotated);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
outputVideoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, videoBitrate);
outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
outputVideoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat);
// Create a MediaCodec for the desired codec, then configure it as an encoder with
// our desired properties. Request a Surface to use for input.
final AtomicReference<Surface> inputSurfaceReference = new AtomicReference<>();
mVideoEncoder = createVideoEncoder(videoCodecInfo, outputVideoFormat, inputSurfaceReference);
mInputSurface = new InputSurface(inputSurfaceReference.get());
mInputSurface.makeCurrent();
// Create a MediaCodec for the decoder, based on the extractor's format.
mOutputSurface = new OutputSurface();
mOutputSurface.changeFragmentShader(createFragmentShader(
inputVideoFormat.getInteger(MediaFormat.KEY_WIDTH), inputVideoFormat.getInteger(MediaFormat.KEY_HEIGHT),
outputWidth, outputHeight));
mVideoDecoder = createVideoDecoder(inputVideoFormat, mOutputSurface.getSurface());
mVideoDecoderInputBuffers = mVideoDecoder.getInputBuffers();
mVideoEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
mVideoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
mVideoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
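        // mTimeFrom/mTimeTo are in milliseconds, while MediaExtractor works in microseconds.
        // Seeking lands on the sync (key) frame at or before the start point; step() later
        // drops any decoded frames whose timestamps fall before mTimeFrom.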
if (mTimeFrom > 0) {
mVideoExtractor.seekTo(mTimeFrom * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
Log.i(TAG, "Seek video:" + mTimeFrom + " " + mVideoExtractor.getSampleTime());
}
}
void setMuxer(final @NonNull Muxer muxer) throws IOException {
mMuxer = muxer;
if (mEncoderOutputVideoFormat != null) {
Log.d(TAG, "muxer: adding video track.");
mOutputVideoTrack = muxer.addTrack(mEncoderOutputVideoFormat);
}
}
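    // Advance the pipeline: extractor -> decoder -> (OpenGL surfaces) -> encoder -> muxer.
    // Each loop below deliberately breaks after one unit of work so the caller can
    // interleave video and audio processing by calling step() on both converters in turn.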
void step() throws IOException {
// Extract video from file and feed to decoder.
// Do not extract video if we have determined the output format but we are not yet
// ready to mux the frames.
while (!mVideoExtractorDone
&& (mEncoderOutputVideoFormat == null || mMuxer != null)) {
int decoderInputBufferIndex = mVideoDecoder.dequeueInputBuffer(TIMEOUT_USEC);
if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video decoder input buffer");
break;
}
if (VERBOSE) {
Log.d(TAG, "video decoder: returned input buffer: " + decoderInputBufferIndex);
}
final ByteBuffer decoderInputBuffer = mVideoDecoderInputBuffers[decoderInputBufferIndex];
final int size = mVideoExtractor.readSampleData(decoderInputBuffer, 0);
final long presentationTime = mVideoExtractor.getSampleTime();
if (VERBOSE) {
Log.d(TAG, "video extractor: returned buffer of size " + size);
Log.d(TAG, "video extractor: returned buffer for time " + presentationTime);
}
mVideoExtractorDone = size < 0 || (mTimeTo > 0 && presentationTime > mTimeTo * 1000);
if (mVideoExtractorDone) {
if (VERBOSE) Log.d(TAG, "video extractor: EOS");
mVideoDecoder.queueInputBuffer(
decoderInputBufferIndex,
0,
0,
0,
MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
mVideoDecoder.queueInputBuffer(
decoderInputBufferIndex,
0,
size,
presentationTime,
mVideoExtractor.getSampleFlags());
}
mVideoExtractor.advance();
mVideoExtractedFrameCount++;
// We extracted a frame, let's try something else next.
break;
}
// Poll output frames from the video decoder and feed the encoder.
while (!mVideoDecoderDone && (mEncoderOutputVideoFormat == null || mMuxer != null)) {
final int decoderOutputBufferIndex =
mVideoDecoder.dequeueOutputBuffer(
mVideoDecoderOutputBufferInfo, TIMEOUT_USEC);
if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video decoder output buffer");
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "video decoder: output buffers changed");
break;
}
if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (VERBOSE) {
Log.d(TAG, "video decoder: output format changed: " + mVideoDecoder.getOutputFormat());
}
break;
}
if (VERBOSE) {
Log.d(TAG, "video decoder: returned output buffer: "
+ decoderOutputBufferIndex);
Log.d(TAG, "video decoder: returned buffer of size "
+ mVideoDecoderOutputBufferInfo.size);
}
if ((mVideoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer");
mVideoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
if (mVideoDecoderOutputBufferInfo.presentationTimeUs < mTimeFrom * 1000 &&
(mVideoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
if (VERBOSE) Log.d(TAG, "video decoder: frame prior to " + mVideoDecoderOutputBufferInfo.presentationTimeUs);
mVideoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
break;
}
if (VERBOSE) {
Log.d(TAG, "video decoder: returned buffer for time " + mVideoDecoderOutputBufferInfo.presentationTimeUs);
}
boolean render = mVideoDecoderOutputBufferInfo.size != 0;
mVideoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
if (render) {
if (VERBOSE) Log.d(TAG, "output surface: await new image");
mOutputSurface.awaitNewImage();
// Edit the frame and send it to the encoder.
if (VERBOSE) Log.d(TAG, "output surface: draw image");
mOutputSurface.drawImage();
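                // BufferInfo timestamps are microseconds; the EGL presentation time is nanoseconds.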
mInputSurface.setPresentationTime(mVideoDecoderOutputBufferInfo.presentationTimeUs * 1000);
if (VERBOSE) Log.d(TAG, "input surface: swap buffers");
mInputSurface.swapBuffers();
if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame");
}
if ((mVideoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "video decoder: EOS");
mVideoDecoderDone = true;
mVideoEncoder.signalEndOfInputStream();
}
mVideoDecodedFrameCount++;
            // We handled a decoded frame, let's try something else next.
break;
}
// Poll frames from the video encoder and send them to the muxer.
while (!mVideoEncoderDone && (mEncoderOutputVideoFormat == null || mMuxer != null)) {
final int encoderOutputBufferIndex = mVideoEncoder.dequeueOutputBuffer(mVideoEncoderOutputBufferInfo, TIMEOUT_USEC);
if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (VERBOSE) Log.d(TAG, "no video encoder output buffer");
if (mVideoDecoderDone) {
                    // On some devices the encoder stalls after signalEndOfInputStream() and never emits BUFFER_FLAG_END_OF_STREAM.
Log.w(TAG, "mVideoDecoderDone, but didn't get BUFFER_FLAG_END_OF_STREAM");
mVideoEncodedFrameCount = mVideoDecodedFrameCount;
mVideoEncoderDone = true;
}
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (VERBOSE) Log.d(TAG, "video encoder: output buffers changed");
mVideoEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
break;
}
if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (VERBOSE) Log.d(TAG, "video encoder: output format changed");
Preconditions.checkState("video encoder changed its output format again?", mOutputVideoTrack < 0);
mEncoderOutputVideoFormat = mVideoEncoder.getOutputFormat();
break;
}
Preconditions.checkState("should have added track before processing output", mMuxer != null);
if (VERBOSE) {
Log.d(TAG, "video encoder: returned output buffer: " + encoderOutputBufferIndex);
Log.d(TAG, "video encoder: returned buffer of size " + mVideoEncoderOutputBufferInfo.size);
}
final ByteBuffer encoderOutputBuffer = mVideoEncoderOutputBuffers[encoderOutputBufferIndex];
if ((mVideoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer");
// Simply ignore codec config buffers.
mVideoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
break;
}
if (VERBOSE) {
Log.d(TAG, "video encoder: returned buffer for time " + mVideoEncoderOutputBufferInfo.presentationTimeUs);
}
if (mVideoEncoderOutputBufferInfo.size != 0) {
mMuxer.writeSampleData(mOutputVideoTrack, encoderOutputBuffer, mVideoEncoderOutputBufferInfo);
mMuxingVideoPresentationTime = Math.max(mMuxingVideoPresentationTime, mVideoEncoderOutputBufferInfo.presentationTimeUs);
}
if ((mVideoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (VERBOSE) Log.d(TAG, "video encoder: EOS");
mVideoEncoderDone = true;
}
mVideoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
mVideoEncodedFrameCount++;
// We enqueued an encoded frame, let's try something else next.
break;
}
}
void release() throws Exception {
Exception exception = null;
try {
if (mVideoExtractor != null) {
mVideoExtractor.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mVideoExtractor", e);
exception = e;
}
try {
if (mVideoDecoder != null) {
mVideoDecoder.stop();
mVideoDecoder.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mVideoDecoder", e);
if (exception == null) {
exception = e;
}
}
try {
if (mOutputSurface != null) {
mOutputSurface.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mOutputSurface", e);
if (exception == null) {
exception = e;
}
}
try {
if (mInputSurface != null) {
mInputSurface.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mInputSurface", e);
if (exception == null) {
exception = e;
}
}
try {
if (mVideoEncoder != null) {
mVideoEncoder.stop();
mVideoEncoder.release();
}
} catch (Exception e) {
Log.e(TAG, "error while releasing mVideoEncoder", e);
if (exception == null) {
exception = e;
}
}
if (exception != null) {
throw exception;
}
}
String dumpState() {
return String.format(Locale.US,
"V{"
+ "extracted:%d(done:%b) "
+ "decoded:%d(done:%b) "
+ "encoded:%d(done:%b) "
+ "muxing:%b(track:%d)} ",
mVideoExtractedFrameCount, mVideoExtractorDone,
mVideoDecodedFrameCount, mVideoDecoderDone,
mVideoEncodedFrameCount, mVideoEncoderDone,
mMuxer != null, mOutputVideoTrack);
}
void verifyEndState() {
Preconditions.checkState("encoded (" + mVideoEncodedFrameCount + ") and decoded (" + mVideoDecodedFrameCount + ") video frame counts should match", mVideoDecodedFrameCount == mVideoEncodedFrameCount);
Preconditions.checkState("decoded frame count should be less than extracted frame count", mVideoDecodedFrameCount <= mVideoExtractedFrameCount);
}
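    // Builds the fragment shader that draws decoded frames onto the encoder's input surface.
    // For downscale factors up to 2x, plain bilinear sampling is sufficient; beyond that it
    // would skip source texels and alias, so the shader instead averages a box kernel of
    // samples sized to match the scale factor.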
private static String createFragmentShader(
final int srcWidth,
final int srcHeight,
final int dstWidth,
final int dstHeight) {
final float kernelSizeX = (float) srcWidth / (float) dstWidth;
final float kernelSizeY = (float) srcHeight / (float) dstHeight;
Log.i(TAG, "kernel " + kernelSizeX + "x" + kernelSizeY);
final String shader;
if (kernelSizeX <= 2 && kernelSizeY <= 2) {
shader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
} else {
final int kernelRadiusX = (int) Math.ceil(kernelSizeX - .1f) / 2;
final int kernelRadiusY = (int) Math.ceil(kernelSizeY - .1f) / 2;
final float stepX = kernelSizeX / (1 + 2 * kernelRadiusX) * (1f / srcWidth);
final float stepY = kernelSizeY / (1 + 2 * kernelRadiusY) * (1f / srcHeight);
final float sum = (1 + 2 * kernelRadiusX) * (1 + 2 * kernelRadiusY);
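            // All taps are weighted equally, so 'sum' (the tap count) normalizes the average.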
final StringBuilder colorLoop = new StringBuilder();
            for (int i = -kernelRadiusX; i <= kernelRadiusX; i++) {
                for (int j = -kernelRadiusY; j <= kernelRadiusY; j++) {
if (i != 0 || j != 0) {
colorLoop.append(" + texture2D(sTexture, vTextureCoord.xy + vec2(")
.append(i * stepX).append(", ").append(j * stepY).append("))\n");
}
}
}
shader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = (texture2D(sTexture, vTextureCoord)\n" +
colorLoop.toString() +
" ) / " + sum + ";\n" +
"}\n";
}
Log.i(TAG, shader);
return shader;
}
    private @NonNull MediaCodec createVideoDecoder(
final @NonNull MediaFormat inputFormat,
final @NonNull Surface surface) throws IOException {
final MediaCodec decoder = MediaCodec.createDecoderByType(MediaConverter.getMimeTypeFor(inputFormat));
decoder.configure(inputFormat, surface, null, 0);
decoder.start();
return decoder;
}
    private @NonNull MediaCodec createVideoEncoder(
final @NonNull MediaCodecInfo codecInfo,
final @NonNull MediaFormat format,
final @NonNull AtomicReference<Surface> surfaceReference) throws IOException {
final MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // createInputSurface() must be called after configure() and before start().
        surfaceReference.set(encoder.createInputSurface());
encoder.start();
return encoder;
}
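    // Selects the first video track found on the extractor, or returns -1 if there is none.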
private static int getAndSelectVideoTrackIndex(@NonNull MediaExtractor extractor) {
for (int index = 0; index < extractor.getTrackCount(); ++index) {
if (VERBOSE) {
Log.d(TAG, "format for track " + index + " is " + MediaConverter.getMimeTypeFor(extractor.getTrackFormat(index)));
}
if (isVideoFormat(extractor.getTrackFormat(index))) {
extractor.selectTrack(index);
return index;
}
}
return -1;
}
private static boolean isVideoFormat(final @NonNull MediaFormat format) {
return MediaConverter.getMimeTypeFor(format).startsWith("video/");
}
}