Add in-app video recording for supported devices.

master
alex-signal 2019-10-09 17:16:23 -03:00 committed by Greyson Parrelli
parent 43954a176a
commit ff60b5b731
31 changed files with 1873 additions and 90 deletions

View File

@ -67,8 +67,8 @@ dependencies {
implementation 'androidx.multidex:multidex:2.0.1'
implementation 'androidx.lifecycle:lifecycle-extensions:2.0.0'
implementation 'androidx.lifecycle:lifecycle-common-java8:2.0.0'
implementation "androidx.camera:camera-core:1.0.0-alpha02"
implementation "androidx.camera:camera-camera2:1.0.0-alpha02"
implementation "androidx.camera:camera-core:1.0.0-alpha04"
implementation "androidx.camera:camera-camera2:1.0.0-alpha04"
implementation('com.google.firebase:firebase-messaging:17.3.4') {
exclude group: 'com.google.firebase', module: 'firebase-core'
@ -182,8 +182,8 @@ dependencyVerification {
'androidx.legacy:legacy-support-v13:65f5fcb57644d381d471a00fdf50f90b808be6b48a8ae57fb4ea39b7da8cca86',
'androidx.cardview:cardview:1193c04c22a3d6b5946dae9f4e8c59d6adde6a71b6bd5d87fb99d82dda1afec7',
'androidx.gridlayout:gridlayout:a7e5dc6f39dbc3dc6ac6d57b02a9c6fd792e80f0e45ddb3bb08e8f03d23c8755',
'androidx.camera:camera-camera2:9dc33e45da983ebd29a888401ac700323ff573821eee3fa4d993dfa3d316ee2e',
'androidx.camera:camera-core:bf32bfcb5d103d865c6af1221a1d82e994c917b53c0bc080f1e9750bdc21cbb9',
'androidx.camera:camera-camera2:b7897230aec96365d675712c92f5edcb8b464badfd61788c8f956ec2d6e49bfe',
'androidx.camera:camera-core:e1c70de55600a0caf826eb4f8a75c96c5ff8f0b626bf08413d31e80ffa55f8ba',
'androidx.exifinterface:exifinterface:ee48be10aab8f54efff4c14b77d11e10b9eeee4379d5ef6bf297a2923c55cc11',
'androidx.constraintlayout:constraintlayout:5ff864def9d41cd04e08348d69591143bae3ceff4284cf8608bceb98c36ac830',
'androidx.multidex:multidex:42dd32ff9f97f85771b82a20003a8d70f68ab7b4ba328964312ce0732693db09',
@ -249,8 +249,7 @@ dependencyVerification {
'androidx.lifecycle:lifecycle-livedata:c82609ced8c498f0a701a30fb6771bb7480860daee84d82e0a81ee86edf7ba39',
'androidx.lifecycle:lifecycle-livedata-core:fde334ec7e22744c0f5bfe7caf1a84c9d717327044400577bdf9bd921ec4f7bc',
'androidx.arch.core:core-runtime:87e65fc767c712b437649c7cee2431ebb4bed6daef82e501d4125b3ed3f65f8e',
'androidx.concurrent:concurrent-listenablefuture-callback:14dce0acbffd705cfe9fb378960f851a9d8fc3f293d1157c310c9624a561d0a8',
'androidx.concurrent:concurrent-listenablefuture:f9ef396ca4a43b9685d28bec117b278aa9171de0e446e5138e931074e3462feb',
'androidx.concurrent:concurrent-futures:50812a53912255e3e0f2147d13bbbb81937c3726fda2e984e77a27c7207d96a1',
'com.github.bumptech.glide:gifdecoder:7ee9402ae1c48fac9232b67e81f881c217b907b3252e49ce57bdb97937ebb270',
'androidx.versionedparcelable:versionedparcelable:948c751f6352d4c0f93f15fa1bf506c59083bc7754264dd9a325a6da0e2eec05',
'androidx.collection:collection:632a0e5407461de774409352940e292a291037724207a787820c77daf7d33b72',
@ -268,6 +267,7 @@ dependencyVerification {
'androidx.annotation:annotation:d38d63edb30f1467818d50aaf05f8a692dea8b31392a049bfa991b159ad5b692',
'androidx.constraintlayout:constraintlayout-solver:965c177e64fbd81bd1d27b402b66ef9d7bc7b5cb5f718044bf7a453abc542045',
'com.google.auto.value:auto-value-annotations:0e951fee8c31f60270bc46553a8586001b7b93dbb12aec06373aa99a150392c0',
'com.google.guava:listenablefuture:e4ad7607e5c0477c6f890ef26a49cb8d1bb4dffb650bab4502afee64644e3069',
'org.signal:signal-metadata-android:02323bc29317fa9d3b62fab0b507c94ba2e9bcc4a78d588888ffd313853757b3',
'org.whispersystems:signal-service-java:045026003e2ddef0325fe1e930de9ce503010aec8e8a8ac6ddbdd9a79f94e878',
'com.github.bumptech.glide:disklrucache:4696a81340eb6beee21ab93f703ed6e7ae49fb4ce3bc2fbc546e5bacd21b96b9',

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Solid red (#FF261F) oval drawable added alongside the in-app video recording feature.
     Presumably used as a recording indicator; note CameraButtonView paints its own record
     dot with a different red (0xFFF44336), so confirm actual usage against layout/code. -->
<shape android:shape="oval" xmlns:android="http://schemas.android.com/apk/res/android">
    <solid android:color="#FF261F" />
</shape>

View File

@ -8,11 +8,12 @@
<org.thoughtcrime.securesms.mediasend.CameraButtonView
android:id="@+id/camera_capture_button"
android:layout_width="80dp"
android:layout_height="80dp"
android:layout_marginEnd="24dp"
android:background="@drawable/ic_camera_shutter"
android:layout_width="96dp"
android:layout_height="96dp"
android:layout_marginEnd="25dp"
android:contentDescription="@string/CameraXFragment_capture_description"
app:imageCaptureSize="60dp"
app:recordSize="42dp"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"/>

View File

@ -8,11 +8,12 @@
<org.thoughtcrime.securesms.mediasend.CameraButtonView
android:id="@+id/camera_capture_button"
android:layout_width="80dp"
android:layout_height="80dp"
android:layout_marginBottom="24dp"
android:background="@drawable/ic_camera_shutter"
android:layout_width="96dp"
android:layout_height="96dp"
android:layout_marginBottom="25dp"
android:contentDescription="@string/CameraXFragment_capture_description"
app:imageCaptureSize="60dp"
app:recordSize="42dp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent" />
@ -41,7 +42,7 @@
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintEnd_toEndOf="parent"
tools:visibility="visible" />
<com.makeramen.roundedimageview.RoundedImageView
android:id="@+id/camera_gallery_button"
android:layout_width="36dp"

View File

@ -6,6 +6,7 @@
<org.thoughtcrime.securesms.mediasend.camerax.CameraXView
android:id="@+id/camerax_camera"
android:layout_gravity="center"
android:layout_width="match_parent"
android:layout_height="match_parent" />

View File

@ -1,6 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<org.thoughtcrime.securesms.video.VideoPlayer
xmlns:android="http://schemas.android.com/apk/res/android"
android:background="@color/grey_400_transparent"
android:layout_width="match_parent"
android:layout_height="match_parent">

View File

@ -85,6 +85,8 @@
<attr name="emoji_category_emoticons" format="reference"/>
<attr name="emoji_variation_selector_background" format="reference|color" />
<attr name="camera_button_style" />
<attr name="quick_camera_icon" format="reference"/>
<attr name="quick_mic_icon" format="reference"/>

View File

@ -96,6 +96,7 @@
<string name="CameraActivity_image_save_failure">Failed to save image.</string>
<!-- CameraXFragment -->
<string name="CameraXFragment_video_recording_available">Tap to take a picture, or keep your finger on the capture button to record a video.</string>
<string name="CameraXFragment_capture_description">Capture</string>
<string name="CameraXFragment_change_camera_description">Change camera</string>
<string name="CameraXFragment_open_gallery_description">Open gallery</string>
@ -1344,6 +1345,7 @@
<string name="preferences__linked_devices">Linked devices</string>
<string name="preferences__light_theme">Light</string>
<string name="preferences__dark_theme">Dark</string>
<string name="preferences__system_theme">System</string>
<string name="preferences__appearance">Appearance</string>
<string name="preferences__theme">Theme</string>
<string name="preferences__default">Default</string>

View File

@ -340,4 +340,9 @@
<item name="titleTextStyle">@style/TextSecure.TitleTextStyle.Conversation</item>
</style>
<declare-styleable name="CameraButtonView">
<attr name="imageCaptureSize" format="dimension" />
<attr name="recordSize" format="dimension" />
</declare-styleable>
</resources>

View File

@ -1,49 +1,234 @@
package org.thoughtcrime.securesms.mediasend;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import androidx.appcompat.widget.AppCompatButton;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.util.Util;
public final class CameraButtonView extends AppCompatButton {
public class CameraButtonView extends View {
private enum CameraButtonMode { IMAGE, MIXED }
private static final float CAPTURE_ARC_STROKE_WIDTH = 6f;
private static final float HALF_CAPTURE_ARC_STROKE_WIDTH = CAPTURE_ARC_STROKE_WIDTH / 2;
private static final float PROGRESS_ARC_STROKE_WIDTH = 12f;
private static final float HALF_PROGRESS_ARC_STROKE_WIDTH = PROGRESS_ARC_STROKE_WIDTH / 2;
private static final float MINIMUM_ALLOWED_ZOOM_STEP = 0.005f;
private static final float DEADZONE_REDUCTION_PERCENT = 0.35f;
private static final int DRAG_DISTANCE_MULTIPLIER = 3;
private static final Interpolator ZOOM_INTERPOLATOR = new DecelerateInterpolator();
private final @NonNull Paint outlinePaint = outlinePaint();
private final @NonNull Paint backgroundPaint = backgroundPaint();
private final @NonNull Paint arcPaint = arcPaint();
private final @NonNull Paint recordPaint = recordPaint();
private final @NonNull Paint progressPaint = progressPaint();
private Animation shrinkAnimation;
private Animation growAnimation;
private Animation shrinkAnimation;
public CameraButtonView(Context context) {
super(context);
init(context);
private boolean isRecordingVideo;
private float progressPercent = 0f;
private float latestIncrement = 0f;
private @NonNull CameraButtonMode cameraButtonMode = CameraButtonMode.IMAGE;
private @Nullable VideoCaptureListener videoCaptureListener;
private final float imageCaptureSize;
private final float recordSize;
private final RectF progressRect = new RectF();
private final Rect deadzoneRect = new Rect();
private final @NonNull OnLongClickListener internalLongClickListener = v -> {
notifyVideoCaptureStarted();
shrinkAnimation.cancel();
setScaleX(1f);
setScaleY(1f);
isRecordingVideo = true;
return true;
};
public CameraButtonView(@NonNull Context context) {
this(context, null);
}
public CameraButtonView(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
public CameraButtonView(@NonNull Context context, @Nullable AttributeSet attrs) {
this(context, attrs, R.attr.camera_button_style);
}
public CameraButtonView(Context context, AttributeSet attrs, int defStyleAttr) {
public CameraButtonView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context);
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraButtonView, defStyleAttr, 0);
imageCaptureSize = a.getDimensionPixelSize(R.styleable.CameraButtonView_imageCaptureSize, -1);
recordSize = a.getDimensionPixelSize(R.styleable.CameraButtonView_recordSize, -1);
a.recycle();
initializeImageAnimations();
}
public void init(Context context) {
shrinkAnimation = AnimationUtils.loadAnimation(context, R.anim.camera_capture_button_shrink);
growAnimation = AnimationUtils.loadAnimation(context, R.anim.camera_capture_button_grow);
private static Paint recordPaint() {
Paint recordPaint = new Paint();
recordPaint.setColor(0xFFF44336);
recordPaint.setAntiAlias(true);
recordPaint.setStyle(Paint.Style.FILL);
return recordPaint;
}
private static Paint outlinePaint() {
Paint outlinePaint = new Paint();
outlinePaint.setColor(0x26000000);
outlinePaint.setAntiAlias(true);
outlinePaint.setStyle(Paint.Style.STROKE);
outlinePaint.setStrokeWidth(1.5f);
return outlinePaint;
}
private static Paint backgroundPaint() {
Paint backgroundPaint = new Paint();
backgroundPaint.setColor(0x4CFFFFFF);
backgroundPaint.setAntiAlias(true);
backgroundPaint.setStyle(Paint.Style.FILL);
return backgroundPaint;
}
private static Paint arcPaint() {
Paint arcPaint = new Paint();
arcPaint.setColor(0xFFFFFFFF);
arcPaint.setAntiAlias(true);
arcPaint.setStyle(Paint.Style.STROKE);
arcPaint.setStrokeWidth(CAPTURE_ARC_STROKE_WIDTH);
return arcPaint;
}
private static Paint progressPaint() {
Paint progressPaint = new Paint();
progressPaint.setColor(0xFFFFFFFF);
progressPaint.setAntiAlias(true);
progressPaint.setStyle(Paint.Style.STROKE);
progressPaint.setStrokeWidth(PROGRESS_ARC_STROKE_WIDTH);
progressPaint.setShadowLayer(4, 0, 2, 0x40000000);
return progressPaint;
}
private void initializeImageAnimations() {
shrinkAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.camera_capture_button_shrink);
growAnimation = AnimationUtils.loadAnimation(getContext(), R.anim.camera_capture_button_grow);
shrinkAnimation.setFillAfter(true);
shrinkAnimation.setFillEnabled(true);
growAnimation.setFillAfter(true);
growAnimation.setFillEnabled(true);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (isRecordingVideo) {
drawForVideoCapture(canvas);
} else {
drawForImageCapture(canvas);
}
}
private void drawForImageCapture(Canvas canvas) {
float centerX = getWidth() / 2f;
float centerY = getHeight() / 2f;
float radius = imageCaptureSize / 2f;
canvas.drawCircle(centerX, centerY, radius, backgroundPaint);
canvas.drawCircle(centerX, centerY, radius, outlinePaint);
canvas.drawCircle(centerX, centerY, radius - HALF_CAPTURE_ARC_STROKE_WIDTH, arcPaint);
}
private void drawForVideoCapture(Canvas canvas) {
float centerX = getWidth() / 2f;
float centerY = getHeight() / 2f;
canvas.drawCircle(centerX, centerY, centerY, backgroundPaint);
canvas.drawCircle(centerX, centerY, centerY, outlinePaint);
canvas.drawCircle(centerX, centerY, recordSize / 2f, recordPaint);
progressRect.top = HALF_PROGRESS_ARC_STROKE_WIDTH;
progressRect.left = HALF_PROGRESS_ARC_STROKE_WIDTH;
progressRect.right = getWidth() - HALF_PROGRESS_ARC_STROKE_WIDTH;
progressRect.bottom = getHeight() - HALF_PROGRESS_ARC_STROKE_WIDTH;
canvas.drawArc(progressRect, 270f, 360f * progressPercent, false, progressPaint);
}
@Override
public void setOnLongClickListener(@Nullable OnLongClickListener listener) {
throw new IllegalStateException("Use setVideoCaptureListener instead");
}
public void setVideoCaptureListener(@Nullable VideoCaptureListener videoCaptureListener) {
if (isRecordingVideo) throw new IllegalStateException("Cannot set video capture listener while recording");
if (videoCaptureListener != null) {
this.cameraButtonMode = CameraButtonMode.MIXED;
this.videoCaptureListener = videoCaptureListener;
super.setOnLongClickListener(internalLongClickListener);
} else {
this.cameraButtonMode = CameraButtonMode.IMAGE;
this.videoCaptureListener = null;
super.setOnLongClickListener(null);
}
}
public void setProgress(float percentage) {
progressPercent = Util.clamp(percentage, 0f, 1f);
invalidate();
}
@Override
public boolean onTouchEvent(MotionEvent event) {
switch (event.getAction()) {
if (cameraButtonMode == CameraButtonMode.IMAGE) {
return handleImageModeTouchEvent(event);
}
boolean eventWasHandled = handleVideoModeTouchEvent(event);
int action = event.getAction();
if (action == MotionEvent.ACTION_UP || action == MotionEvent.ACTION_CANCEL) {
isRecordingVideo = false;
}
return eventWasHandled;
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
super.onLayout(changed, left, top, right, bottom);
getLocalVisibleRect(deadzoneRect);
deadzoneRect.left += (int) (getWidth() * DEADZONE_REDUCTION_PERCENT / 2f);
deadzoneRect.top += (int) (getHeight() * DEADZONE_REDUCTION_PERCENT / 2f);
deadzoneRect.right -= (int) (getWidth() * DEADZONE_REDUCTION_PERCENT / 2f);
deadzoneRect.bottom -= (int) (getHeight() * DEADZONE_REDUCTION_PERCENT / 2f);
}
private boolean handleImageModeTouchEvent(MotionEvent event) {
int action = event.getAction();
switch (action) {
case MotionEvent.ACTION_DOWN:
if (isEnabled()) {
startAnimation(shrinkAnimation);
@ -53,12 +238,72 @@ public final class CameraButtonView extends AppCompatButton {
case MotionEvent.ACTION_UP:
startAnimation(growAnimation);
return true;
default:
return super.onTouchEvent(event);
}
return false;
}
@Override
public boolean performClick() {
return super.performClick();
private boolean handleVideoModeTouchEvent(MotionEvent event) {
int action = event.getAction();
switch (action) {
case MotionEvent.ACTION_DOWN:
latestIncrement = 0f;
if (isEnabled()) {
startAnimation(shrinkAnimation);
}
case MotionEvent.ACTION_MOVE:
if (isRecordingVideo && eventIsNotInsideDeadzone(event)) {
float maxRange = getHeight() * DRAG_DISTANCE_MULTIPLIER;
float deltaY = Math.abs(event.getY() - deadzoneRect.top);
float increment = Math.min(1f, deltaY / maxRange);
if (Math.abs(increment - latestIncrement) < MINIMUM_ALLOWED_ZOOM_STEP) {
break;
}
latestIncrement = increment;
notifyZoomPercent(ZOOM_INTERPOLATOR.getInterpolation(increment));
invalidate();
}
break;
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP:
if (!isRecordingVideo) {
startAnimation(growAnimation);
}
notifyVideoCaptureEnded();
break;
}
return super.onTouchEvent(event);
}
private boolean eventIsNotInsideDeadzone(MotionEvent event) {
return Math.round(event.getY()) < deadzoneRect.top;
}
private void notifyVideoCaptureStarted() {
if (!isRecordingVideo && videoCaptureListener != null) {
videoCaptureListener.onVideoCaptureStarted();
}
}
private void notifyVideoCaptureEnded() {
if (isRecordingVideo && videoCaptureListener != null) {
videoCaptureListener.onVideoCaptureComplete();
}
}
private void notifyZoomPercent(float percent) {
if (isRecordingVideo && videoCaptureListener != null) {
videoCaptureListener.onZoomIncremented(percent);
}
}
interface VideoCaptureListener {
void onVideoCaptureStarted();
void onVideoCaptureComplete();
void onZoomIncremented(float percent);
}
}

View File

@ -7,6 +7,8 @@ import androidx.annotation.NonNull;
import androidx.camera.core.CameraX;
import androidx.fragment.app.Fragment;
import java.io.FileDescriptor;
public interface CameraFragment {
@SuppressLint("RestrictedApi")
@ -21,6 +23,7 @@ public interface CameraFragment {
interface Controller {
void onCameraError();
void onImageCaptured(@NonNull byte[] data, int width, int height);
void onVideoCaptured(@NonNull FileDescriptor fd);
void onGalleryClicked();
int getDisplayRotation();
void onCameraCountButtonClicked();

View File

@ -1,16 +1,20 @@
package org.thoughtcrime.securesms.mediasend;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.os.Bundle;
import android.view.GestureDetector;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.RotateAnimation;
import android.widget.ImageView;
@ -22,22 +26,28 @@ import androidx.annotation.RequiresApi;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageProxy;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.ViewModelProviders;
import com.bumptech.glide.Glide;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.components.TooltipPopup;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXFlashToggleView;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXUtil;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
import org.thoughtcrime.securesms.mms.DecryptableStreamUriLoader.DecryptableUri;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.util.Stopwatch;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
import org.thoughtcrime.securesms.util.ThemeUtil;
import org.thoughtcrime.securesms.util.concurrent.SimpleTask;
import org.whispersystems.libsignal.util.guava.Optional;
import java.io.FileDescriptor;
import java.io.IOException;
/**
@ -47,13 +57,15 @@ import java.io.IOException;
@RequiresApi(21)
public class CameraXFragment extends Fragment implements CameraFragment {
private static final String TAG = Log.tag(CameraXFragment.class);
private static final String TAG = Log.tag(CameraXFragment.class);
private static final String HAS_DISMISSED_VIDEO_RECORDING_TOOLTIP = "camerax.fragment.has.dismissed.video.recording.tooltip";
private CameraXView camera;
private ViewGroup controlsContainer;
private Controller controller;
private MediaSendViewModel viewModel;
private View selfieFlash;
private CameraXView camera;
private ViewGroup controlsContainer;
private Controller controller;
private MediaSendViewModel viewModel;
private View selfieFlash;
private MemoryFileDescriptor videoFileDescriptor;
public static CameraXFragment newInstance() {
return new CameraXFragment();
@ -110,6 +122,8 @@ public class CameraXFragment extends Fragment implements CameraFragment {
public void onDestroyView() {
super.onDestroyView();
CameraX.unbindAll();
closeVideoFileDescriptor();
}
@Override
@ -162,11 +176,11 @@ public class CameraXFragment extends Fragment implements CameraFragment {
@SuppressLint({"ClickableViewAccessibility", "MissingPermission"})
private void initControls() {
View flipButton = requireView().findViewById(R.id.camera_flip_button);
View captureButton = requireView().findViewById(R.id.camera_capture_button);
View galleryButton = requireView().findViewById(R.id.camera_gallery_button);
View countButton = requireView().findViewById(R.id.camera_count_button);
CameraXFlashToggleView flashButton = requireView().findViewById(R.id.camera_flash_button);
View flipButton = requireView().findViewById(R.id.camera_flip_button);
CameraButtonView captureButton = requireView().findViewById(R.id.camera_capture_button);
View galleryButton = requireView().findViewById(R.id.camera_gallery_button);
View countButton = requireView().findViewById(R.id.camera_count_button);
CameraXFlashToggleView flashButton = requireView().findViewById(R.id.camera_flash_button);
selfieFlash = requireView().findViewById(R.id.camera_selfie_flash);
@ -214,9 +228,94 @@ public class CameraXFragment extends Fragment implements CameraFragment {
galleryButton.setOnClickListener(v -> controller.onGalleryClicked());
countButton.setOnClickListener(v -> controller.onCameraCountButtonClicked());
if (MediaConstraints.isVideoTranscodeAvailable()) {
try {
closeVideoFileDescriptor();
videoFileDescriptor = CameraXVideoCaptureHelper.createFileDescriptor(requireContext());
Animation inAnimation = AnimationUtils.loadAnimation(requireContext(), R.anim.fade_in);
Animation outAnimation = AnimationUtils.loadAnimation(requireContext(), R.anim.fade_out);
camera.setCaptureMode(CameraXView.CaptureMode.MIXED);
captureButton.setVideoCaptureListener(new CameraXVideoCaptureHelper(
captureButton,
camera,
videoFileDescriptor,
new CameraXVideoCaptureHelper.Callback() {
@Override
public void onVideoRecordStarted() {
hideAndDisableControlsForVideoRecording(captureButton, flashButton, flipButton, outAnimation);
}
@Override
public void onVideoSaved(@NonNull FileDescriptor fd) {
showAndEnableControlsAfterVideoRecording(captureButton, flashButton, flipButton, inAnimation);
controller.onVideoCaptured(fd);
}
@Override
public void onVideoError(@Nullable Throwable cause) {
showAndEnableControlsAfterVideoRecording(captureButton, flashButton, flipButton, inAnimation);
controller.onCameraError();
}
}
));
displayVideoRecordingTooltipIfNecessary(captureButton);
} catch (IOException e) {
Log.w(TAG, "Video capture is not supported on this device.");
}
}
viewModel.onCameraControlsInitialized();
}
private void displayVideoRecordingTooltipIfNecessary(CameraButtonView captureButton) {
if (shouldDisplayVideoRecordingTooltip()) {
int displayRotation = requireActivity().getWindowManager().getDefaultDisplay().getRotation();
TooltipPopup.forTarget(captureButton)
.setOnDismissListener(this::neverDisplayVideoRecordingTooltipAgain)
.setBackgroundTint(ContextCompat.getColor(requireContext(), R.color.signal_primary))
.setTextColor(ThemeUtil.getThemedColor(requireContext(), R.attr.conversation_title_color))
.setText(R.string.CameraXFragment_video_recording_available)
.show(displayRotation == Surface.ROTATION_0 || displayRotation == Surface.ROTATION_180 ? TooltipPopup.POSITION_ABOVE : TooltipPopup.POSITION_START);
}
}
private boolean shouldDisplayVideoRecordingTooltip() {
return !TextSecurePreferences.getBooleanPreference(requireContext(), HAS_DISMISSED_VIDEO_RECORDING_TOOLTIP, false);
}
private void neverDisplayVideoRecordingTooltipAgain() {
TextSecurePreferences.setBooleanPreference(requireContext(), HAS_DISMISSED_VIDEO_RECORDING_TOOLTIP, true);
}
private void hideAndDisableControlsForVideoRecording(@NonNull View captureButton,
@NonNull View flashButton,
@NonNull View flipButton,
@NonNull Animation outAnimation)
{
captureButton.setEnabled(false);
flashButton.startAnimation(outAnimation);
flashButton.setVisibility(View.INVISIBLE);
flipButton.startAnimation(outAnimation);
flipButton.setVisibility(View.INVISIBLE);
}
private void showAndEnableControlsAfterVideoRecording(@NonNull View captureButton,
@NonNull View flashButton,
@NonNull View flipButton,
@NonNull Animation inAnimation)
{
requireActivity().runOnUiThread(() -> {
captureButton.setEnabled(true);
flashButton.startAnimation(inAnimation);
flashButton.setVisibility(View.VISIBLE);
flipButton.startAnimation(inAnimation);
flipButton.setVisibility(View.VISIBLE);
});
}
private void onCaptureClicked() {
Stopwatch stopwatch = new Stopwatch("Capture");
@ -261,4 +360,14 @@ public class CameraXFragment extends Fragment implements CameraFragment {
flashHelper.startFlash();
}
private void closeVideoFileDescriptor() {
if (videoFileDescriptor != null) {
try {
videoFileDescriptor.close();
} catch (IOException e) {
Log.w(TAG, "Failed to close video file descriptor", e);
}
}
}
}

View File

@ -0,0 +1,179 @@
package org.thoughtcrime.securesms.mediasend;

import android.animation.Animator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.Size;
import android.view.ViewGroup;
import android.view.animation.LinearInterpolator;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import org.thoughtcrime.securesms.animation.AnimationCompleteListener;
import org.thoughtcrime.securesms.components.TooltipPopup;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView;
import org.thoughtcrime.securesms.mediasend.camerax.VideoCapture;
import org.thoughtcrime.securesms.util.MemoryFileDescriptor;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.FileDescriptor;
import java.io.IOException;

/**
 * Glue between {@link CameraButtonView} video gestures and CameraX video recording.
 *
 * Implements {@link CameraButtonView.VideoCaptureListener} so that long-press on the
 * capture button starts a recording into a memory-backed file descriptor, releasing the
 * button stops it, and vertical drag adjusts zoom. While recording it also:
 *   - drives the capture button's progress ring via {@link #updateProgressAnimator}, and
 *   - animates/scales the {@link CameraXView} preview to match the recording aspect ratio.
 *
 * Results (success or failure) are reported through {@link Callback}.
 * NOTE(review): CameraX save/error callbacks may arrive on a background thread — the
 * Callback implementation is expected to handle its own thread hopping; confirm at call site.
 */
@RequiresApi(26)
class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener {

  private static final String TAG               = CameraXVideoCaptureHelper.class.getName();
  private static final String VIDEO_DEBUG_LABEL = "video-capture";
  // Capacity of the in-memory recording buffer: 10 MiB.
  private static final long   VIDEO_SIZE        = 10 * 1024 * 1024;

  private final @NonNull CameraXView          camera;
  private final @NonNull Callback             callback;
  private final @NonNull MemoryFileDescriptor memoryFileDescriptor;

  // Animates 0..1 over the maximum allowed video length; its fraction feeds the capture
  // button's progress ring, and its completion force-stops the recording (see constructor).
  private final ValueAnimator updateProgressAnimator = ValueAnimator.ofFloat(0f, 1f)
                                                                    .setDuration(VideoUtil.VIDEO_MAX_LENGTH_S * 1000);

  // Invoked by CameraX when the recording finishes or fails.
  private final VideoCapture.OnVideoSavedListener videoSavedListener = new VideoCapture.OnVideoSavedListener() {
    @Override
    public void onVideoSaved(@NonNull FileDescriptor fileDescriptor) {
      try {
        // Reset zoom for the next capture and rewind the descriptor so the consumer
        // reads the video from the beginning.
        camera.setZoomLevel(0f);
        memoryFileDescriptor.seek(0);
        callback.onVideoSaved(fileDescriptor);
      } catch (IOException e) {
        callback.onVideoError(e);
      }
    }

    @Override
    public void onError(@NonNull VideoCapture.VideoCaptureError videoCaptureError,
                        @NonNull String message,
                        @Nullable Throwable cause)
    {
      callback.onVideoError(cause);
    }
  };

  /**
   * @param captureButton        button whose progress ring is updated while recording
   * @param camera               preview/recording view that is animated and recorded from
   * @param memoryFileDescriptor destination buffer for the recorded video
   * @param callback             receiver for start/saved/error events
   */
  CameraXVideoCaptureHelper(@NonNull CameraButtonView captureButton,
                            @NonNull CameraXView camera,
                            @NonNull MemoryFileDescriptor memoryFileDescriptor,
                            @NonNull Callback callback)
  {
    this.camera               = camera;
    this.memoryFileDescriptor = memoryFileDescriptor;
    this.callback             = callback;

    updateProgressAnimator.setInterpolator(new LinearInterpolator());
    updateProgressAnimator.addUpdateListener(anim -> captureButton.setProgress(anim.getAnimatedFraction()));
    updateProgressAnimator.addListener(new AnimationCompleteListener() {
      @Override
      public void onAnimationEnd(Animator animation) {
        // Max duration reached: end the recording as if the user released the button.
        onVideoCaptureComplete();
      }
    });
  }

  @Override
  public void onVideoCaptureStarted() {
    Log.d(TAG, "onVideoCaptureStarted");

    this.camera.setZoomLevel(0f);
    callback.onVideoRecordStarted();
    // Recording only starts once the preview-shrink animation has finished, so the
    // recorded frames match the final surface size.
    shrinkCaptureArea(() -> {
      camera.startRecording(memoryFileDescriptor.getFileDescriptor(), videoSavedListener);
      updateProgressAnimator.start();
    });
  }

  /**
   * Animates the camera view from full-screen toward the recording size (scaled back up
   * to fill the screen's shorter dimension), then snaps the layout to the exact recording
   * size and runs {@code onCaptureAreaShrank}.
   *
   * Only one dimension is animated: width when it differs from the screen width, height
   * otherwise (the {@code scaleX == 1f} case).
   */
  private void shrinkCaptureArea(@NonNull Runnable onCaptureAreaShrank) {
    Size  screenSize              = getScreenSize();
    Size  videoRecordingSize      = VideoUtil.getVideoRecordingSize();
    float scale                   = getSurfaceScaleForRecording();
    float targetWidthForAnimation = videoRecordingSize.getWidth() * scale;
    float scaleX                  = targetWidthForAnimation / screenSize.getWidth();

    final ValueAnimator cameraMetricsAnimator;

    if (scaleX == 1f) {
      // Width already matches the screen; animate the height instead.
      float targetHeightForAnimation = videoRecordingSize.getHeight() * scale;
      cameraMetricsAnimator = ValueAnimator.ofFloat(screenSize.getHeight(), targetHeightForAnimation);
    } else {
      cameraMetricsAnimator = ValueAnimator.ofFloat(screenSize.getWidth(), targetWidthForAnimation);
    }

    ViewGroup.LayoutParams params = camera.getLayoutParams();
    cameraMetricsAnimator.setInterpolator(new LinearInterpolator());
    cameraMetricsAnimator.setDuration(200);
    cameraMetricsAnimator.addListener(new AnimationCompleteListener() {
      @Override
      public void onAnimationEnd(Animator animation) {
        scaleCameraViewToMatchRecordingSizeAndAspectRatio();
        onCaptureAreaShrank.run();
      }
    });
    cameraMetricsAnimator.addUpdateListener(animation -> {
      if (scaleX == 1f) {
        params.height = Math.round((float) animation.getAnimatedValue());
      } else {
        params.width = Math.round((float) animation.getAnimatedValue());
      }
      camera.setLayoutParams(params);
    });
    cameraMetricsAnimator.start();
  }

  // Sets the view's layout size to the exact recording resolution, then uses view scaling
  // (scaleX/scaleY) to visually enlarge it back to screen size — the recorded surface stays
  // at recording resolution while the preview still fills the screen.
  private void scaleCameraViewToMatchRecordingSizeAndAspectRatio() {
    ViewGroup.LayoutParams layoutParams = camera.getLayoutParams();

    Size  videoRecordingSize = VideoUtil.getVideoRecordingSize();
    float scale              = getSurfaceScaleForRecording();

    layoutParams.height = videoRecordingSize.getHeight();
    layoutParams.width  = videoRecordingSize.getWidth();
    camera.setLayoutParams(layoutParams);
    camera.setScaleX(scale);
    camera.setScaleY(scale);
  }

  private Size getScreenSize() {
    DisplayMetrics metrics = camera.getResources().getDisplayMetrics();
    return new Size(metrics.widthPixels, metrics.heightPixels);
  }

  // Uniform scale factor mapping the recording's shorter side onto the screen's shorter
  // side (orientation-agnostic via min() on both).
  private float getSurfaceScaleForRecording() {
    Size videoRecordingSize = VideoUtil.getVideoRecordingSize();
    Size screenSize         = getScreenSize();
    return Math.min(screenSize.getHeight(), screenSize.getWidth()) / (float) Math.min(videoRecordingSize.getHeight(), videoRecordingSize.getWidth());
  }

  @Override
  public void onVideoCaptureComplete() {
    Log.d(TAG, "onVideoCaptureComplete");
    // Cancel (not end) so the completion listener does not fire a second stop.
    updateProgressAnimator.cancel();
    camera.stopRecording();
  }

  @Override
  public void onZoomIncremented(float increment) {
    // Maps the button's 0..1 drag percentage linearly onto the camera's zoom range.
    float range = camera.getMaxZoomLevel() - camera.getMinZoomLevel();
    camera.setZoomLevel(range * increment);
  }

  /**
   * Creates the memory-backed descriptor the recording is written into.
   *
   * @throws MemoryFileDescriptor.MemoryFileException if the memory file cannot be created
   */
  static MemoryFileDescriptor createFileDescriptor(@NonNull Context context) throws MemoryFileDescriptor.MemoryFileException {
    return MemoryFileDescriptor.newMemoryFileDescriptor(
        context,
        VIDEO_DEBUG_LABEL,
        VIDEO_SIZE
    );
  }

  /** Receiver for recording lifecycle events; see class note about calling thread. */
  interface Callback {
    void onVideoRecordStarted();

    void onVideoSaved(@NonNull FileDescriptor fd);

    void onVideoError(@Nullable Throwable cause);
  }
}

View File

@ -25,6 +25,7 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.view.ContextThemeWrapper;
import androidx.core.util.Supplier;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentManager;
import androidx.lifecycle.ViewModelProviders;
@ -64,6 +65,8 @@ import org.thoughtcrime.securesms.recipients.RecipientId;
import org.thoughtcrime.securesms.scribbles.ImageEditorFragment;
import org.thoughtcrime.securesms.sms.MessageSender;
import org.thoughtcrime.securesms.util.CharacterCalculator.CharacterState;
import org.thoughtcrime.securesms.util.Function3;
import org.thoughtcrime.securesms.util.IOFunction;
import org.thoughtcrime.securesms.util.MediaUtil;
import org.thoughtcrime.securesms.util.Stopwatch;
import org.thoughtcrime.securesms.util.TextSecurePreferences;
@ -71,9 +74,12 @@ import org.thoughtcrime.securesms.util.Util;
import org.thoughtcrime.securesms.util.concurrent.SignalExecutors;
import org.thoughtcrime.securesms.util.concurrent.SimpleTask;
import org.thoughtcrime.securesms.util.views.Stub;
import org.thoughtcrime.securesms.video.VideoUtil;
import org.whispersystems.libsignal.util.guava.Optional;
import java.io.ByteArrayOutputStream;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@ -380,21 +386,53 @@ public class MediaSendActivity extends PassphraseRequiredActionBarActivity imple
@Override
public void onImageCaptured(@NonNull byte[] data, int width, int height) {
Log.i(TAG, "Camera image captured.");
onMediaCaptured(() -> data,
ignored -> (long) data.length,
(blobProvider, bytes, ignored) -> blobProvider.forData(bytes),
MediaUtil.IMAGE_JPEG,
width,
height);
}
@Override
public void onVideoCaptured(@NonNull FileDescriptor fd) {
Log.i(TAG, "Camera video captured.");
onMediaCaptured(() -> new FileInputStream(fd),
fin -> fin.getChannel().size(),
BlobProvider::forData,
VideoUtil.RECORDED_VIDEO_CONTENT_TYPE,
0,
0);
}
private <T> void onMediaCaptured(Supplier<T> dataSupplier,
IOFunction<T, Long> getLength,
Function3<BlobProvider, T, Long, BlobProvider.BlobBuilder> createBlobBuilder,
String mimeType,
int width,
int height)
{
SimpleTask.run(getLifecycle(), () -> {
try {
Uri uri = BlobProvider.getInstance()
.forData(data)
.withMimeType(MediaUtil.IMAGE_JPEG)
.createForSingleSessionOnDisk(this);
return new Media(uri,
MediaUtil.IMAGE_JPEG,
System.currentTimeMillis(),
width,
height,
data.length,
Optional.of(Media.ALL_MEDIA_BUCKET_ID),
Optional.absent());
T data = dataSupplier.get();
long length = getLength.apply(data);
Uri uri = createBlobBuilder.apply(BlobProvider.getInstance(), data, length)
.withMimeType(mimeType)
.createForSingleSessionOnDisk(this);
return new Media(
uri,
mimeType,
System.currentTimeMillis(),
width,
height,
length,
Optional.of(Media.ALL_MEDIA_BUCKET_ID),
Optional.absent()
);
} catch (IOException e) {
return null;
}
@ -406,7 +444,7 @@ public class MediaSendActivity extends PassphraseRequiredActionBarActivity imple
Log.i(TAG, "Camera capture stored: " + media.getUri().toString());
viewModel.onImageCaptured(media);
viewModel.onMediaCaptured(media);
navigateToMediaSend(Locale.getDefault());
});
}

View File

@ -47,7 +47,7 @@ public class MediaSendVideoFragment extends Fragment implements MediaSendPageFra
VideoSlide slide = new VideoSlide(requireContext(), uri, 0);
((VideoPlayer) view).setWindow(requireActivity().getWindow());
((VideoPlayer) view).setVideoSource(slide, false);
((VideoPlayer) view).setVideoSource(slide, true);
}
@Override

View File

@ -366,7 +366,7 @@ class MediaSendViewModel extends ViewModel {
hudState.setValue(buildHudState());
}
void onImageCaptured(@NonNull Media media) {
void onMediaCaptured(@NonNull Media media) {
lastCameraCapture = Optional.of(media);
List<Media> selected = selectedMedia.getValue();

View File

@ -26,7 +26,6 @@ import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.os.Looper;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
@ -46,17 +45,20 @@ import androidx.camera.core.ImageCapture.OnImageSavedListener;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.camera.core.VideoCapture;
import androidx.camera.core.VideoCapture.OnVideoSavedListener;
import androidx.camera.core.VideoCaptureConfig;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleObserver;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.OnLifecycleEvent;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView.CaptureMode;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.util.FeatureFlags;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.File;
import java.io.FileDescriptor;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
@ -122,8 +124,14 @@ final class CameraXModule {
mImageCaptureConfigBuilder =
new ImageCaptureConfig.Builder().setTargetName("ImageCapture");
// Begin Signal Custom Code Block
mVideoCaptureConfigBuilder =
new VideoCaptureConfig.Builder().setTargetName("VideoCapture");
new VideoCaptureConfig.Builder()
.setAudioBitRate(VideoUtil.AUDIO_BIT_RATE)
.setVideoFrameRate(VideoUtil.VIDEO_FRAME_RATE)
.setBitRate(VideoUtil.VIDEO_BIT_RATE)
.setTargetName("VideoCapture");
// End Signal Custom Code Block
}
/**
@ -246,9 +254,21 @@ final class CameraXModule {
mImageCaptureConfigBuilder.setTargetResolution(new Size(1920, 1920));
mImageCapture = new ImageCapture(mImageCaptureConfigBuilder.build());
// Begin Signal Custom Code Block
Size size = VideoUtil.getVideoRecordingSize();
mVideoCaptureConfigBuilder.setTargetResolution(size);
mVideoCaptureConfigBuilder.setMaxResolution(size);
// End Signal Custom Code Block
mVideoCaptureConfigBuilder.setTargetRotation(getDisplaySurfaceRotation());
mVideoCaptureConfigBuilder.setLensFacing(mCameraLensFacing);
mVideoCapture = new VideoCapture(mVideoCaptureConfigBuilder.build());
// Begin Signal Custom Code Block
if (MediaConstraints.isVideoTranscodeAvailable()) {
mVideoCapture = new VideoCapture(mVideoCaptureConfigBuilder.build());
}
// End Signal Custom Code Block
mPreviewConfigBuilder.setLensFacing(mCameraLensFacing);
int relativeCameraOrientation = getRelativeCameraOrientation(false);
@ -344,7 +364,10 @@ final class CameraXModule {
mImageCapture.takePicture(saveLocation, listener, metadata);
}
public void startRecording(File file, final OnVideoSavedListener listener) {
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
public void startRecording(FileDescriptor file, final VideoCapture.OnVideoSavedListener listener) {
if (mVideoCapture == null) {
return;
}
@ -362,14 +385,18 @@ final class CameraXModule {
file,
new VideoCapture.OnVideoSavedListener() {
@Override
public void onVideoSaved(File savedFile) {
// Begin Signal Custom Code Block
public void onVideoSaved(FileDescriptor savedFileDescriptor) {
// End Signal Custom Code Block
mVideoIsRecording.set(false);
listener.onVideoSaved(savedFile);
// Begin Signal Custom Code Block
listener.onVideoSaved(savedFileDescriptor);
// End Signal Custom Code Block
}
@Override
public void onError(
VideoCapture.UseCaseError useCaseError,
VideoCapture.VideoCaptureError useCaseError,
String message,
@Nullable Throwable cause) {
mVideoIsRecording.set(false);
@ -379,6 +406,9 @@ final class CameraXModule {
});
}
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
public void stopRecording() {
if (mVideoCapture == null) {
return;
@ -598,7 +628,12 @@ final class CameraXModule {
void clearCurrentLifecycle() {
if (mCurrentLifecycle != null) {
// Remove previous use cases
CameraX.unbind(mImageCapture, mVideoCapture, mPreview);
// Begin Signal Custom Code Block
CameraX.unbind(mImageCapture, mPreview);
if (mVideoCapture != null) {
CameraX.unbind(mVideoCapture);
}
// End Signal Custom Code Block
}
mCurrentLifecycle = null;
@ -647,7 +682,9 @@ final class CameraXModule {
mImageCapture.setTargetRotation(getDisplaySurfaceRotation());
}
if (mVideoCapture != null) {
// Begin Signal Custom Code Block
if (mImageCapture != null && MediaConstraints.isVideoTranscodeAvailable()) {
// End Signal Custom Code Block
mVideoCapture.setTargetRotation(getDisplaySurfaceRotation());
}
}

View File

@ -54,12 +54,12 @@ import androidx.camera.core.FlashMode;
import androidx.camera.core.ImageCapture.OnImageCapturedListener;
import androidx.camera.core.ImageCapture.OnImageSavedListener;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.VideoCapture.OnVideoSavedListener;
import androidx.lifecycle.LifecycleOwner;
import org.thoughtcrime.securesms.logging.Log;
import java.io.File;
import java.io.FileDescriptor;
/**
* A {@link View} that displays a preview of the camera with methods {@link
@ -594,16 +594,22 @@ public final class CameraXView extends ViewGroup {
mCameraModule.takePicture(file, listener);
}
// Begin Signal Custom Code Block
/**
* Takes a video and calls the OnVideoSavedListener when done.
*
* @param file The destination.
* @param fileDescriptor The destination.
*/
public void startRecording(File file, OnVideoSavedListener listener) {
mCameraModule.startRecording(file, listener);
@RequiresApi(26)
public void startRecording(FileDescriptor fileDescriptor, VideoCapture.OnVideoSavedListener listener) {
mCameraModule.startRecording(fileDescriptor, listener);
}
// End Signal Custom Code Block
/** Stops an in progress video. */
// Begin Signal Custom Code Block
@RequiresApi(26)
// End Signal Custom Code Block
public void stopRecording() {
mCameraModule.stopRecording();
}

View File

@ -0,0 +1,965 @@
package org.thoughtcrime.securesms.mediasend.camerax;
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.location.Location;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.CamcorderProfile;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import androidx.annotation.GuardedBy;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RestrictTo;
import androidx.camera.core.CameraInfo;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraX;
import androidx.camera.core.CameraXThreads;
import androidx.camera.core.ConfigProvider;
import androidx.camera.core.DeferrableSurface;
import androidx.camera.core.ImageOutputConfig;
import androidx.camera.core.ImmediateSurface;
import androidx.camera.core.SessionConfig;
import androidx.camera.core.UseCase;
import androidx.camera.core.UseCaseConfig;
import androidx.camera.core.VideoCaptureConfig;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* A use case for taking a video.
*
* <p>This class is designed for simple video capturing. It gives basic configuration of the
* recorded video such as resolution and file format.
*
* @hide In the earlier stage, the VideoCapture is deprioritized.
*/
@RequiresApi(26)
public class VideoCapture extends UseCase {
/**
 * Provides a static configuration with implementation-agnostic options.
 *
 * @hide
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public static final VideoCapture.Defaults DEFAULT_CONFIG = new VideoCapture.Defaults();

/** Metadata used when the caller supplies none to startRecording(). */
private static final VideoCapture.Metadata EMPTY_METADATA = new VideoCapture.Metadata();

private static final String TAG = "VideoCapture";

/** Amount of time to wait for dequeuing a buffer from the videoEncoder. */
private static final int DEQUE_TIMEOUT_USEC = 10000;

/** Android preferred mime type for AVC video. */
// Begin Signal Custom Code Block
private static final String VIDEO_MIME_TYPE = VideoUtil.VIDEO_MIME_TYPE;
private static final String AUDIO_MIME_TYPE = VideoUtil.AUDIO_MIME_TYPE;
// End Signal Custom Code Block

/** Camcorder profiles quality list */
private static final int[] CamcorderQuality = {
        CamcorderProfile.QUALITY_2160P,
        CamcorderProfile.QUALITY_1080P,
        CamcorderProfile.QUALITY_720P,
        CamcorderProfile.QUALITY_480P
};

/**
 * Audio encoding
 *
 * <p>the result of PCM_8BIT and PCM_FLOAT are not good. Set PCM_16BIT as the first option.
 */
private static final short[] sAudioEncoding = {
        AudioFormat.ENCODING_PCM_16BIT,
        AudioFormat.ENCODING_PCM_8BIT,
        AudioFormat.ENCODING_PCM_FLOAT
};

private final MediaCodec.BufferInfo mVideoBufferInfo = new MediaCodec.BufferInfo();

// Guards all access to mMuxer, which is touched from both encode threads.
private final Object mMuxerLock = new Object();

/** Thread on which all encoding occurs. */
private final HandlerThread mVideoHandlerThread =
        new HandlerThread(CameraXThreads.TAG + "video encoding thread");
private final Handler mVideoHandler;

/** Thread on which audio encoding occurs. */
private final HandlerThread mAudioHandlerThread =
        new HandlerThread(CameraXThreads.TAG + "audio encoding thread");
private final Handler mAudioHandler;

// Cross-thread stop signals: setting one asks the matching encode loop to wind down.
private final AtomicBoolean mEndOfVideoStreamSignal = new AtomicBoolean(true);
private final AtomicBoolean mEndOfAudioStreamSignal = new AtomicBoolean(true);
// True once both encode loops have fully stopped; gates re-entry into startRecording().
private final AtomicBoolean mEndOfAudioVideoSignal = new AtomicBoolean(true);

private final MediaCodec.BufferInfo mAudioBufferInfo = new MediaCodec.BufferInfo();

/** For record the first sample written time. */
private final AtomicBoolean mIsFirstVideoSampleWrite = new AtomicBoolean(false);
private final AtomicBoolean mIsFirstAudioSampleWrite = new AtomicBoolean(false);

private final VideoCaptureConfig.Builder mUseCaseConfigBuilder;

@NonNull
MediaCodec mVideoEncoder;
@NonNull
private MediaCodec mAudioEncoder;

/** The muxer that writes the encoding data to file. */
@GuardedBy("mMuxerLock")
private MediaMuxer mMuxer;
// True once both tracks are registered and mMuxer.start() has been called.
private boolean mMuxerStarted = false;

/** The index of the video track used by the muxer. */
private int mVideoTrackIndex;
/** The index of the audio track used by the muxer. */
private int mAudioTrackIndex;

/** Surface the camera writes to, which the videoEncoder uses as input. */
Surface mCameraSurface;

/** audio raw data */
private AudioRecord mAudioRecorder;
private int mAudioBufferSize;
private boolean mIsRecording = false;
private int mAudioChannelCount;
private int mAudioSampleRate;
private int mAudioBitRate;
private DeferrableSurface mDeferrableSurface;
/**
 * Creates a new video capture use case from the given configuration.
 *
 * <p>Starts the dedicated video- and audio-encoding threads immediately so their
 * {@link Handler}s exist for the lifetime of the use case.
 *
 * @param config for this use case instance
 */
public VideoCapture(VideoCaptureConfig config) {
    super(config);
    mUseCaseConfigBuilder = VideoCaptureConfig.Builder.fromConfig(config);

    // video thread start
    mVideoHandlerThread.start();
    mVideoHandler = new Handler(mVideoHandlerThread.getLooper());

    // audio thread start
    mAudioHandlerThread.start();
    mAudioHandler = new Handler(mAudioHandlerThread.getLooper());
}
/**
 * Builds the video {@link MediaFormat} (mime type, resolution, bit rate, frame rate,
 * capture rate, key-frame interval) from the use-case configuration.
 */
private static MediaFormat createMediaFormat(VideoCaptureConfig config, Size resolution) {
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(
            VIDEO_MIME_TYPE, resolution.getWidth(), resolution.getHeight());

    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, config.getBitRate());
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, config.getVideoFrameRate());
    // Begin Signal Custom Code Block
    mediaFormat.setInteger(MediaFormat.KEY_CAPTURE_RATE, config.getVideoFrameRate());
    // End Signal Custom Code Block
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, config.getIFrameInterval());

    return mediaFormat;
}
/**
 * {@inheritDoc}
 *
 * @hide
 */
@Override
@Nullable
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
protected UseCaseConfig.Builder<?, ?, ?> getDefaultBuilder(CameraX.LensFacing lensFacing) {
    VideoCaptureConfig defaultConfig =
            CameraX.getDefaultUseCaseConfig(VideoCaptureConfig.class, lensFacing);

    // No registered defaults for this lens facing: report "no builder" to the caller.
    return (defaultConfig == null) ? null : VideoCaptureConfig.Builder.fromConfig(defaultConfig);
}
/**
 * {@inheritDoc}
 *
 * <p>Tears down any previously-attached codecs and camera surface, creates fresh video and
 * audio {@link MediaCodec}s, and configures them for the resolution suggested for this
 * use case's camera.
 *
 * @hide
 */
@Override
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
protected Map<String, Size> onSuggestedResolutionUpdated(
        Map<String, Size> suggestedResolutionMap) {
    VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();

    // Already attached once before: fully release the old codecs and surface first.
    if (mCameraSurface != null) {
        mVideoEncoder.stop();
        mVideoEncoder.release();
        mAudioEncoder.stop();
        mAudioEncoder.release();
        releaseCameraSurface(false);
    }

    try {
        mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
        mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
    } catch (IOException e) {
        // Attach the original exception as the cause so the full stack trace survives,
        // instead of flattening it into the message string.
        throw new IllegalStateException("Unable to create MediaCodec due to: " + e.getCause(), e);
    }

    String cameraId = getCameraIdUnchecked(config);
    Size resolution = suggestedResolutionMap.get(cameraId);
    if (resolution == null) {
        throw new IllegalArgumentException(
                "Suggested resolution map missing resolution for camera " + cameraId);
    }

    setupEncoder(resolution);
    return suggestedResolutionMap;
}
/**
 * Starts recording video, which continues until {@link VideoCapture#stopRecording()} is
 * called.
 *
 * <p>StartRecording() is asynchronous. User needs to check if any error occurs by setting the
 * {@link VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)}.
 *
 * @param saveLocation Location to save the video capture
 * @param listener Listener to call for the recorded video
 */
// Begin Signal Custom Code Block
public void startRecording(FileDescriptor saveLocation, VideoCapture.OnVideoSavedListener listener) {
// End Signal Custom Code Block
    // Reset the "first sample" markers so the new recording logs its first A/V writes again.
    mIsFirstVideoSampleWrite.set(false);
    mIsFirstAudioSampleWrite.set(false);

    // Delegate to the full overload with empty metadata.
    startRecording(saveLocation, listener, EMPTY_METADATA);
}
/**
 * Starts recording video, which continues until {@link VideoCapture#stopRecording()} is
 * called.
 *
 * <p>StartRecording() is asynchronous. User needs to check if any error occurs by setting the
 * {@link VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)}.
 *
 * @param saveLocation Location to save the video capture
 * @param listener Listener to call for the recorded video
 * @param metadata Metadata to save with the recorded video
 */
// Begin Signal Custom Code Block
public void startRecording(
        final FileDescriptor saveLocation, final VideoCapture.OnVideoSavedListener listener, VideoCapture.Metadata metadata) {
// End Signal Custom Code Block
    Log.i(TAG, "startRecording");

    // Reject re-entry while a previous recording's encode loops are still running.
    if (!mEndOfAudioVideoSignal.get()) {
        listener.onError(
                VideoCapture.VideoCaptureError.RECORDING_IN_PROGRESS, "It is still in video recording!",
                null);
        return;
    }

    // Begin Signal Custom Code Block
    if (mAudioRecorder != null) {
        try {
            // audioRecord start
            mAudioRecorder.startRecording();
        } catch (IllegalStateException e) {
            listener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR, "AudioRecorder start fail", e);
            return;
        }
    } else {
        // Recording proceeds video-only when the audio recorder failed to initialize.
        Log.w(TAG, "Audio recorder was not initialized! Can't record audio.");
    }
    // End Signal Custom Code Block

    VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
    String cameraId = getCameraIdUnchecked(config);

    try {
        // video encoder start
        Log.i(TAG, "videoEncoder start");
        mVideoEncoder.start();
        // audio encoder start
        Log.i(TAG, "audioEncoder start");
        mAudioEncoder.start();
    } catch (IllegalStateException e) {
        // Re-arm the encoders so a later startRecording() attempt can succeed.
        setupEncoder(getAttachedSurfaceResolution(cameraId));
        listener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR, "Audio/Video encoder start fail", e);
        return;
    }

    // Get the relative rotation or default to 0 if the camera info is unavailable
    int relativeRotation = 0;
    try {
        CameraInfo cameraInfo = CameraX.getCameraInfo(cameraId);
        relativeRotation =
                cameraInfo.getSensorRotationDegrees(
                        ((ImageOutputConfig) getUseCaseConfig())
                                .getTargetRotation(Surface.ROTATION_0));
    } catch (CameraInfoUnavailableException e) {
        Log.e(TAG, "Unable to retrieve camera sensor orientation.", e);
    }

    try {
        synchronized (mMuxerLock) {
            // MediaMuxer's FileDescriptor constructor is the API-26 requirement on this class.
            mMuxer =
                    new MediaMuxer(
                            // Begin Signal Custom Code Block
                            saveLocation,
                            // End Signal Custom Code Block
                            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            mMuxer.setOrientationHint(relativeRotation);
            if (metadata.location != null) {
                mMuxer.setLocation(
                        (float) metadata.location.getLatitude(),
                        (float) metadata.location.getLongitude());
            }
        }
    } catch (IOException e) {
        setupEncoder(getAttachedSurfaceResolution(cameraId));
        listener.onError(VideoCapture.VideoCaptureError.MUXER_ERROR, "MediaMuxer creation failed!", e);
        return;
    }

    // Flip the cross-thread signals to "running" before launching the encode loops.
    mEndOfVideoStreamSignal.set(false);
    mEndOfAudioStreamSignal.set(false);
    mEndOfAudioVideoSignal.set(false);
    mIsRecording = true;

    notifyActive();

    // Audio loop runs on the audio handler thread...
    mAudioHandler.post(
            new Runnable() {
                @Override
                public void run() {
                    VideoCapture.this.audioEncode(listener);
                }
            });
    // ...and the video loop, which also delivers the final success callback.
    mVideoHandler.post(
            new Runnable() {
                @Override
                public void run() {
                    boolean errorOccurred = VideoCapture.this.videoEncode(listener);
                    if (!errorOccurred) {
                        listener.onVideoSaved(saveLocation);
                    }
                }
            });
}
/**
 * Stops recording video, this must be called after {@link
 * VideoCapture#startRecording(FileDescriptor, VideoCapture.OnVideoSavedListener, VideoCapture.Metadata)} is called.
 *
 * <p>stopRecording() is asynchronous API. User need to check if {@link
 * VideoCapture.OnVideoSavedListener#onVideoSaved(FileDescriptor)} or
 * {@link VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)} be called
 * before startRecording.
 */
public void stopRecording() {
    Log.i(TAG, "stopRecording");
    notifyInactive();

    // Only the audio loop is signaled here; it hands off to the video loop, which stops
    // the muxer and finally flips mEndOfAudioVideoSignal when everything has shut down.
    if (!mEndOfAudioVideoSignal.get() && mIsRecording) {
        // stop audio encoder thread, and wait video encoder and muxer stop.
        mEndOfAudioStreamSignal.set(true);
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Shuts down both encoding threads and releases the audio codec, the audio recorder,
 * and the camera surface. The video codec is released via {@link #releaseCameraSurface}.
 *
 * @hide
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@Override
public void clear() {
    mVideoHandlerThread.quitSafely();

    // audio encoder release
    mAudioHandlerThread.quitSafely();
    if (mAudioEncoder != null) {
        mAudioEncoder.release();
        mAudioEncoder = null;
    }

    if (mAudioRecorder != null) {
        mAudioRecorder.release();
        mAudioRecorder = null;
    }

    if (mCameraSurface != null) {
        // true: also release mVideoEncoder once the camera detaches from the surface.
        releaseCameraSurface(true);
    }

    super.clear();
}
/**
 * Detaches the camera input surface and releases it (and optionally the video encoder)
 * once the camera reports the surface as detached.
 *
 * @param releaseVideoEncoder whether to also release {@link #mVideoEncoder}; pass false
 *                            when the encoder is about to be reconfigured and reused.
 */
private void releaseCameraSurface(final boolean releaseVideoEncoder) {
    if (mDeferrableSurface == null) {
        return;
    }

    // Capture the current references: the fields are nulled below, but the actual release
    // must wait for the asynchronous detach callback.
    final Surface surface = mCameraSurface;
    final MediaCodec videoEncoder = mVideoEncoder;

    mDeferrableSurface.setOnSurfaceDetachedListener(
            CameraXExecutors.mainThreadExecutor(),
            new DeferrableSurface.OnSurfaceDetachedListener() {
                @Override
                public void onSurfaceDetached() {
                    if (releaseVideoEncoder && videoEncoder != null) {
                        videoEncoder.release();
                    }

                    if (surface != null) {
                        surface.release();
                    }
                }
            });

    if (releaseVideoEncoder) {
        mVideoEncoder = null;
    }
    mCameraSurface = null;
    mDeferrableSurface = null;
}
/**
 * Sets the desired rotation of the output video.
 *
 * <p>In most cases this should be set to the current rotation returned by {@link
 * Display#getRotation()}.
 *
 * @param rotation Desired rotation of the output video.
 */
public void setTargetRotation(@ImageOutputConfig.RotationValue int rotation) {
    ImageOutputConfig oldConfig = (ImageOutputConfig) getUseCaseConfig();
    int oldRotation = oldConfig.getTargetRotation(ImageOutputConfig.INVALID_ROTATION);

    // Nothing to do when a valid rotation is already set to the requested value.
    if (oldRotation != ImageOutputConfig.INVALID_ROTATION && oldRotation == rotation) {
        return;
    }

    mUseCaseConfigBuilder.setTargetRotation(rotation);
    updateUseCaseConfig(mUseCaseConfigBuilder.build());

    // TODO(b/122846516): Update session configuration and possibly reconfigure session.
}
/**
 * Setup the {@link MediaCodec} for encoding video from a camera {@link Surface} and encoding
 * audio from selected audio source.
 */
private void setupEncoder(Size resolution) {
    VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();

    // video encoder setup
    mVideoEncoder.reset();
    mVideoEncoder.configure(
            createMediaFormat(config, resolution), /*surface*/
            null, /*crypto*/
            null,
            MediaCodec.CONFIGURE_FLAG_ENCODE);
    if (mCameraSurface != null) {
        // Release the previous input surface (keep the encoder we just reconfigured).
        releaseCameraSurface(false);
    }
    mCameraSurface = mVideoEncoder.createInputSurface();

    // Re-attach this use case's session to the camera with the new input surface.
    SessionConfig.Builder builder = SessionConfig.Builder.createFrom(config);

    mDeferrableSurface = new ImmediateSurface(mCameraSurface);

    builder.addSurface(mDeferrableSurface);

    String cameraId = getCameraIdUnchecked(config);
    attachToCamera(cameraId, builder.build());

    // audio encoder setup
    setAudioParametersByCamcorderProfile(resolution, cameraId);
    mAudioEncoder.reset();
    mAudioEncoder.configure(
            createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

    if (mAudioRecorder != null) {
        mAudioRecorder.release();
    }
    mAudioRecorder = autoConfigAudioRecordSource(config);
    // check mAudioRecorder
    if (mAudioRecorder == null) {
        Log.e(TAG, "AudioRecord object cannot initialized correctly!");
    }

    // Reset per-recording state; track indices are reassigned when the muxer adds tracks.
    mVideoTrackIndex = -1;
    mAudioTrackIndex = -1;
    mIsRecording = false;
}
/**
 * Write a buffer that has been encoded to file.
 *
 * @param bufferIndex the index of the buffer in the videoEncoder that has available data
 * @return returns true if this buffer is the end of the stream
 */
private boolean writeVideoEncodedBuffer(int bufferIndex) {
    if (bufferIndex < 0) {
        Log.e(TAG, "Output buffer should not have negative index: " + bufferIndex);
        return false;
    }

    // Get data from buffer
    ByteBuffer outputBuffer = mVideoEncoder.getOutputBuffer(bufferIndex);

    // Check if buffer is valid, if not then return
    if (outputBuffer == null) {
        Log.d(TAG, "OutputBuffer was null.");
        return false;
    }

    // Write data to mMuxer if available — requires both tracks registered and real payload.
    if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0 && mVideoBufferInfo.size > 0) {
        outputBuffer.position(mVideoBufferInfo.offset);
        outputBuffer.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
        // Timestamp the sample at write time from the monotonic clock.
        mVideoBufferInfo.presentationTimeUs = (System.nanoTime() / 1000);

        synchronized (mMuxerLock) {
            if (!mIsFirstVideoSampleWrite.get()) {
                Log.i(TAG, "First video sample written.");
                mIsFirstVideoSampleWrite.set(true);
            }
            mMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, mVideoBufferInfo);
        }
    }

    // Release data
    mVideoEncoder.releaseOutputBuffer(bufferIndex, false);

    // Return true if EOS is set
    return (mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
}
/**
 * Writes one encoded audio buffer from the audio codec into the muxer.
 *
 * @param bufferIndex index of the dequeued output buffer in {@link #mAudioEncoder}
 * @return true if this buffer carried the end-of-stream flag
 */
private boolean writeAudioEncodedBuffer(int bufferIndex) {
    ByteBuffer buffer = getOutputBuffer(mAudioEncoder, bufferIndex);
    buffer.position(mAudioBufferInfo.offset);

    // Only write once both tracks are registered and the buffer holds timestamped payload.
    if (mAudioTrackIndex >= 0
            && mVideoTrackIndex >= 0
            && mAudioBufferInfo.size > 0
            && mAudioBufferInfo.presentationTimeUs > 0) {
        try {
            synchronized (mMuxerLock) {
                if (!mIsFirstAudioSampleWrite.get()) {
                    Log.i(TAG, "First audio sample written.");
                    mIsFirstAudioSampleWrite.set(true);
                }
                mMuxer.writeSampleData(mAudioTrackIndex, buffer, mAudioBufferInfo);
            }
        } catch (Exception e) {
            // Log once through the app logger with the throwable attached, instead of a
            // bare Log.e plus e.printStackTrace(), so the stack trace lands in the app logs.
            Log.e(
                    TAG,
                    "audio error:size="
                            + mAudioBufferInfo.size
                            + "/offset="
                            + mAudioBufferInfo.offset
                            + "/timeUs="
                            + mAudioBufferInfo.presentationTimeUs,
                    e);
        }
    }

    mAudioEncoder.releaseOutputBuffer(bufferIndex, false);
    return (mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
}
/**
 * Encoding which runs indefinitely until end of stream is signaled. This should not run on the
 * main thread otherwise it will cause the application to block.
 *
 * @return returns {@code true} if an error condition occurred, otherwise returns {@code false}
 */
boolean videoEncode(VideoCapture.OnVideoSavedListener videoSavedListener) {
    VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
    // Main encoding loop. Exits on end of stream.
    boolean errorOccurred = false;
    boolean videoEos = false;
    while (!videoEos && !errorOccurred) {
        // Check for end of stream from main thread
        if (mEndOfVideoStreamSignal.get()) {
            mVideoEncoder.signalEndOfInputStream();
            mEndOfVideoStreamSignal.set(false);
        }

        // Deque buffer to check for processing step
        int outputBufferId =
                mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, DEQUE_TIMEOUT_USEC);
        switch (outputBufferId) {
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                // The output format may only change once, before the muxer has started.
                if (mMuxerStarted) {
                    videoSavedListener.onError(
                            VideoCapture.VideoCaptureError.ENCODER_ERROR,
                            "Unexpected change in video encoding format.",
                            null);
                    errorOccurred = true;
                }

                synchronized (mMuxerLock) {
                    mVideoTrackIndex = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
                    // The muxer can only start once both audio and video tracks exist.
                    if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) {
                        mMuxerStarted = true;
                        Log.i(TAG, "media mMuxer start");
                        mMuxer.start();
                    }
                }
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                // Timed out. Just wait until next attempt to deque.
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                // Ignore output buffers changed since we dequeue a single buffer instead of
                // multiple
                break;
            default:
                videoEos = writeVideoEncodedBuffer(outputBufferId);
        }
    }

    try {
        Log.i(TAG, "videoEncoder stop");
        mVideoEncoder.stop();
    } catch (IllegalStateException e) {
        videoSavedListener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR,
                "Video encoder stop failed!", e);
        errorOccurred = true;
    }

    try {
        // new MediaMuxer instance required for each new file written, and release current one.
        synchronized (mMuxerLock) {
            if (mMuxer != null) {
                if (mMuxerStarted) {
                    mMuxer.stop();
                }
                mMuxer.release();
                mMuxer = null;
            }
        }
    } catch (IllegalStateException e) {
        videoSavedListener.onError(VideoCapture.VideoCaptureError.MUXER_ERROR, "Muxer stop failed!", e);
        errorOccurred = true;
    }

    mMuxerStarted = false;
    // Do the setup of the videoEncoder at the end of video recording instead of at the start of
    // recording because it requires attaching a new Surface. This causes a glitch so we don't
    // want
    // that to incur latency at the start of capture.
    setupEncoder(getAttachedSurfaceResolution(getCameraIdUnchecked(config)));
    notifyReset();

    // notify the UI thread that the video recording has finished
    mEndOfAudioVideoSignal.set(true);

    Log.i(TAG, "Video encode thread end.");
    return errorOccurred;
}
/**
 * Audio encoding loop: pulls raw PCM from {@link #mAudioRecorder}, feeds it to the audio
 * codec, and writes encoded output to the muxer. Runs on the audio handler thread until
 * {@link #mEndOfAudioStreamSignal} is set, then hands off to the video loop by setting
 * {@link #mEndOfVideoStreamSignal}.
 *
 * @return always {@code false}; errors are reported through the listener
 */
boolean audioEncode(VideoCapture.OnVideoSavedListener videoSavedListener) {
    // Audio encoding loop. Exits on end of stream.
    boolean audioEos = false;
    int outIndex;
    while (!audioEos && mIsRecording) {
        // Check for end of stream from main thread
        if (mEndOfAudioStreamSignal.get()) {
            mEndOfAudioStreamSignal.set(false);
            mIsRecording = false;
        }

        // get audio deque input buffer
        if (mAudioEncoder != null && mAudioRecorder != null) {
            int index = mAudioEncoder.dequeueInputBuffer(-1);
            if (index >= 0) {
                final ByteBuffer buffer = getInputBuffer(mAudioEncoder, index);
                buffer.clear();
                int length = mAudioRecorder.read(buffer, mAudioBufferSize);
                if (length > 0) {
                    // The final buffer of a recording carries the EOS flag.
                    mAudioEncoder.queueInputBuffer(
                            index,
                            0,
                            length,
                            (System.nanoTime() / 1000),
                            mIsRecording ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                }
            }

            // start to dequeue audio output buffer
            do {
                outIndex = mAudioEncoder.dequeueOutputBuffer(mAudioBufferInfo, 0);
                switch (outIndex) {
                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                        synchronized (mMuxerLock) {
                            mAudioTrackIndex = mMuxer.addTrack(mAudioEncoder.getOutputFormat());
                            // Muxer starts only when both tracks have been registered.
                            if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) {
                                mMuxerStarted = true;
                                mMuxer.start();
                            }
                        }
                        break;
                    case MediaCodec.INFO_TRY_AGAIN_LATER:
                        break;
                    default:
                        audioEos = writeAudioEncodedBuffer(outIndex);
                }
            } while (outIndex >= 0 && !audioEos); // end of dequeue output buffer
        }
    } // end of while loop

    // Audio Stop
    try {
        Log.i(TAG, "audioRecorder stop");
        // Begin Signal Custom Code Block
        if (mAudioRecorder != null) {
            mAudioRecorder.stop();
        }
        // End Signal Custom Code Block
    } catch (IllegalStateException e) {
        videoSavedListener.onError(
                VideoCapture.VideoCaptureError.ENCODER_ERROR, "Audio recorder stop failed!", e);
    }

    try {
        // Begin Signal Custom Code Block
        // NOTE(review): this guards the encoder stop on mAudioRecorder (not mAudioEncoder);
        // when the recorder failed to initialize, the started audio encoder is left
        // un-stopped here. Looks intentional for the no-audio path — worth confirming.
        if (mAudioRecorder != null) {
            mAudioEncoder.stop();
        }
        // End Signal Custom Code Block
    } catch (IllegalStateException e) {
        videoSavedListener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR,
                "Audio encoder stop failed!", e);
    }

    Log.i(TAG, "Audio encode thread end");
    // Use AtomicBoolean to signal because MediaCodec.signalEndOfInputStream() is not thread
    // safe
    mEndOfVideoStreamSignal.set(true);
    return false;
}
/** Returns the {@link MediaCodec} input buffer for {@code index} via the API 21+ accessor. */
private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
    final ByteBuffer inputBuffer = codec.getInputBuffer(index);
    return inputBuffer;
}
/** Returns the {@link MediaCodec} output buffer for {@code index} via the API 21+ accessor. */
private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
    final ByteBuffer outputBuffer = codec.getOutputBuffer(index);
    return outputBuffer;
}
/**
 * Creates the {@link MediaFormat} for the audio track (AAC-LC) from the values previously
 * resolved into {@code mAudioSampleRate}, {@code mAudioChannelCount} and {@code mAudioBitRate}.
 */
private MediaFormat createAudioMediaFormat() {
    final MediaFormat audioFormat =
            MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, mAudioSampleRate, mAudioChannelCount);
    audioFormat.setInteger(
            MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitRate);
    return audioFormat;
}
/**
 * Creates an {@link AudioRecord} for raw PCM capture, probing each encoding in
 * {@code sAudioEncoding} until one initializes successfully. Caches the working buffer size in
 * {@code mAudioBufferSize}.
 *
 * @return an initialized recorder, or {@code null} if every encoding failed
 */
private AudioRecord autoConfigAudioRecordSource(VideoCaptureConfig config) {
    // Use channel count to determine stereo vs mono; both are loop-invariant.
    final int channelConfig =
            mAudioChannelCount == 1
                    ? AudioFormat.CHANNEL_IN_MONO
                    : AudioFormat.CHANNEL_IN_STEREO;
    final int source = config.getAudioRecordSource();
    for (short audioFormat : sAudioEncoding) {
        try {
            int bufferSize =
                    AudioRecord.getMinBufferSize(mAudioSampleRate, channelConfig, audioFormat);
            // Fall back to the configured minimum when the platform reports no usable size.
            if (bufferSize <= 0) {
                bufferSize = config.getAudioMinBufferSize();
            }
            // Double the minimum buffer to reduce the chance of overrun while encoding.
            final AudioRecord recorder =
                    new AudioRecord(
                            source,
                            mAudioSampleRate,
                            channelConfig,
                            audioFormat,
                            bufferSize * 2);
            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                mAudioBufferSize = bufferSize;
                Log.i(
                        TAG,
                        "source: "
                                + source
                                + " audioSampleRate: "
                                + mAudioSampleRate
                                + " channelConfig: "
                                + channelConfig
                                + " audioFormat: "
                                + audioFormat
                                + " bufferSize: "
                                + bufferSize);
                return recorder;
            }
        } catch (Exception e) {
            Log.e(TAG, "Exception, keep trying.", e);
        }
    }
    return null;
}
/**
 * Sets audio parameters (channel count, sample rate, bit rate) from the
 * {@link CamcorderProfile} whose video resolution matches {@code currentResolution}.
 * Falls back to the defaults carried by the {@link VideoCaptureConfig} when no profile matches.
 */
private void setAudioParametersByCamcorderProfile(Size currentResolution, String cameraId) {
    CamcorderProfile profile;
    boolean isCamcorderProfileFound = false;
    // Parse once; the camera id does not change across quality levels.
    final int camcorderId = Integer.parseInt(cameraId);
    for (int quality : CamcorderQuality) {
        if (CamcorderProfile.hasProfile(camcorderId, quality)) {
            profile = CamcorderProfile.get(camcorderId, quality);
            if (currentResolution.getWidth() == profile.videoFrameWidth
                    && currentResolution.getHeight() == profile.videoFrameHeight) {
                mAudioChannelCount = profile.audioChannels;
                mAudioSampleRate = profile.audioSampleRate;
                mAudioBitRate = profile.audioBitRate;
                isCamcorderProfileFound = true;
                break;
            }
        }
    }
    // In case no corresponding camcorder profile can be found, get default values from
    // VideoCaptureConfig.
    if (!isCamcorderProfileFound) {
        VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
        mAudioChannelCount = config.getAudioChannelCount();
        mAudioSampleRate = config.getAudioSampleRate();
        mAudioBitRate = config.getAudioBitRate();
    }
}
/**
 * Describes the error that occurred during video capture operations.
 *
 * <p>This is a parameter sent to the error callback functions set in listeners such as {@link
 * VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)}.
 *
 * <p>See message parameter in onError callback or log for more details.
 */
public enum VideoCaptureError {
/**
 * An unknown error occurred.
 *
 * <p>See message parameter in onError callback or log for more details.
 */
UNKNOWN_ERROR,
/**
 * An error occurred with encoder state, either when trying to change state or when an
 * unexpected state change occurred.
 */
ENCODER_ERROR,
/** An error with muxer state such as during creation or when stopping. */
MUXER_ERROR,
/**
 * An error indicating start recording was called when video recording is still in progress.
 */
RECORDING_IN_PROGRESS
}
/** Listener containing callbacks for video file I/O events. */
public interface OnVideoSavedListener {
/** Called when the video has been successfully saved. */
// Begin Signal Custom Code Block
// NOTE(review): Signal's fork hands back a FileDescriptor instead of a File here — presumably
// so the recording target can be an already-open descriptor; confirm against callers.
void onVideoSaved(@NonNull FileDescriptor fileDescriptor);
// End Signal Custom Code Block
/** Called when an error occurs while attempting to save the video. */
void onError(@NonNull VideoCapture.VideoCaptureError videoCaptureError, @NonNull String message,
@Nullable Throwable cause);
}
/**
 * Provides a base static default configuration for the VideoCapture
 *
 * <p>These values may be overridden by the implementation. They only provide a minimum set of
 * defaults that are implementation independent.
 *
 * @hide
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public static final class Defaults
implements ConfigProvider<VideoCaptureConfig> {
private static final Handler DEFAULT_HANDLER = new Handler(Looper.getMainLooper());
private static final int DEFAULT_VIDEO_FRAME_RATE = 30;
/** 8Mb/s, the recommended bit rate for 30fps 1080p video */
private static final int DEFAULT_BIT_RATE = 8 * 1024 * 1024;
/** Seconds between each key frame */
private static final int DEFAULT_INTRA_FRAME_INTERVAL = 1;
/** Audio bit rate, in bits per second */
private static final int DEFAULT_AUDIO_BIT_RATE = 64000;
/** Audio sample rate, in Hz */
private static final int DEFAULT_AUDIO_SAMPLE_RATE = 8000;
/** Audio channel count (1 = mono) */
private static final int DEFAULT_AUDIO_CHANNEL_COUNT = 1;
/** Audio record source */
private static final int DEFAULT_AUDIO_RECORD_SOURCE = MediaRecorder.AudioSource.MIC;
/** Audio default minimum buffer size, in bytes */
private static final int DEFAULT_AUDIO_MIN_BUFFER_SIZE = 1024;
/** Current max resolution of VideoCapture is set as FHD */
private static final Size DEFAULT_MAX_RESOLUTION = new Size(1920, 1080);
/** Surface occupancy priority of this use case */
private static final int DEFAULT_SURFACE_OCCUPANCY_PRIORITY = 3;
private static final VideoCaptureConfig DEFAULT_CONFIG;
static {
VideoCaptureConfig.Builder builder =
new VideoCaptureConfig.Builder()
.setCallbackHandler(DEFAULT_HANDLER)
.setVideoFrameRate(DEFAULT_VIDEO_FRAME_RATE)
.setBitRate(DEFAULT_BIT_RATE)
.setIFrameInterval(DEFAULT_INTRA_FRAME_INTERVAL)
.setAudioBitRate(DEFAULT_AUDIO_BIT_RATE)
.setAudioSampleRate(DEFAULT_AUDIO_SAMPLE_RATE)
.setAudioChannelCount(DEFAULT_AUDIO_CHANNEL_COUNT)
.setAudioRecordSource(DEFAULT_AUDIO_RECORD_SOURCE)
.setAudioMinBufferSize(DEFAULT_AUDIO_MIN_BUFFER_SIZE)
.setMaxResolution(DEFAULT_MAX_RESOLUTION)
.setSurfaceOccupancyPriority(DEFAULT_SURFACE_OCCUPANCY_PRIORITY);
DEFAULT_CONFIG = builder.build();
}
@Override
public VideoCaptureConfig getConfig(CameraX.LensFacing lensFacing) {
// The same defaults apply regardless of which lens is facing the scene.
return DEFAULT_CONFIG;
}
}
/** Holder class for metadata that should be saved alongside captured video. */
public static final class Metadata {
/** Data representing a geographic location; {@code null} when no location is attached. */
@Nullable
public Location location;
}
}

View File

@ -24,7 +24,7 @@ class DecryptableStreamLocalUriFetcher extends StreamLocalUriFetcher {
DecryptableStreamLocalUriFetcher(Context context, Uri uri) {
super(context.getContentResolver(), uri);
this.context = context;
this.context = context;
}
@Override
@ -35,7 +35,9 @@ class DecryptableStreamLocalUriFetcher extends StreamLocalUriFetcher {
if (thumbnail != null) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
thumbnail.compress(Bitmap.CompressFormat.JPEG, 100, baos);
return new ByteArrayInputStream(baos.toByteArray());
ByteArrayInputStream thumbnailStream = new ByteArrayInputStream(baos.toByteArray());
thumbnail.recycle();
return thumbnailStream;
}
}

View File

@ -3,10 +3,12 @@ package org.thoughtcrime.securesms.providers;
import android.app.Application;
import android.content.Context;
import android.content.UriMatcher;
import android.media.MediaDataSource;
import android.net.Uri;
import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.WorkerThread;
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
@ -14,8 +16,11 @@ import org.thoughtcrime.securesms.crypto.AttachmentSecretProvider;
import org.thoughtcrime.securesms.crypto.ModernDecryptingPartInputStream;
import org.thoughtcrime.securesms.crypto.ModernEncryptingPartOutputStream;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.util.IOFunction;
import org.thoughtcrime.securesms.util.Util;
import org.thoughtcrime.securesms.util.concurrent.SignalExecutors;
import org.thoughtcrime.securesms.video.ByteArrayMediaDataSource;
import org.thoughtcrime.securesms.video.EncryptedMediaDataSource;
import java.io.ByteArrayInputStream;
import java.io.File;
@ -89,6 +94,34 @@ public class BlobProvider {
* @throws IOException If the stream fails to open or the spec of the URI doesn't match.
*/
public synchronized @NonNull InputStream getStream(@NonNull Context context, @NonNull Uri uri, long position) throws IOException {
return getBlobRepresentation(context,
uri,
bytes -> {
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
if (byteArrayInputStream.skip(position) != position) {
throw new IOException("Failed to skip to position " + position + " for: " + uri);
}
return byteArrayInputStream;
},
file -> ModernDecryptingPartInputStream.createFor(getAttachmentSecret(context),
file,
position));
}
@RequiresApi(23)
public synchronized @NonNull MediaDataSource getMediaDataSource(@NonNull Context context, @NonNull Uri uri) throws IOException {
return getBlobRepresentation(context,
uri,
ByteArrayMediaDataSource::new,
file -> EncryptedMediaDataSource.createForDiskBlob(getAttachmentSecret(context), file));
}
private synchronized @NonNull <T> T getBlobRepresentation(@NonNull Context context,
@NonNull Uri uri,
@NonNull IOFunction<byte[], T> getByteRepresentation,
@NonNull IOFunction<File, T> getFileRepresentation)
throws IOException
{
if (isAuthority(uri)) {
StorageType storageType = StorageType.decode(uri.getPathSegments().get(STORAGE_TYPE_PATH_SEGMENT));
@ -99,7 +132,7 @@ public class BlobProvider {
if (storageType == StorageType.SINGLE_USE_MEMORY) {
memoryBlobs.remove(uri);
}
return new ByteArrayInputStream(data);
return getByteRepresentation.apply(data);
} else {
throw new IOException("Failed to find in-memory blob for: " + uri);
}
@ -108,13 +141,17 @@ public class BlobProvider {
String directory = getDirectory(storageType);
File file = new File(getOrCreateCacheDirectory(context, directory), buildFileName(id));
return ModernDecryptingPartInputStream.createFor(AttachmentSecretProvider.getInstance(context).getOrCreateAttachmentSecret(), file, position);
return getFileRepresentation.apply(file);
}
} else {
throw new IOException("Provided URI does not match this spec. Uri: " + uri);
}
}
private synchronized AttachmentSecret getAttachmentSecret(@NonNull Context context) {
return AttachmentSecretProvider.getInstance(context).getOrCreateAttachmentSecret();
}
/**
* Delete the content with the specified URI.
*/
@ -475,4 +512,5 @@ public class BlobProvider {
throw new IOException("Failed to decode lifespan.");
}
}
}

View File

@ -0,0 +1,8 @@
package org.thoughtcrime.securesms.util;
/**
* A function which takes 3 inputs and returns 1 output.
*/
public interface Function3<A, B, C, D> {
D apply(A a, B b, C c);
}

View File

@ -0,0 +1,10 @@
package org.thoughtcrime.securesms.util;
import java.io.IOException;
/**
* A function which takes 1 input and returns 1 output, and is capable of throwing an IO Exception.
*/
public interface IOFunction<I, O> {
O apply(I input) throws IOException;
}

View File

@ -3,8 +3,11 @@ package org.thoughtcrime.securesms.util;
import android.content.ContentResolver;
import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaDataSource;
import android.media.MediaMetadataRetriever;
import android.media.ThumbnailUtils;
import android.net.Uri;
import android.os.Build;
import android.provider.MediaStore;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
@ -48,6 +51,7 @@ public class MediaUtil {
public static final String IMAGE_GIF = "image/gif";
public static final String AUDIO_AAC = "audio/aac";
public static final String AUDIO_UNSPECIFIED = "audio/*";
public static final String VIDEO_MP4 = "video/mp4";
public static final String VIDEO_UNSPECIFIED = "video/*";
public static final String VCARD = "text/x-vcard";
public static final String LONG_TEXT = "text/x-signal-plain";
@ -249,6 +253,10 @@ public class MediaUtil {
}
public static boolean hasVideoThumbnail(Uri uri) {
if (BlobProvider.isAuthority(uri) && MediaUtil.isVideo(BlobProvider.getMimeType(uri)) && Build.VERSION.SDK_INT >= 23) {
return true;
}
if (uri == null || !isSupportedVideoUriScheme(uri.getScheme())) {
return false;
}
@ -265,6 +273,7 @@ public class MediaUtil {
}
}
@WorkerThread
public static @Nullable Bitmap getVideoThumbnail(Context context, Uri uri) {
if ("com.android.providers.media.documents".equals(uri.getAuthority())) {
long videoId = Long.parseLong(uri.getLastPathSegment().split(":")[1]);
@ -284,6 +293,19 @@ public class MediaUtil {
MediaUtil.isVideo(URLConnection.guessContentTypeFromName(uri.toString()))) {
return ThumbnailUtils.createVideoThumbnail(uri.toString().replace("file://", ""),
MediaStore.Video.Thumbnails.MINI_KIND);
} else if (BlobProvider.isAuthority(uri) &&
MediaUtil.isVideo(BlobProvider.getMimeType(uri)) &&
Build.VERSION.SDK_INT >= 23) {
try {
MediaDataSource mediaDataSource = BlobProvider.getInstance().getMediaDataSource(context, uri);
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(mediaDataSource);
return mediaMetadataRetriever.getFrameAtTime(1000);
} catch (IOException e) {
Log.w(TAG, "failed to get thumbnail for video blob uri: " + uri, e);
return null;
}
}
return null;

View File

@ -0,0 +1,43 @@
package org.thoughtcrime.securesms.video;
import android.media.MediaDataSource;
import androidx.annotation.RequiresApi;
import java.io.IOException;
@RequiresApi(23)
public class ByteArrayMediaDataSource extends MediaDataSource {
private byte[] data;
public ByteArrayMediaDataSource(byte[] data) {
this.data = data;
}
@Override
public int readAt(long position, byte[] buffer, int offset, int size) throws IOException {
if (data == null) throw new IOException("ByteArrayMediaDataSource is closed");
long bytesAvailable = getSize() - position;
int read = Math.min(size, (int) bytesAvailable);
if (read <= 0) return -1;
if (buffer != null) {
System.arraycopy(data, (int) position, buffer, offset, read);
}
return read;
}
@Override
public long getSize() throws IOException {
if (data == null) throw new IOException("ByteArrayMediaDataSource is closed");
return data.length;
}
@Override
public void close() throws IOException {
data = null;
}
}

View File

@ -20,4 +20,8 @@ public final class EncryptedMediaDataSource {
return new ModernEncryptedMediaDataSource(attachmentSecret, mediaFile, random, length);
}
}
public static MediaDataSource createForDiskBlob(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile) {
return new ModernEncryptedMediaDataSource(attachmentSecret, mediaFile, null, 0);
}
}

View File

@ -28,12 +28,12 @@ public final class InMemoryTranscoder implements Closeable {
private static final String TAG = Log.tag(InMemoryTranscoder.class);
private static final int MAXIMUM_TARGET_VIDEO_BITRATE = 2_000_000;
private static final int MAXIMUM_TARGET_VIDEO_BITRATE = VideoUtil.VIDEO_BIT_RATE;
private static final int LOW_RES_TARGET_VIDEO_BITRATE = 1_750_000;
private static final int MINIMUM_TARGET_VIDEO_BITRATE = 500_000;
private static final int AUDIO_BITRATE = 192_000;
private static final int OUTPUT_FORMAT = 720;
private static final int LOW_RES_OUTPUT_FORMAT = 480;
private static final int MINIMUM_TARGET_VIDEO_BITRATE = 500_000;
private static final int AUDIO_BITRATE = VideoUtil.AUDIO_BIT_RATE;
private static final int OUTPUT_FORMAT = VideoUtil.VIDEO_SHORT_WIDTH;
private static final int LOW_RES_OUTPUT_FORMAT = 480;
private final Context context;
private final MediaDataSource dataSource;

View File

@ -3,6 +3,7 @@ package org.thoughtcrime.securesms.video;
import android.media.MediaDataSource;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
@ -28,7 +29,7 @@ final class ModernEncryptedMediaDataSource extends MediaDataSource {
private final byte[] random;
private final long length;
ModernEncryptedMediaDataSource(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile, @NonNull byte[] random, long length) {
ModernEncryptedMediaDataSource(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile, @Nullable byte[] random, long length) {
this.attachmentSecret = attachmentSecret;
this.mediaFile = mediaFile;
this.random = random;
@ -37,7 +38,7 @@ final class ModernEncryptedMediaDataSource extends MediaDataSource {
@Override
public int readAt(long position, byte[] bytes, int offset, int length) throws IOException {
try (InputStream inputStream = ModernDecryptingPartInputStream.createFor(attachmentSecret, random, mediaFile, position)) {
try (InputStream inputStream = createInputStream(position)) {
int totalRead = 0;
while (length > 0) {
@ -68,4 +69,12 @@ final class ModernEncryptedMediaDataSource extends MediaDataSource {
@Override
public void close() {
}
private InputStream createInputStream(long position) throws IOException {
if (random == null) {
return ModernDecryptingPartInputStream.createFor(attachmentSecret, mediaFile, position);
} else {
return ModernDecryptingPartInputStream.createFor(attachmentSecret, random, mediaFile, position);
}
}
}

View File

@ -0,0 +1,46 @@
package org.thoughtcrime.securesms.video;
import android.content.res.Resources;
import android.media.MediaFormat;
import android.util.DisplayMetrics;
import android.util.Size;
import androidx.annotation.RequiresApi;
import org.thoughtcrime.securesms.util.MediaUtil;
public final class VideoUtil {
public static final int AUDIO_BIT_RATE = 192_000;
public static final int VIDEO_FRAME_RATE = 30;
public static final int VIDEO_BIT_RATE = 2_000_000;
public static final int VIDEO_LONG_WIDTH = 1280;
public static final int VIDEO_SHORT_WIDTH = 720;
public static final int VIDEO_MAX_LENGTH_S = 30;
@RequiresApi(21)
public static final String VIDEO_MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
public static final String AUDIO_MIME_TYPE = "audio/mp4a-latm";
public static final String RECORDED_VIDEO_CONTENT_TYPE = MediaUtil.VIDEO_MP4;
private VideoUtil() { }
@RequiresApi(21)
public static Size getVideoRecordingSize() {
return isPortrait(screenSize())
? new Size(VIDEO_SHORT_WIDTH, VIDEO_LONG_WIDTH)
: new Size(VIDEO_LONG_WIDTH, VIDEO_SHORT_WIDTH);
}
@RequiresApi(21)
private static Size screenSize() {
DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics();
return new Size(metrics.widthPixels, metrics.heightPixels);
}
@RequiresApi(21)
private static boolean isPortrait(Size size) {
return size.getWidth() < size.getHeight();
}
}

View File

@ -26,7 +26,8 @@ public class AttachmentDataSource implements DataSource {
public AttachmentDataSource(DefaultDataSource defaultDataSource,
PartDataSource partDataSource,
BlobDataSource blobDataSource) {
BlobDataSource blobDataSource)
{
this.defaultDataSource = defaultDataSource;
this.partDataSource = partDataSource;
this.blobDataSource = blobDataSource;
@ -38,7 +39,7 @@ public class AttachmentDataSource implements DataSource {
@Override
public long open(DataSpec dataSpec) throws IOException {
if (BlobProvider.isAuthority(dataSpec.uri)) dataSource = blobDataSource;
if (BlobProvider.isAuthority(dataSpec.uri)) dataSource = blobDataSource;
else if (PartAuthority.isLocalUri(dataSpec.uri)) dataSource = partDataSource;
else dataSource = defaultDataSource;

View File

@ -9,6 +9,7 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.FileNotFoundException;
import java.io.IOException;
@ -20,7 +21,7 @@ final class AudioTrackConverter {
private static final String TAG = "media-converter";
private static final boolean VERBOSE = false; // lots of logging
private static final String OUTPUT_AUDIO_MIME_TYPE = "audio/mp4a-latm"; // Advanced Audio Coding
private static final String OUTPUT_AUDIO_MIME_TYPE = VideoUtil.AUDIO_MIME_TYPE; // Advanced Audio Coding
private static final int OUTPUT_AUDIO_AAC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC; //MediaCodecInfo.CodecProfileLevel.AACObjectHE;
private static final int TIMEOUT_USEC = 10000;