Update CameraX to Alpha06 and bring in new View / Module code.

master
Alex Hart 2019-10-18 17:36:00 -03:00 committed by GitHub
parent 46ebff3659
commit c2da4fcd7d
9 changed files with 572 additions and 473 deletions

View File

@ -78,8 +78,8 @@ dependencies {
implementation 'androidx.lifecycle:lifecycle-extensions:2.1.0'
implementation 'androidx.lifecycle:lifecycle-viewmodel-savedstate:1.0.0-alpha05'
implementation 'androidx.lifecycle:lifecycle-common-java8:2.1.0'
implementation "androidx.camera:camera-core:1.0.0-alpha04"
implementation "androidx.camera:camera-camera2:1.0.0-alpha04"
implementation "androidx.camera:camera-core:1.0.0-alpha06"
implementation "androidx.camera:camera-camera2:1.0.0-alpha06"
implementation('com.google.firebase:firebase-messaging:17.3.4') {
exclude group: 'com.google.firebase', module: 'firebase-core'
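
The jump from alpha04 to alpha06 is source-incompatible: capture and record callbacks now run on a caller-supplied Executor, and ImageCapture.UseCaseError was renamed to ImageCapture.ImageCaptureError. A minimal sketch of the new call shape (mirroring the CameraXFragment change below):

    // alpha04: camera.takePicture(listener) implicitly delivered results on the main thread.
    // alpha06: the delivery Executor is explicit.
    camera.takePicture(Executors.mainThreadExecutor(), listener);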

View File

@ -340,4 +340,27 @@
<attr name="recordSize" format="dimension" />
</declare-styleable>
<declare-styleable name="CameraXView">
<attr format="enum" name="scaleType">
<enum name="centerCrop" value="0"/>
<enum name="centerInside" value="1"/>
</attr>
<attr format="enum" name="lensFacing">
<enum name="none" value="0"/>
<enum name="front" value="1"/>
<enum name="back" value="2"/>
</attr>
<attr format="enum" name="captureMode">
<enum name="image" value="0"/>
<enum name="video" value="1"/>
<enum name="mixed" value="2"/>
</attr>
<attr format="enum" name="flash">
<enum name="auto" value="1"/>
<enum name="on" value="2"/>
<enum name="off" value="4"/>
</attr>
<attr format="boolean" name="pinchToZoomEnabled"/>
</declare-styleable>
</resources>
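
These attributes map one-to-one onto CameraXView's programmatic setters (defined later in this commit). A hedged Java sketch of the equivalents; the view id is hypothetical:

    CameraXView cameraView = findViewById(R.id.camera);          // hypothetical id
    cameraView.setScaleType(CameraXView.ScaleType.CENTER_CROP);  // scaleType="centerCrop"
    cameraView.setCameraLensFacing(CameraX.LensFacing.BACK);     // lensFacing="back"
    cameraView.setCaptureMode(CameraXView.CaptureMode.IMAGE);    // captureMode="image"
    cameraView.setFlash(FlashMode.OFF);                          // flash="off"
    cameraView.setPinchToZoomEnabled(true);                      // pinchToZoomEnabled="true"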

View File

@ -1,9 +1,7 @@
package org.thoughtcrime.securesms.mediasend;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.os.Bundle;
import android.view.GestureDetector;
@ -26,11 +24,13 @@ import androidx.annotation.RequiresApi;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.ViewModelProviders;
import com.bumptech.glide.Glide;
import com.bumptech.glide.util.Executors;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.components.TooltipPopup;
@ -325,7 +325,7 @@ public class CameraXFragment extends Fragment implements CameraFragment {
selfieFlash
);
camera.takePicture(new ImageCapture.OnImageCapturedListener() {
camera.takePicture(Executors.mainThreadExecutor(), new ImageCapture.OnImageCapturedListener() {
@Override
public void onCaptureSuccess(ImageProxy image, int rotationDegrees) {
flashHelper.endFlash();
@ -352,7 +352,7 @@ public class CameraXFragment extends Fragment implements CameraFragment {
}
@Override
public void onError(ImageCapture.UseCaseError useCaseError, String message, @Nullable Throwable cause) {
public void onError(ImageCapture.ImageCaptureError useCaseError, String message, @Nullable Throwable cause) {
flashHelper.endFlash();
controller.onCameraError();
}
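
Glide's Executors.mainThreadExecutor() is used above simply as a ready-made main-thread Executor for the new alpha06 callbacks. A minimal hand-rolled equivalent (an illustration, not part of this commit):

    Executor mainThreadExecutor = new Executor() {
      private final Handler handler = new Handler(Looper.getMainLooper());
      @Override
      public void execute(@NonNull Runnable command) {
        handler.post(command); // deliver each callback on the main looper
      }
    };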

View File

@ -16,6 +16,8 @@ import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.fragment.app.Fragment;
import com.bumptech.glide.util.Executors;
import org.thoughtcrime.securesms.R;
import org.thoughtcrime.securesms.animation.AnimationCompleteListener;
import org.thoughtcrime.securesms.components.TooltipPopup;
@ -119,7 +121,7 @@ class CameraXVideoCaptureHelper implements CameraButtonView.VideoCaptureListener
this.camera.setZoomLevel(0f);
callback.onVideoRecordStarted();
shrinkCaptureArea();
camera.startRecording(memoryFileDescriptor.getFileDescriptor(), videoSavedListener);
camera.startRecording(memoryFileDescriptor.getFileDescriptor(), Executors.mainThreadExecutor(), videoSavedListener);
updateProgressAnimator.start();
}

View File

@ -1,5 +1,3 @@
package org.thoughtcrime.securesms.mediasend.camerax;
/*
* Copyright (C) 2019 The Android Open Source Project
*
@ -16,6 +14,8 @@ package org.thoughtcrime.securesms.mediasend.camerax;
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.Manifest.permission;
import android.annotation.SuppressLint;
import android.content.Context;
@ -25,23 +25,24 @@ import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.os.Build;
import android.os.Looper;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.annotation.UiThread;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.CameraInfo;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraOrientationUtil;
import androidx.camera.core.CameraX;
import androidx.camera.core.CameraX.LensFacing;
import androidx.camera.core.FlashMode;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCapture.OnImageCapturedListener;
import androidx.camera.core.ImageCapture.OnImageSavedListener;
import androidx.camera.core.ImageCaptureConfig;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
@ -49,12 +50,10 @@ import androidx.camera.core.VideoCaptureConfig;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleObserver;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.OnLifecycleEvent;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.mediasend.camerax.CameraXView.CaptureMode;
import org.thoughtcrime.securesms.mms.MediaConstraints;
import org.thoughtcrime.securesms.util.FeatureFlags;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.File;
@ -62,11 +61,11 @@ import java.io.FileDescriptor;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
/** CameraX use case operation built on {@link androidx.camera.core}. */
@RequiresApi(21)
@SuppressLint("RestrictedApi")
final class CameraXModule {
public static final String TAG = "CameraXModule";
@ -82,9 +81,9 @@ final class CameraXModule {
private final PreviewConfig.Builder mPreviewConfigBuilder;
private final VideoCaptureConfig.Builder mVideoCaptureConfigBuilder;
private final ImageCaptureConfig.Builder mImageCaptureConfigBuilder;
private final CameraXView mCameraXView;
private final CameraXView mCameraView;
final AtomicBoolean mVideoIsRecording = new AtomicBoolean(false);
private CaptureMode mCaptureMode = CaptureMode.IMAGE;
private CameraXView.CaptureMode mCaptureMode = CameraXView.CaptureMode.IMAGE;
private long mMaxVideoDuration = CameraXView.INDEFINITE_VIDEO_DURATION;
private long mMaxVideoSize = CameraXView.INDEFINITE_VIDEO_SIZE;
private FlashMode mFlash = FlashMode.OFF;
@ -112,10 +111,10 @@ final class CameraXModule {
@Nullable
private Rect mCropRegion;
@Nullable
private CameraX.LensFacing mCameraLensFacing = LensFacing.BACK;
private CameraX.LensFacing mCameraLensFacing = CameraX.LensFacing.BACK;
CameraXModule(CameraXView view) {
this.mCameraXView = view;
this.mCameraView = view;
mCameraManager = (CameraManager) view.getContext().getSystemService(Context.CAMERA_SERVICE);
@ -126,11 +125,10 @@ final class CameraXModule {
// Begin Signal Custom Code Block
mVideoCaptureConfigBuilder =
new VideoCaptureConfig.Builder()
.setAudioBitRate(VideoUtil.AUDIO_BIT_RATE)
.setVideoFrameRate(VideoUtil.VIDEO_FRAME_RATE)
.setBitRate(VideoUtil.VIDEO_BIT_RATE)
.setTargetName("VideoCapture");
new VideoCaptureConfig.Builder().setTargetName("VideoCapture")
.setAudioBitRate(VideoUtil.AUDIO_BIT_RATE)
.setVideoFrameRate(VideoUtil.VIDEO_FRAME_RATE)
.setBitRate(VideoUtil.VIDEO_BIT_RATE);
// End Signal Custom Code Block
}
@ -192,8 +190,7 @@ final class CameraXModule {
final int cameraOrientation;
try {
String cameraId;
Set<LensFacing> available = getAvailableCameraLensFacing();
Set<CameraX.LensFacing> available = getAvailableCameraLensFacing();
if (available.isEmpty()) {
Log.w(TAG, "Unable to bindToLifeCycle since no cameras available");
@ -217,41 +214,32 @@ final class CameraXModule {
if (mCameraLensFacing == null) {
return;
}
cameraId = CameraX.getCameraWithLensFacing(mCameraLensFacing);
if (cameraId == null) {
return;
}
CameraInfo cameraInfo = CameraX.getCameraInfo(cameraId);
CameraInfo cameraInfo = CameraX.getCameraInfo(getLensFacing());
cameraOrientation = cameraInfo.getSensorRotationDegrees();
} catch (CameraInfoUnavailableException e) {
throw new IllegalStateException("Unable to get Camera Info.", e);
} catch (Exception e) {
throw new IllegalStateException("Unable to bind to lifecycle.", e);
}
// Set the preferred aspect ratio as 4:3 if it is IMAGE only mode. Set the preferred aspect
// ratio as 16:9 if it is VIDEO or MIXED mode. Then, it will be WYSIWYG when the view finder
// is
// in CENTER_INSIDE mode.
// is in CENTER_INSIDE mode.
boolean isDisplayPortrait = getDisplayRotationDegrees() == 0
|| getDisplayRotationDegrees() == 180;
if (getCaptureMode() == CaptureMode.IMAGE) {
mImageCaptureConfigBuilder.setTargetAspectRatio(
isDisplayPortrait ? ASPECT_RATIO_3_4 : ASPECT_RATIO_4_3);
mPreviewConfigBuilder.setTargetAspectRatio(
isDisplayPortrait ? ASPECT_RATIO_3_4 : ASPECT_RATIO_4_3);
Rational targetAspectRatio;
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
mImageCaptureConfigBuilder.setTargetAspectRatio(AspectRatio.RATIO_4_3);
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_3_4 : ASPECT_RATIO_4_3;
} else {
mImageCaptureConfigBuilder.setTargetAspectRatio(
isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9);
mPreviewConfigBuilder.setTargetAspectRatio(
isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9);
mImageCaptureConfigBuilder.setTargetAspectRatio(AspectRatio.RATIO_16_9);
targetAspectRatio = isDisplayPortrait ? ASPECT_RATIO_9_16 : ASPECT_RATIO_16_9;
}
mImageCaptureConfigBuilder.setTargetRotation(getDisplaySurfaceRotation());
mImageCaptureConfigBuilder.setLensFacing(mCameraLensFacing);
mImageCaptureConfigBuilder.setCaptureMode(CameraXUtil.getOptimalCaptureMode());
mImageCaptureConfigBuilder.setTargetResolution(new Size(1920, 1920));
mImageCapture = new ImageCapture(mImageCaptureConfigBuilder.build());
// Begin Signal Custom Code Block
@ -267,25 +255,17 @@ final class CameraXModule {
if (MediaConstraints.isVideoTranscodeAvailable()) {
mVideoCapture = new VideoCapture(mVideoCaptureConfigBuilder.build());
}
// End Signal Custom Code Block
mPreviewConfigBuilder.setLensFacing(mCameraLensFacing);
int relativeCameraOrientation = getRelativeCameraOrientation(false);
if (relativeCameraOrientation == 90 || relativeCameraOrientation == 270) {
mPreviewConfigBuilder.setTargetResolution(
new Size(getMeasuredHeight(), getMeasuredWidth()));
} else {
mPreviewConfigBuilder.setTargetResolution(
new Size(getMeasuredWidth(), getMeasuredHeight()));
}
// Adjusts the preview resolution according to the view size and the target aspect ratio.
int height = (int) (getMeasuredWidth() / targetAspectRatio.floatValue());
mPreviewConfigBuilder.setTargetResolution(new Size(getMeasuredWidth(), height));
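// Worked example (assumption: a 1080 px wide portrait view in IMAGE mode):
// targetAspectRatio is 3:4, so height = (int) (1080 / (3f / 4f)) = 1440 and the
// preview targets 1080x1440, matching the 4:3 still-capture aspect ratio.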
mPreview = new Preview(mPreviewConfigBuilder.build());
mPreview.setOnPreviewOutputUpdateListener(
new Preview.OnPreviewOutputUpdateListener() {
@Override
public void onUpdated(Preview.PreviewOutput output) {
public void onUpdated(@NonNull Preview.PreviewOutput output) {
boolean needReverse = cameraOrientation != 0 && cameraOrientation != 180;
int textureWidth =
needReverse
@ -301,9 +281,9 @@ final class CameraXModule {
}
});
if (getCaptureMode() == CaptureMode.IMAGE) {
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
CameraX.bindToLifecycle(mCurrentLifecycle, mImageCapture, mPreview);
} else if (getCaptureMode() == CaptureMode.VIDEO) {
} else if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
CameraX.bindToLifecycle(mCurrentLifecycle, mVideoCapture, mPreview);
} else {
CameraX.bindToLifecycle(mCurrentLifecycle, mImageCapture, mVideoCapture, mPreview);
@ -324,18 +304,12 @@ final class CameraXModule {
"Explicit open/close of camera not yet supported. Use bindtoLifecycle() instead.");
}
public void stopPreview() {
if (mPreview != null) {
mPreview.clear();
}
}
public void takePicture(OnImageCapturedListener listener) {
public void takePicture(Executor executor, ImageCapture.OnImageCapturedListener listener) {
if (mImageCapture == null) {
return;
}
if (getCaptureMode() == CaptureMode.VIDEO) {
if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
throw new IllegalStateException("Can not take picture under VIDEO capture mode.");
}
@ -343,15 +317,15 @@ final class CameraXModule {
throw new IllegalArgumentException("OnImageCapturedListener should not be empty");
}
mImageCapture.takePicture(listener);
mImageCapture.takePicture(executor, listener);
}
public void takePicture(File saveLocation, OnImageSavedListener listener) {
public void takePicture(File saveLocation, Executor executor, ImageCapture.OnImageSavedListener listener) {
if (mImageCapture == null) {
return;
}
if (getCaptureMode() == CaptureMode.VIDEO) {
if (getCaptureMode() == CameraXView.CaptureMode.VIDEO) {
throw new IllegalStateException("Can not take picture under VIDEO capture mode.");
}
@ -360,19 +334,19 @@ final class CameraXModule {
}
ImageCapture.Metadata metadata = new ImageCapture.Metadata();
metadata.isReversedHorizontal = mCameraLensFacing == LensFacing.FRONT;
mImageCapture.takePicture(saveLocation, listener, metadata);
metadata.isReversedHorizontal = mCameraLensFacing == CameraX.LensFacing.FRONT;
mImageCapture.takePicture(saveLocation, metadata, executor, listener);
}
// Begin Signal Custom Code Block
@RequiresApi(26)
public void startRecording(FileDescriptor file, Executor executor, final VideoCapture.OnVideoSavedListener listener) {
// End Signal Custom Code Block
public void startRecording(FileDescriptor file, final VideoCapture.OnVideoSavedListener listener) {
if (mVideoCapture == null) {
return;
}
if (getCaptureMode() == CaptureMode.IMAGE) {
if (getCaptureMode() == CameraXView.CaptureMode.IMAGE) {
throw new IllegalStateException("Can not record video under IMAGE capture mode.");
}
@ -383,25 +357,24 @@ final class CameraXModule {
mVideoIsRecording.set(true);
mVideoCapture.startRecording(
file,
executor,
new VideoCapture.OnVideoSavedListener() {
@Override
// Begin Signal Custom Code Block
public void onVideoSaved(FileDescriptor savedFileDescriptor) {
// Begin Signal Custom Code Block
public void onVideoSaved(@NonNull FileDescriptor savedFile) {
// End Signal Custom Code Block
mVideoIsRecording.set(false);
// Begin Signal Custom Code Block
listener.onVideoSaved(savedFileDescriptor);
// End Signal Custom Code Block
listener.onVideoSaved(savedFile);
}
@Override
public void onError(
VideoCapture.VideoCaptureError useCaseError,
String message,
@NonNull VideoCapture.VideoCaptureError videoCaptureError,
@NonNull String message,
@Nullable Throwable cause) {
mVideoIsRecording.set(false);
Log.e(TAG, message, cause);
listener.onError(useCaseError, message, cause);
listener.onError(videoCaptureError, message, cause);
}
});
}
@ -423,7 +396,7 @@ final class CameraXModule {
// TODO(b/124269166): Rethink how we can handle permissions here.
@SuppressLint("MissingPermission")
public void setCameraLensFacing(@Nullable LensFacing lensFacing) {
public void setCameraLensFacing(@Nullable CameraX.LensFacing lensFacing) {
// Setting same lens facing is a no-op, so check for that first
if (mCameraLensFacing != lensFacing) {
// If we're not bound to a lifecycle, just update the camera that will be opened when we
@ -438,7 +411,7 @@ final class CameraXModule {
}
@RequiresPermission(permission.CAMERA)
public boolean hasCameraWithLensFacing(LensFacing lensFacing) {
public boolean hasCameraWithLensFacing(CameraX.LensFacing lensFacing) {
String cameraId;
try {
cameraId = CameraX.getCameraWithLensFacing(lensFacing);
@ -450,14 +423,14 @@ final class CameraXModule {
}
@Nullable
public LensFacing getLensFacing() {
public CameraX.LensFacing getLensFacing() {
return mCameraLensFacing;
}
public void toggleCamera() {
// TODO(b/124269166): Rethink how we can handle permissions here.
@SuppressLint("MissingPermission")
Set<LensFacing> availableCameraLensFacing = getAvailableCameraLensFacing();
Set<CameraX.LensFacing> availableCameraLensFacing = getAvailableCameraLensFacing();
if (availableCameraLensFacing.isEmpty()) {
return;
@ -468,44 +441,19 @@ final class CameraXModule {
return;
}
if (mCameraLensFacing == LensFacing.BACK
&& availableCameraLensFacing.contains(LensFacing.FRONT)) {
setCameraLensFacing(LensFacing.FRONT);
if (mCameraLensFacing == CameraX.LensFacing.BACK
&& availableCameraLensFacing.contains(CameraX.LensFacing.FRONT)) {
setCameraLensFacing(CameraX.LensFacing.FRONT);
return;
}
if (mCameraLensFacing == LensFacing.FRONT
&& availableCameraLensFacing.contains(LensFacing.BACK)) {
setCameraLensFacing(LensFacing.BACK);
if (mCameraLensFacing == CameraX.LensFacing.FRONT
&& availableCameraLensFacing.contains(CameraX.LensFacing.BACK)) {
setCameraLensFacing(CameraX.LensFacing.BACK);
return;
}
}
public void focus(Rect focus, Rect metering) {
if (mPreview == null) {
// Nothing to focus on since we don't yet have a preview
return;
}
Rect rescaledFocus;
Rect rescaledMetering;
try {
Rect sensorRegion;
if (mCropRegion != null) {
sensorRegion = mCropRegion;
} else {
sensorRegion = getSensorSize(getActiveCamera());
}
rescaledFocus = rescaleViewRectToSensorRect(focus, sensorRegion);
rescaledMetering = rescaleViewRectToSensorRect(metering, sensorRegion);
} catch (Exception e) {
Log.e(TAG, "Failed to rescale the focus and metering rectangles.", e);
return;
}
mPreview.focus(rescaledFocus, rescaledMetering);
}
public float getZoomLevel() {
return mZoomLevel;
}
@ -604,17 +552,17 @@ final class CameraXModule {
}
int getRelativeCameraOrientation(boolean compensateForMirroring) {
int rotationDegrees;
int rotationDegrees = 0;
try {
String cameraId = CameraX.getCameraWithLensFacing(getLensFacing());
CameraInfo cameraInfo = CameraX.getCameraInfo(cameraId);
CameraInfo cameraInfo = CameraX.getCameraInfo(getLensFacing());
rotationDegrees = cameraInfo.getSensorRotationDegrees(getDisplaySurfaceRotation());
if (compensateForMirroring) {
rotationDegrees = (360 - rotationDegrees) % 360;
}
} catch (CameraInfoUnavailableException e) {
Log.e(TAG, "Failed to get CameraInfo", e);
} catch (Exception e) {
Log.e(TAG, "Failed to query camera", e);
rotationDegrees = 0;
}
return rotationDegrees;
@ -678,30 +626,28 @@ final class CameraXModule {
// Update view related information used in use cases
private void updateViewInfo() {
if (mImageCapture != null) {
mImageCapture.setTargetAspectRatio(new Rational(getWidth(), getHeight()));
mImageCapture.setTargetAspectRatioCustom(new Rational(getWidth(), getHeight()));
mImageCapture.setTargetRotation(getDisplaySurfaceRotation());
}
// Begin Signal Custom Code Block
if (mImageCapture != null && MediaConstraints.isVideoTranscodeAvailable()) {
// End Signal Custom Code Block
if (mVideoCapture != null && MediaConstraints.isVideoTranscodeAvailable()) {
mVideoCapture.setTargetRotation(getDisplaySurfaceRotation());
}
}
@RequiresPermission(permission.CAMERA)
private Set<LensFacing> getAvailableCameraLensFacing() {
private Set<CameraX.LensFacing> getAvailableCameraLensFacing() {
// Start with all camera directions
Set<LensFacing> available = new LinkedHashSet<>(Arrays.asList(LensFacing.values()));
Set<CameraX.LensFacing> available = new LinkedHashSet<>(Arrays.asList(CameraX.LensFacing.values()));
// If we're bound to a lifecycle, remove unavailable cameras
if (mCurrentLifecycle != null) {
if (!hasCameraWithLensFacing(LensFacing.BACK)) {
available.remove(LensFacing.BACK);
if (!hasCameraWithLensFacing(CameraX.LensFacing.BACK)) {
available.remove(CameraX.LensFacing.BACK);
}
if (!hasCameraWithLensFacing(LensFacing.FRONT)) {
available.remove(LensFacing.FRONT);
if (!hasCameraWithLensFacing(CameraX.LensFacing.FRONT)) {
available.remove(CameraX.LensFacing.FRONT);
}
}
@ -723,16 +669,6 @@ final class CameraXModule {
mImageCapture.setFlashMode(flash);
}
public boolean hasFlash() {
try {
Boolean flashInfoAvailable = mCameraManager.getCameraCharacteristics(getActiveCamera())
.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
return flashInfoAvailable == Boolean.TRUE;
} catch (CameraInfoUnavailableException | CameraAccessException e) {
return false;
}
}
public void enableTorch(boolean torch) {
if (mPreview == null) {
return;
@ -748,48 +684,59 @@ final class CameraXModule {
}
public Context getContext() {
return mCameraXView.getContext();
return mCameraView.getContext();
}
public int getWidth() {
return mCameraXView.getWidth();
return mCameraView.getWidth();
}
public int getHeight() {
return mCameraXView.getHeight();
return mCameraView.getHeight();
}
public int getDisplayRotationDegrees() {
return CameraOrientationUtil.surfaceRotationToDegrees(getDisplaySurfaceRotation());
}
// Begin Signal Custom Code Block
public boolean hasFlash() {
try {
LiveData<Boolean> isFlashAvailable = CameraX.getCameraInfo(getLensFacing()).isFlashAvailable();
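// Note: LiveData#getValue() may be null until a value has been published;
// comparing against Boolean.TRUE treats "unknown" the same as "no flash".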
return isFlashAvailable.getValue() == Boolean.TRUE;
} catch (CameraInfoUnavailableException e) {
return false;
}
}
// End Signal Custom Code Block
protected int getDisplaySurfaceRotation() {
return mCameraXView.getDisplaySurfaceRotation();
return mCameraView.getDisplaySurfaceRotation();
}
public void setSurfaceTexture(SurfaceTexture st) {
mCameraXView.setSurfaceTexture(st);
mCameraView.setSurfaceTexture(st);
}
private int getPreviewWidth() {
return mCameraXView.getPreviewWidth();
return mCameraView.getPreviewWidth();
}
private int getPreviewHeight() {
return mCameraXView.getPreviewHeight();
return mCameraView.getPreviewHeight();
}
private int getMeasuredWidth() {
return mCameraXView.getMeasuredWidth();
return mCameraView.getMeasuredWidth();
}
private int getMeasuredHeight() {
return mCameraXView.getMeasuredHeight();
return mCameraView.getMeasuredHeight();
}
void setTransform(final Matrix matrix) {
if (Looper.myLooper() != Looper.getMainLooper()) {
mCameraXView.post(
mCameraView.post(
new Runnable() {
@Override
public void run() {
@ -797,7 +744,7 @@ final class CameraXModule {
}
});
} else {
mCameraXView.setTransform(matrix);
mCameraView.setTransform(matrix);
}
}
@ -810,14 +757,14 @@ final class CameraXModule {
* @param height height of camera source buffers.
*/
void onPreviewSourceDimensUpdated(int width, int height) {
mCameraXView.onPreviewSourceDimensUpdated(width, height);
mCameraView.onPreviewSourceDimensUpdated(width, height);
}
public CaptureMode getCaptureMode() {
public CameraXView.CaptureMode getCaptureMode() {
return mCaptureMode;
}
public void setCaptureMode(CaptureMode captureMode) {
public void setCaptureMode(CameraXView.CaptureMode captureMode) {
this.mCaptureMode = captureMode;
rebindToLifecycle();
}

View File

@ -1,5 +1,3 @@
package org.thoughtcrime.securesms.mediasend.camerax;
/*
* Copyright (C) 2019 The Android Open Source Project
*
@ -16,9 +14,12 @@ package org.thoughtcrime.securesms.mediasend.camerax;
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.Manifest.permission;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
@ -31,6 +32,7 @@ import android.os.Looper;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.MotionEvent;
@ -40,31 +42,35 @@ import android.view.TextureView;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.BaseInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.annotation.RestrictTo;
import androidx.annotation.RestrictTo.Scope;
import androidx.annotation.UiThread;
import androidx.camera.core.CameraX.LensFacing;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraX;
import androidx.camera.core.FlashMode;
import androidx.camera.core.ImageCapture.OnImageCapturedListener;
import androidx.camera.core.ImageCapture.OnImageSavedListener;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.FocusMeteringAction;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.MeteringPoint;
import androidx.lifecycle.LifecycleOwner;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.R;
import java.io.File;
import java.io.FileDescriptor;
import java.util.concurrent.Executor;
/**
* A {@link View} that displays a preview of the camera with methods {@link
* #takePicture(OnImageCapturedListener)}, {@link #takePicture(File, OnImageSavedListener)}, {@link
* #startRecording(File, OnVideoSavedListener)} and {@link #stopRecording()}.
* #takePicture(Executor, OnImageCapturedListener)},
* {@link #takePicture(File, Executor, OnImageSavedListener)},
* {@link #startRecording(File, Executor, OnVideoSavedListener)} and {@link #stopRecording()}.
*
* <p>Because the Camera is a limited resource and consumes a high amount of power, CameraView must
* be opened/closed. CameraView will handle opening/closing automatically through use of a {@link
@ -88,8 +94,12 @@ public final class CameraXView extends ViewGroup {
private static final String EXTRA_CAMERA_DIRECTION = "camera_direction";
private static final String EXTRA_CAPTURE_MODE = "captureMode";
private final Rect mFocusingRect = new Rect();
private final Rect mMeteringRect = new Rect();
private static final int LENS_FACING_NONE = 0;
private static final int LENS_FACING_FRONT = 1;
private static final int LENS_FACING_BACK = 2;
private static final int FLASH_MODE_AUTO = 1;
private static final int FLASH_MODE_ON = 2;
private static final int FLASH_MODE_OFF = 4;
// For tap-to-focus
private long mDownEventTimestamp;
// For pinch-to-zoom
@ -116,8 +126,7 @@ public final class CameraXView extends ViewGroup {
private ScaleType mScaleType = ScaleType.CENTER_CROP;
// For accessibility event
private MotionEvent mUpEvent;
private @Nullable
Paint mLayerPaint;
private @Nullable Paint mLayerPaint;
public CameraXView(Context context) {
this(context, null);
@ -188,11 +197,52 @@ public final class CameraXView extends ViewGroup {
onPreviewSourceDimensUpdated(640, 480);
}
setScaleType(ScaleType.CENTER_CROP);
setPinchToZoomEnabled(true);
setCaptureMode(CaptureMode.IMAGE);
setCameraLensFacing(LensFacing.FRONT);
setFlash(FlashMode.OFF);
if (attrs != null) {
TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraView);
setScaleType(
ScaleType.fromId(
a.getInteger(R.styleable.CameraXView_scaleType,
getScaleType().getId())));
setPinchToZoomEnabled(
a.getBoolean(
R.styleable.CameraXView_pinchToZoomEnabled, isPinchToZoomEnabled()));
setCaptureMode(
CaptureMode.fromId(
a.getInteger(R.styleable.CameraXView_captureMode,
getCaptureMode().getId())));
int lensFacing = a.getInt(R.styleable.CameraXView_lensFacing, LENS_FACING_BACK);
switch (lensFacing) {
case LENS_FACING_NONE:
setCameraLensFacing(null);
break;
case LENS_FACING_FRONT:
setCameraLensFacing(CameraX.LensFacing.FRONT);
break;
case LENS_FACING_BACK:
setCameraLensFacing(CameraX.LensFacing.BACK);
break;
default:
// Unhandled event.
}
int flashMode = a.getInt(R.styleable.CameraXView_flash, 0);
switch (flashMode) {
case FLASH_MODE_AUTO:
setFlash(FlashMode.AUTO);
break;
case FLASH_MODE_ON:
setFlash(FlashMode.ON);
break;
case FLASH_MODE_OFF:
setFlash(FlashMode.OFF);
break;
default:
// Unhandled event.
}
a.recycle();
}
if (getBackground() == null) {
setBackgroundColor(0xFF111111);
@ -245,7 +295,7 @@ public final class CameraXView extends ViewGroup {
setCameraLensFacing(
TextUtils.isEmpty(lensFacingString)
? null
: LensFacing.valueOf(lensFacingString));
: CameraX.LensFacing.valueOf(lensFacingString));
setCaptureMode(CaptureMode.fromId(state.getInt(EXTRA_CAPTURE_MODE)));
} else {
super.onRestoreInstanceState(savedState);
@ -578,33 +628,42 @@ public final class CameraXView extends ViewGroup {
* Takes a picture, and calls {@link OnImageCapturedListener#onCaptureSuccess(ImageProxy, int)}
* once when done.
*
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener which will receive success or failure callbacks.
*/
public void takePicture(OnImageCapturedListener listener) {
mCameraModule.takePicture(listener);
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
public void takePicture(@NonNull Executor executor, @NonNull ImageCapture.OnImageCapturedListener listener) {
mCameraModule.takePicture(executor, listener);
}
/**
* Takes a picture and calls {@link OnImageSavedListener#onImageSaved(File)} when done.
*
* @param file The destination.
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener which will receive success or failure callbacks.
*/
public void takePicture(File file, OnImageSavedListener listener) {
mCameraModule.takePicture(file, listener);
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
public void takePicture(@NonNull File file, @NonNull Executor executor,
@NonNull ImageCapture.OnImageSavedListener listener) {
mCameraModule.takePicture(file, executor, listener);
}
// Begin Signal Custom Code Block
/**
* Takes a video and calls the OnVideoSavedListener when done.
*
* @param fileDescriptor The destination.
* @param file The destination.
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener which will receive success or failure callbacks.
*/
// Begin Signal Custom Code Block
@RequiresApi(26)
public void startRecording(FileDescriptor fileDescriptor, VideoCapture.OnVideoSavedListener listener) {
mCameraModule.startRecording(fileDescriptor, listener);
}
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
public void startRecording(@NonNull FileDescriptor file, @NonNull Executor executor,
// End Signal Custom Code Block
@NonNull VideoCapture.OnVideoSavedListener listener) {
mCameraModule.startRecording(file, executor, listener);
}
/** Stops an in-progress video recording. */
// Begin Signal Custom Code Block
@ -626,7 +685,7 @@ public final class CameraXView extends ViewGroup {
* @throws IllegalStateException if the CAMERA permission is not currently granted.
*/
@RequiresPermission(permission.CAMERA)
public boolean hasCameraWithLensFacing(LensFacing lensFacing) {
public boolean hasCameraWithLensFacing(CameraX.LensFacing lensFacing) {
return mCameraModule.hasCameraWithLensFacing(lensFacing);
}
@ -655,29 +714,21 @@ public final class CameraXView extends ViewGroup {
*
* @param lensFacing The desired camera lensFacing.
*/
public void setCameraLensFacing(@Nullable LensFacing lensFacing) {
public void setCameraLensFacing(@Nullable CameraX.LensFacing lensFacing) {
mCameraModule.setCameraLensFacing(lensFacing);
}
/** Returns the currently selected {@link LensFacing}. */
@Nullable
public LensFacing getCameraLensFacing() {
public CameraX.LensFacing getCameraLensFacing() {
return mCameraModule.getLensFacing();
}
/**
* Focuses the camera on the given area.
*
* <p>Sets the focus and exposure metering rectangles. Coordinates for both X and Y dimensions
* are Limited from -1000 to 1000, where (0, 0) is the center of the image and the width/height
* represent the values from -1000 to 1000.
*
* @param focus Area used to focus the camera.
* @param metering Area used for exposure metering.
*/
public void focus(Rect focus, Rect metering) {
mCameraModule.focus(focus, metering);
// Begin Signal Custom Code Block
public boolean hasFlash() {
return mCameraModule.hasFlash();
}
// End Signal Custom Code Block
/** Gets the active flash strategy. */
public FlashMode getFlash() {
@ -685,14 +736,10 @@ public final class CameraXView extends ViewGroup {
}
/** Sets the active flash strategy. */
public void setFlash(FlashMode flashMode) {
public void setFlash(@NonNull FlashMode flashMode) {
mCameraModule.setFlash(flashMode);
}
public boolean hasFlash() {
return mCameraModule.hasFlash();
}
private int getRelativeCameraOrientation(boolean compensateForMirroring) {
return mCameraModule.getRelativeCameraOrientation(compensateForMirroring);
}
@ -702,7 +749,7 @@ public final class CameraXView extends ViewGroup {
}
@Override
public boolean onTouchEvent(MotionEvent event) {
public boolean onTouchEvent(@NonNull MotionEvent event) {
// Disable pinch-to-zoom and tap-to-focus while the camera module is paused.
if (mCameraModule.isPaused()) {
return false;
@ -745,10 +792,21 @@ public final class CameraXView extends ViewGroup {
final float x = (mUpEvent != null) ? mUpEvent.getX() : getX() + getWidth() / 2f;
final float y = (mUpEvent != null) ? mUpEvent.getY() : getY() + getHeight() / 2f;
mUpEvent = null;
calculateTapArea(mFocusingRect, x, y, 1f);
calculateTapArea(mMeteringRect, x, y, 1.5f);
if (area(mFocusingRect) > 0 && area(mMeteringRect) > 0) {
focus(mFocusingRect, mMeteringRect);
TextureViewMeteringPointFactory pointFactory = new TextureViewMeteringPointFactory(
mCameraTextureView);
float afPointWidth = 1.0f / 6.0f; // point width = 1/6 of the view dimension
float aePointWidth = afPointWidth * 1.5f;
MeteringPoint afPoint = pointFactory.createPoint(x, y, afPointWidth, 1.0f);
MeteringPoint aePoint = pointFactory.createPoint(x, y, aePointWidth, 1.0f);
try {
CameraX.getCameraControl(getCameraLensFacing()).startFocusAndMetering(
FocusMeteringAction.Builder.from(afPoint, FocusMeteringAction.MeteringMode.AF_ONLY)
.addPoint(aePoint, FocusMeteringAction.MeteringMode.AE_ONLY)
.build());
} catch (CameraInfoUnavailableException e) {
Log.d(TAG, "cannot access camera", e);
}
return true;
@ -759,80 +817,6 @@ public final class CameraXView extends ViewGroup {
return rect.width() * rect.height();
}
/** The area must be between -1000,-1000 and 1000,1000 */
private void calculateTapArea(Rect rect, float x, float y, float coefficient) {
int max = 1000;
int min = -1000;
// Default to 300 (1/6th the total area) and scale by the coefficient
int areaSize = (int) (300 * coefficient);
// Rotate the coordinates if the camera orientation is different
int width = getWidth();
int height = getHeight();
// Compensate orientation as it's mirrored on preview for forward facing cameras
boolean compensateForMirroring = (getCameraLensFacing() == LensFacing.FRONT);
int relativeCameraOrientation = getRelativeCameraOrientation(compensateForMirroring);
int temp;
float tempf;
switch (relativeCameraOrientation) {
case 90:
// Fall-through
case 270:
// We're horizontal. Swap width/height. Swap x/y.
temp = width;
//noinspection SuspiciousNameCombination
width = height;
height = temp;
tempf = x;
//noinspection SuspiciousNameCombination
x = y;
y = tempf;
break;
default:
break;
}
switch (relativeCameraOrientation) {
// Map to correct coordinates according to relativeCameraOrientation
case 90:
y = height - y;
break;
case 180:
x = width - x;
y = height - y;
break;
case 270:
x = width - x;
break;
default:
break;
}
// Swap x if it's a mirrored preview
if (compensateForMirroring) {
x = width - x;
}
// Grab the x, y position from within the View and normalize it to -1000 to 1000
x = min + distance(max, min) * (x / width);
y = min + distance(max, min) * (y / height);
// Modify the rect to the bounding area
rect.top = (int) y - areaSize / 2;
rect.left = (int) x - areaSize / 2;
rect.bottom = rect.top + areaSize;
rect.right = rect.left + areaSize;
// Cap at -1000 to 1000
rect.top = rangeLimit(rect.top, max, min);
rect.left = rangeLimit(rect.left, max, min);
rect.bottom = rangeLimit(rect.bottom, max, min);
rect.right = rangeLimit(rect.right, max, min);
}
private int rangeLimit(int val, int max, int min) {
return Math.min(Math.max(val, min), max);
}
@ -975,7 +959,7 @@ public final class CameraXView extends ViewGroup {
* The capture mode used by CameraView.
*
* <p>This enum can be used to determine which capture mode will be enabled for {@link
* CameraXView}.
* CameraView}.
*/
public enum CaptureMode {
/** A mode where image capture is enabled. */
@ -1024,7 +1008,7 @@ public final class CameraXView extends ViewGroup {
private class PinchToZoomGestureDetector extends ScaleGestureDetector
implements ScaleGestureDetector.OnScaleGestureListener {
private static final float SCALE_MULTIPIER = 0.75f;
private final Interpolator mInterpolator = new DecelerateInterpolator(2f);
private final BaseInterpolator mInterpolator = new DecelerateInterpolator(2f);
private float mNormalizedScaleFactor = 0;
PinchToZoomGestureDetector(Context context) {

View File

@ -0,0 +1,93 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.SurfaceTexture;
import android.view.TextureView;
import androidx.annotation.NonNull;
import androidx.camera.core.MeteringPoint;
import androidx.camera.core.MeteringPointFactory;
/**
* A {@link MeteringPointFactory} that creates a {@link MeteringPoint} from a {@link TextureView}
* and an (x, y) position within it.
*
* <p>The SurfaceTexture inside a TextureView can be cropped, scaled or rotated by
* {@link TextureView#getTransform(Matrix)}. This factory translates the (x, y) into the sensor
* crop region's normalized (x, y) by inverting that transform. {@link SurfaceTexture#getTransformMatrix} is
* also applied during the translation. No lens-facing information is required because
* {@link SurfaceTexture#getTransformMatrix} already contains the transformation corresponding
* to the lens facing of the current camera output.
*/
public class TextureViewMeteringPointFactory extends MeteringPointFactory {
private final TextureView mTextureView;
public TextureViewMeteringPointFactory(@NonNull TextureView textureView) {
mTextureView = textureView;
}
/**
* Translates an (x, y) position from the TextureView.
*/
@NonNull
@Override
protected PointF translatePoint(float x, float y) {
Matrix transform = new Matrix();
mTextureView.getTransform(transform);
// apply the inverse of TextureView#getTransform
Matrix inverse = new Matrix();
transform.invert(inverse);
float[] pt = new float[]{x, y};
inverse.mapPoints(pt);
// get SurfaceTexture#getTransformMatrix
float[] surfaceTextureMat = new float[16];
mTextureView.getSurfaceTexture().getTransformMatrix(surfaceTextureMat);
// convert SurfaceTexture#getTransformMatrix(4x4 column major 3D matrix) to
// android.graphics.Matrix(3x3 row major 2D matrix)
Matrix surfaceTextureTransform = glMatrixToGraphicsMatrix(surfaceTextureMat);
float[] pt2 = new float[2];
// convert to texture coordinates first.
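// The y axis is flipped because GL texture coordinates place the origin at the
// bottom-left corner, while View coordinates place it at the top-left.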
pt2[0] = pt[0] / mTextureView.getWidth();
pt2[1] = (mTextureView.getHeight() - pt[1]) / mTextureView.getHeight();
surfaceTextureTransform.mapPoints(pt2);
return new PointF(pt2[0], pt2[1]);
}
private Matrix glMatrixToGraphicsMatrix(float[] glMatrix) {
float[] convert = new float[9];
convert[0] = glMatrix[0];
convert[1] = glMatrix[4];
convert[2] = glMatrix[12];
convert[3] = glMatrix[1];
convert[4] = glMatrix[5];
convert[5] = glMatrix[13];
convert[6] = glMatrix[3];
convert[7] = glMatrix[7];
convert[8] = glMatrix[15];
Matrix graphicsMatrix = new Matrix();
graphicsMatrix.setValues(convert);
return graphicsMatrix;
}
}
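
A hedged usage sketch, mirroring the tap-to-focus path added to CameraXView above (textureView, x, y and lensFacing are placeholders):

    TextureViewMeteringPointFactory factory = new TextureViewMeteringPointFactory(textureView);
    MeteringPoint afPoint = factory.createPoint(x, y, 1.0f / 6.0f, 1.0f);
    try {
        CameraX.getCameraControl(lensFacing).startFocusAndMetering(
            FocusMeteringAction.Builder.from(afPoint, FocusMeteringAction.MeteringMode.AF_ONLY).build());
    } catch (CameraInfoUnavailableException e) {
        // no camera matching lensFacing is available
    }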

View File

@ -1,5 +1,3 @@
package org.thoughtcrime.securesms.mediasend.camerax;
/*
* Copyright (C) 2019 The Android Open Source Project
*
@ -16,18 +14,24 @@ package org.thoughtcrime.securesms.mediasend.camerax;
* limitations under the License.
*/
package org.thoughtcrime.securesms.mediasend.camerax;
import android.annotation.SuppressLint;
import android.location.Location;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.CamcorderProfile;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.media.MediaRecorder.AudioSource;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
@ -37,13 +41,16 @@ import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RestrictTo;
import androidx.camera.core.CameraInfo;
import androidx.annotation.RestrictTo.Scope;
import androidx.camera.core.CameraInfoInternal;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraX;
import androidx.camera.core.CameraX.LensFacing;
import androidx.camera.core.CameraXThreads;
import androidx.camera.core.ConfigProvider;
import androidx.camera.core.DeferrableSurface;
import androidx.camera.core.ImageOutputConfig;
import androidx.camera.core.ImageOutputConfig.RotationValue;
import androidx.camera.core.ImmediateSurface;
import androidx.camera.core.SessionConfig;
import androidx.camera.core.UseCase;
@ -51,7 +58,6 @@ import androidx.camera.core.UseCaseConfig;
import androidx.camera.core.VideoCaptureConfig;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import org.thoughtcrime.securesms.logging.Log;
import org.thoughtcrime.securesms.video.VideoUtil;
import java.io.File;
@ -59,6 +65,8 @@ import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
@ -70,6 +78,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
* @hide VideoCapture is deprioritized at this early stage.
*/
@RequiresApi(26)
@RestrictTo(Scope.LIBRARY_GROUP)
public class VideoCapture extends UseCase {
/**
@ -77,9 +86,9 @@ public class VideoCapture extends UseCase {
*
* @hide
*/
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public static final VideoCapture.Defaults DEFAULT_CONFIG = new VideoCapture.Defaults();
private static final VideoCapture.Metadata EMPTY_METADATA = new VideoCapture.Metadata();
@RestrictTo(Scope.LIBRARY_GROUP)
public static final Defaults DEFAULT_CONFIG = new Defaults();
private static final Metadata EMPTY_METADATA = new Metadata();
private static final String TAG = "VideoCapture";
/** Amount of time to wait for dequeuing a buffer from the videoEncoder. */
private static final int DEQUE_TIMEOUT_USEC = 10000;
@ -90,10 +99,10 @@ public class VideoCapture extends UseCase {
// End Signal Custom Code Block
/** Camcorder profiles quality list */
private static final int[] CamcorderQuality = {
CamcorderProfile.QUALITY_2160P,
CamcorderProfile.QUALITY_1080P,
CamcorderProfile.QUALITY_720P,
CamcorderProfile.QUALITY_480P
CamcorderProfile.QUALITY_2160P,
CamcorderProfile.QUALITY_1080P,
CamcorderProfile.QUALITY_720P,
CamcorderProfile.QUALITY_480P
};
/**
* Audio encoding
@ -101,28 +110,29 @@ public class VideoCapture extends UseCase {
* <p>The results of PCM_8BIT and PCM_FLOAT are not good, so PCM_16BIT is set as the first option.
*/
private static final short[] sAudioEncoding = {
AudioFormat.ENCODING_PCM_16BIT,
AudioFormat.ENCODING_PCM_8BIT,
AudioFormat.ENCODING_PCM_FLOAT
AudioFormat.ENCODING_PCM_16BIT,
AudioFormat.ENCODING_PCM_8BIT,
AudioFormat.ENCODING_PCM_FLOAT
};
private final MediaCodec.BufferInfo mVideoBufferInfo = new MediaCodec.BufferInfo();
private final BufferInfo mVideoBufferInfo = new BufferInfo();
private final Object mMuxerLock = new Object();
/** Thread on which all encoding occurs. */
private final HandlerThread mVideoHandlerThread =
new HandlerThread(CameraXThreads.TAG + "video encoding thread");
new HandlerThread(CameraXThreads.TAG + "video encoding thread");
private final Handler mVideoHandler;
/** Thread on which audio encoding occurs. */
private final HandlerThread mAudioHandlerThread =
new HandlerThread(CameraXThreads.TAG + "audio encoding thread");
new HandlerThread(CameraXThreads.TAG + "audio encoding thread");
private final Handler mAudioHandler;
private final AtomicBoolean mEndOfVideoStreamSignal = new AtomicBoolean(true);
private final AtomicBoolean mEndOfAudioStreamSignal = new AtomicBoolean(true);
private final AtomicBoolean mEndOfAudioVideoSignal = new AtomicBoolean(true);
private final MediaCodec.BufferInfo mAudioBufferInfo = new MediaCodec.BufferInfo();
private final BufferInfo mAudioBufferInfo = new BufferInfo();
/** Whether the first video/audio sample has been written yet. */
private final AtomicBoolean mIsFirstVideoSampleWrite = new AtomicBoolean(false);
private final AtomicBoolean mIsFirstAudioSampleWrite = new AtomicBoolean(false);
private final VideoCaptureConfig.Builder mUseCaseConfigBuilder;
@NonNull
MediaCodec mVideoEncoder;
@NonNull
@ -138,6 +148,7 @@ public class VideoCapture extends UseCase {
/** Surface the camera writes to, which the videoEncoder uses as input. */
Surface mCameraSurface;
/** audio raw data */
@NonNull
private AudioRecord mAudioRecorder;
private int mAudioBufferSize;
private boolean mIsRecording = false;
@ -167,9 +178,9 @@ public class VideoCapture extends UseCase {
/** Creates a {@link MediaFormat} using parameters from the configuration */
private static MediaFormat createMediaFormat(VideoCaptureConfig config, Size resolution) {
MediaFormat format =
MediaFormat.createVideoFormat(
VIDEO_MIME_TYPE, resolution.getWidth(), resolution.getHeight());
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
MediaFormat.createVideoFormat(
VIDEO_MIME_TYPE, resolution.getWidth(), resolution.getHeight());
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, config.getBitRate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, config.getVideoFrameRate());
// Begin Signal Custom Code Block
@ -188,10 +199,10 @@ public class VideoCapture extends UseCase {
*/
@Override
@Nullable
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
protected UseCaseConfig.Builder<?, ?, ?> getDefaultBuilder(CameraX.LensFacing lensFacing) {
@RestrictTo(Scope.LIBRARY_GROUP)
protected UseCaseConfig.Builder<?, ?, ?> getDefaultBuilder(LensFacing lensFacing) {
VideoCaptureConfig defaults = CameraX.getDefaultUseCaseConfig(
VideoCaptureConfig.class, lensFacing);
VideoCaptureConfig.class, lensFacing);
if (defaults != null) {
return VideoCaptureConfig.Builder.fromConfig(defaults);
}
@ -205,9 +216,9 @@ public class VideoCapture extends UseCase {
* @hide
*/
@Override
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@RestrictTo(Scope.LIBRARY_GROUP)
protected Map<String, Size> onSuggestedResolutionUpdated(
Map<String, Size> suggestedResolutionMap) {
Map<String, Size> suggestedResolutionMap) {
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
if (mCameraSurface != null) {
mVideoEncoder.stop();
@ -228,7 +239,7 @@ public class VideoCapture extends UseCase {
Size resolution = suggestedResolutionMap.get(cameraId);
if (resolution == null) {
throw new IllegalArgumentException(
"Suggested resolution map missing resolution for camera " + cameraId);
"Suggested resolution map missing resolution for camera " + cameraId);
}
setupEncoder(resolution);
@ -240,17 +251,20 @@ public class VideoCapture extends UseCase {
* called.
*
* <p>startRecording() is asynchronous. Callers need to watch for errors via
* {@link VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)}.
* {@link OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)}.
*
* @param saveLocation Location to save the video capture
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener to call for the recorded video
*/
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
// Begin Signal Custom Code Block
public void startRecording(FileDescriptor saveLocation, VideoCapture.OnVideoSavedListener listener) {
public void startRecording(@NonNull FileDescriptor saveLocation,
@NonNull Executor executor, @NonNull OnVideoSavedListener listener) {
// End Signal Custom Code Block
mIsFirstVideoSampleWrite.set(false);
mIsFirstAudioSampleWrite.set(false);
startRecording(saveLocation, listener, EMPTY_METADATA);
startRecording(saveLocation, EMPTY_METADATA, executor, listener);
}
/**
@ -258,38 +272,37 @@ public class VideoCapture extends UseCase {
* called.
*
* <p>startRecording() is asynchronous. Callers need to watch for errors via
* {@link VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)}.
* {@link OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)}.
*
* @param saveLocation Location to save the video capture
* @param listener Listener to call for the recorded video
* @param metadata Metadata to save with the recorded video
* @param executor The executor in which the listener callback methods will be run.
* @param listener Listener to call for the recorded video
*/
@SuppressLint("LambdaLast") // Maybe remove after https://issuetracker.google.com/135275901
// Begin Signal Custom Code Block
public void startRecording(
final FileDescriptor saveLocation, final VideoCapture.OnVideoSavedListener listener, VideoCapture.Metadata metadata) {
@NonNull FileDescriptor saveLocation, @NonNull Metadata metadata,
@NonNull Executor executor,
@NonNull OnVideoSavedListener listener) {
// End Signal Custom Code Block
Log.i(TAG, "startRecording");
OnVideoSavedListener postListener = new VideoSavedListenerWrapper(executor, listener);
if (!mEndOfAudioVideoSignal.get()) {
listener.onError(
VideoCapture.VideoCaptureError.RECORDING_IN_PROGRESS, "It is still in video recording!",
null);
postListener.onError(
VideoCaptureError.RECORDING_IN_PROGRESS, "It is still in video recording!",
null);
return;
}
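// VideoSavedListenerWrapper is not shown in this diff; it is assumed to simply
// trampoline each callback onto the supplied Executor, roughly:
//   public void onVideoSaved(FileDescriptor fd) { mExecutor.execute(() -> mListener.onVideoSaved(fd)); }
//   public void onError(...) { mExecutor.execute(() -> mListener.onError(...)); }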
// Begin Signal Custom Code Block
if (mAudioRecorder != null) {
try {
// audioRecord start
mAudioRecorder.startRecording();
} catch (IllegalStateException e) {
listener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR, "AudioRecorder start fail", e);
return;
}
} else {
Log.w(TAG, "Audio recorder was not initialized! Can't record audio.");
try {
// audioRecord start
mAudioRecorder.startRecording();
} catch (IllegalStateException e) {
postListener.onError(VideoCaptureError.ENCODER_ERROR, "AudioRecorder start fail", e);
return;
}
// End Signal Custom Code Block
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
String cameraId = getCameraIdUnchecked(config);
@ -303,18 +316,19 @@ public class VideoCapture extends UseCase {
} catch (IllegalStateException e) {
setupEncoder(getAttachedSurfaceResolution(cameraId));
listener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR, "Audio/Video encoder start fail", e);
postListener.onError(VideoCaptureError.ENCODER_ERROR, "Audio/Video encoder start fail",
e);
return;
}
// Get the relative rotation or default to 0 if the camera info is unavailable
int relativeRotation = 0;
try {
CameraInfo cameraInfo = CameraX.getCameraInfo(cameraId);
CameraInfoInternal cameraInfoInternal = CameraX.getCameraInfo(cameraId);
relativeRotation =
cameraInfo.getSensorRotationDegrees(
((ImageOutputConfig) getUseCaseConfig())
.getTargetRotation(Surface.ROTATION_0));
cameraInfoInternal.getSensorRotationDegrees(
((ImageOutputConfig) getUseCaseConfig())
.getTargetRotation(Surface.ROTATION_0));
} catch (CameraInfoUnavailableException e) {
Log.e(TAG, "Unable to retrieve camera sensor orientation.", e);
}
@ -322,22 +336,22 @@ public class VideoCapture extends UseCase {
try {
synchronized (mMuxerLock) {
mMuxer =
new MediaMuxer(
// Begin Signal Custom Code Block
saveLocation,
// End Signal Custom Code Block
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
new MediaMuxer(
// Begin Signal Custom Code Block
saveLocation,
// End Signal Custom Code Block
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mMuxer.setOrientationHint(relativeRotation);
if (metadata.location != null) {
mMuxer.setLocation(
(float) metadata.location.getLatitude(),
(float) metadata.location.getLongitude());
(float) metadata.location.getLatitude(),
(float) metadata.location.getLongitude());
}
}
} catch (IOException e) {
setupEncoder(getAttachedSurfaceResolution(cameraId));
listener.onError(VideoCapture.VideoCaptureError.MUXER_ERROR, "MediaMuxer creation failed!", e);
postListener.onError(VideoCaptureError.MUXER_ERROR, "MediaMuxer creation failed!", e);
return;
}
@ -348,32 +362,32 @@ public class VideoCapture extends UseCase {
notifyActive();
mAudioHandler.post(
new Runnable() {
@Override
public void run() {
VideoCapture.this.audioEncode(listener);
}
});
new Runnable() {
@Override
public void run() {
VideoCapture.this.audioEncode(postListener);
}
});
mVideoHandler.post(
new Runnable() {
@Override
public void run() {
boolean errorOccurred = VideoCapture.this.videoEncode(listener);
if (!errorOccurred) {
listener.onVideoSaved(saveLocation);
}
new Runnable() {
@Override
public void run() {
boolean errorOccurred = VideoCapture.this.videoEncode(postListener);
if (!errorOccurred) {
postListener.onVideoSaved(saveLocation);
}
});
}
});
}
/**
* Stops recording video; this must be called after {@link
* VideoCapture#startRecording(File, VideoCapture.OnVideoSavedListener, VideoCapture.Metadata)} is called.
* VideoCapture#startRecording(File, Metadata, Executor, OnVideoSavedListener)} is called.
*
* <p>stopRecording() is an asynchronous API. Users need to check whether {@link
* VideoCapture.OnVideoSavedListener#onVideoSaved(File)} or
* {@link VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)} be called
* OnVideoSavedListener#onVideoSaved(File)} or
* {@link OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)} is called
* before calling startRecording() again.
*/
public void stopRecording() {
@ -390,7 +404,7 @@ public class VideoCapture extends UseCase {
*
* @hide
*/
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@RestrictTo(Scope.LIBRARY_GROUP)
@Override
public void clear() {
mVideoHandlerThread.quitSafely();
@ -423,19 +437,19 @@ public class VideoCapture extends UseCase {
final MediaCodec videoEncoder = mVideoEncoder;
mDeferrableSurface.setOnSurfaceDetachedListener(
CameraXExecutors.mainThreadExecutor(),
new DeferrableSurface.OnSurfaceDetachedListener() {
@Override
public void onSurfaceDetached() {
if (releaseVideoEncoder && videoEncoder != null) {
videoEncoder.release();
}
if (surface != null) {
surface.release();
}
CameraXExecutors.mainThreadExecutor(),
new DeferrableSurface.OnSurfaceDetachedListener() {
@Override
public void onSurfaceDetached() {
if (releaseVideoEncoder && videoEncoder != null) {
videoEncoder.release();
}
});
if (surface != null) {
surface.release();
}
}
});
if (releaseVideoEncoder) {
mVideoEncoder = null;
@ -453,7 +467,7 @@ public class VideoCapture extends UseCase {
*
* @param rotation Desired rotation of the output video.
*/
public void setTargetRotation(@ImageOutputConfig.RotationValue int rotation) {
public void setTargetRotation(@RotationValue int rotation) {
ImageOutputConfig oldConfig = (ImageOutputConfig) getUseCaseConfig();
int oldRotation = oldConfig.getTargetRotation(ImageOutputConfig.INVALID_ROTATION);
if (oldRotation == ImageOutputConfig.INVALID_ROTATION || oldRotation != rotation) {
@ -468,35 +482,45 @@ public class VideoCapture extends UseCase {
* Setup the {@link MediaCodec} for encoding video from a camera {@link Surface} and encoding
* audio from selected audio source.
*/
private void setupEncoder(Size resolution) {
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
void setupEncoder(Size resolution) {
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
// video encoder setup
mVideoEncoder.reset();
mVideoEncoder.configure(
createMediaFormat(config, resolution), /*surface*/
null, /*crypto*/
null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
createMediaFormat(config, resolution), /*surface*/
null, /*crypto*/
null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
if (mCameraSurface != null) {
releaseCameraSurface(false);
}
mCameraSurface = mVideoEncoder.createInputSurface();
SessionConfig.Builder builder = SessionConfig.Builder.createFrom(config);
SessionConfig.Builder sessionConfigBuilder = SessionConfig.Builder.createFrom(config);
mDeferrableSurface = new ImmediateSurface(mCameraSurface);
builder.addSurface(mDeferrableSurface);
sessionConfigBuilder.addSurface(mDeferrableSurface);
String cameraId = getCameraIdUnchecked(config);
attachToCamera(cameraId, builder.build());
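// Rebuild the encoder via setupEncoder() if the capture session later reports an
// error, so that a transient session failure does not permanently break recording.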
sessionConfigBuilder.addErrorListener(new SessionConfig.ErrorListener() {
@Override
public void onError(@NonNull SessionConfig sessionConfig,
@NonNull SessionConfig.SessionError error) {
setupEncoder(resolution);
}
});
attachToCamera(cameraId, sessionConfigBuilder.build());
// audio encoder setup
setAudioParametersByCamcorderProfile(resolution, cameraId);
mAudioEncoder.reset();
mAudioEncoder.configure(
createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (mAudioRecorder != null) {
mAudioRecorder.release();
@ -558,9 +582,9 @@ public class VideoCapture extends UseCase {
ByteBuffer buffer = getOutputBuffer(mAudioEncoder, bufferIndex);
buffer.position(mAudioBufferInfo.offset);
if (mAudioTrackIndex >= 0
&& mVideoTrackIndex >= 0
&& mAudioBufferInfo.size > 0
&& mAudioBufferInfo.presentationTimeUs > 0) {
&& mVideoTrackIndex >= 0
&& mAudioBufferInfo.size > 0
&& mAudioBufferInfo.presentationTimeUs > 0) {
try {
synchronized (mMuxerLock) {
if (!mIsFirstAudioSampleWrite.get()) {
@ -571,13 +595,13 @@ public class VideoCapture extends UseCase {
}
} catch (Exception e) {
Log.e(
TAG,
"audio error:size="
+ mAudioBufferInfo.size
+ "/offset="
+ mAudioBufferInfo.offset
+ "/timeUs="
+ mAudioBufferInfo.presentationTimeUs);
TAG,
"audio error:size="
+ mAudioBufferInfo.size
+ "/offset="
+ mAudioBufferInfo.offset
+ "/timeUs="
+ mAudioBufferInfo.presentationTimeUs);
e.printStackTrace();
}
}
@ -591,7 +615,7 @@ public class VideoCapture extends UseCase {
*
* @return {@code true} if an error condition occurred, otherwise {@code false}
*/
boolean videoEncode(VideoCapture.OnVideoSavedListener videoSavedListener) {
boolean videoEncode(OnVideoSavedListener videoSavedListener) {
VideoCaptureConfig config = (VideoCaptureConfig) getUseCaseConfig();
// Main encoding loop. Exits on end of stream.
boolean errorOccurred = false;
@ -605,14 +629,14 @@ public class VideoCapture extends UseCase {
// Deque buffer to check for processing step
int outputBufferId =
mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, DEQUE_TIMEOUT_USEC);
mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, DEQUE_TIMEOUT_USEC);
switch (outputBufferId) {
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
if (mMuxerStarted) {
videoSavedListener.onError(
VideoCapture.VideoCaptureError.ENCODER_ERROR,
"Unexpected change in video encoding format.",
null);
VideoCaptureError.ENCODER_ERROR,
"Unexpected change in video encoding format.",
null);
errorOccurred = true;
}
@ -640,8 +664,8 @@ public class VideoCapture extends UseCase {
Log.i(TAG, "videoEncoder stop");
mVideoEncoder.stop();
} catch (IllegalStateException e) {
videoSavedListener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR,
"Video encoder stop failed!", e);
videoSavedListener.onError(VideoCaptureError.ENCODER_ERROR,
"Video encoder stop failed!", e);
errorOccurred = true;
}
@ -657,7 +681,7 @@ public class VideoCapture extends UseCase {
}
}
} catch (IllegalStateException e) {
videoSavedListener.onError(VideoCapture.VideoCaptureError.MUXER_ERROR, "Muxer stop failed!", e);
videoSavedListener.onError(VideoCaptureError.MUXER_ERROR, "Muxer stop failed!", e);
errorOccurred = true;
}
@ -676,7 +700,7 @@ public class VideoCapture extends UseCase {
return errorOccurred;
}
boolean audioEncode(VideoCapture.OnVideoSavedListener videoSavedListener) {
boolean audioEncode(OnVideoSavedListener videoSavedListener) {
// Audio encoding loop. Exits on end of stream.
boolean audioEos = false;
int outIndex;
@ -696,11 +720,11 @@ public class VideoCapture extends UseCase {
int length = mAudioRecorder.read(buffer, mAudioBufferSize);
if (length > 0) {
mAudioEncoder.queueInputBuffer(
index,
0,
length,
(System.nanoTime() / 1000),
mIsRecording ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
index,
0,
length,
(System.nanoTime() / 1000),
mIsRecording ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
}
@ -729,25 +753,17 @@ public class VideoCapture extends UseCase {
// Audio Stop
try {
Log.i(TAG, "audioRecorder stop");
// Begin Signal Custom Code Block
if (mAudioRecorder != null) {
mAudioRecorder.stop();
}
// End Signal Custom Code Block
mAudioRecorder.stop();
} catch (IllegalStateException e) {
videoSavedListener.onError(
VideoCapture.VideoCaptureError.ENCODER_ERROR, "Audio recorder stop failed!", e);
VideoCaptureError.ENCODER_ERROR, "Audio recorder stop failed!", e);
}
try {
// Begin Signal Custom Code Block
if (mAudioRecorder != null) {
mAudioEncoder.stop();
}
// End Signal Custom Code Block
mAudioEncoder.stop();
} catch (IllegalStateException e) {
videoSavedListener.onError(VideoCapture.VideoCaptureError.ENCODER_ERROR,
"Audio encoder stop failed!", e);
videoSavedListener.onError(VideoCaptureError.ENCODER_ERROR,
"Audio encoder stop failed!", e);
}
Log.i(TAG, "Audio encode thread end");
@ -769,10 +785,10 @@ public class VideoCapture extends UseCase {
/** Creates a {@link MediaFormat} using parameters for audio from the configuration */
private MediaFormat createAudioMediaFormat() {
MediaFormat format =
MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, mAudioSampleRate,
mAudioChannelCount);
MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, mAudioSampleRate,
mAudioChannelCount);
format.setInteger(
MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitRate);
return format;
@ -784,41 +800,41 @@ public class VideoCapture extends UseCase {
// Use channel count to determine stereo vs mono
int channelConfig =
mAudioChannelCount == 1
? AudioFormat.CHANNEL_IN_MONO
: AudioFormat.CHANNEL_IN_STEREO;
mAudioChannelCount == 1
? AudioFormat.CHANNEL_IN_MONO
: AudioFormat.CHANNEL_IN_STEREO;
int source = config.getAudioRecordSource();
try {
int bufferSize =
AudioRecord.getMinBufferSize(mAudioSampleRate, channelConfig, audioFormat);
AudioRecord.getMinBufferSize(mAudioSampleRate, channelConfig, audioFormat);
if (bufferSize <= 0) {
bufferSize = config.getAudioMinBufferSize();
}
AudioRecord recorder =
new AudioRecord(
source,
mAudioSampleRate,
channelConfig,
audioFormat,
bufferSize * 2);
new AudioRecord(
source,
mAudioSampleRate,
channelConfig,
audioFormat,
bufferSize * 2);
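// The recorder is created with twice the minimum buffer size, leaving headroom so
// samples are less likely to be dropped if the encoder briefly falls behind.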
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
mAudioBufferSize = bufferSize;
Log.i(
TAG,
"source: "
+ source
+ " audioSampleRate: "
+ mAudioSampleRate
+ " channelConfig: "
+ channelConfig
+ " audioFormat: "
+ audioFormat
+ " bufferSize: "
+ bufferSize);
TAG,
"source: "
+ source
+ " audioSampleRate: "
+ mAudioSampleRate
+ " channelConfig: "
+ channelConfig
+ " audioFormat: "
+ audioFormat
+ " bufferSize: "
+ bufferSize);
return recorder;
}
} catch (Exception e) {
@ -838,7 +854,7 @@ public class VideoCapture extends UseCase {
if (CamcorderProfile.hasProfile(Integer.parseInt(cameraId), quality)) {
profile = CamcorderProfile.get(Integer.parseInt(cameraId), quality);
if (currentResolution.getWidth() == profile.videoFrameWidth
&& currentResolution.getHeight() == profile.videoFrameHeight) {
&& currentResolution.getHeight() == profile.videoFrameHeight) {
mAudioChannelCount = profile.audioChannels;
mAudioSampleRate = profile.audioSampleRate;
mAudioBitRate = profile.audioBitRate;
@ -862,7 +878,7 @@ public class VideoCapture extends UseCase {
* Describes the error that occurred during video capture operations.
*
* <p>This is a parameter sent to the error callback functions set in listeners such as {@link
* VideoCapture.OnVideoSavedListener#onError(VideoCapture.VideoCaptureError, String, Throwable)}.
* VideoCapture.OnVideoSavedListener#onError(VideoCaptureError, String, Throwable)}.
*
* <p>See the message parameter in the onError callback or the log for more details.
*/
@ -890,11 +906,11 @@ public class VideoCapture extends UseCase {
public interface OnVideoSavedListener {
/** Called when the video has been successfully saved. */
// Begin Signal Custom Code Block
void onVideoSaved(@NonNull FileDescriptor fileDescriptor);
void onVideoSaved(@NonNull FileDescriptor file);
// End Signal Custom Code Block
/** Called when an error occurs while attempting to save the video. */
void onError(@NonNull VideoCapture.VideoCaptureError videoCaptureError, @NonNull String message,
void onError(@NonNull VideoCaptureError videoCaptureError, @NonNull String message,
@Nullable Throwable cause);
}
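// A minimal listener sketch for the Signal FileDescriptor variant above; the method
// bodies are illustrative only.
//
//   OnVideoSavedListener listener = new OnVideoSavedListener() {
//     @Override
//     public void onVideoSaved(@NonNull FileDescriptor file) {
//       Log.i(TAG, "Video saved to the supplied descriptor.");
//     }
//
//     @Override
//     public void onError(@NonNull VideoCaptureError error, @NonNull String message,
//                         @Nullable Throwable cause) {
//       Log.e(TAG, "Video capture failed: " + message, cause);
//     }
//   };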
@ -906,9 +922,9 @@ public class VideoCapture extends UseCase {
*
* @hide
*/
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
@RestrictTo(Scope.LIBRARY_GROUP)
public static final class Defaults
implements ConfigProvider<VideoCaptureConfig> {
implements ConfigProvider<VideoCaptureConfig> {
private static final Handler DEFAULT_HANDLER = new Handler(Looper.getMainLooper());
private static final int DEFAULT_VIDEO_FRAME_RATE = 30;
/** 8Mb/s, the recommended bit rate for 1080p video at 30fps */
@ -922,7 +938,7 @@ public class VideoCapture extends UseCase {
/** audio channel count */
private static final int DEFAULT_AUDIO_CHANNEL_COUNT = 1;
/** audio record source */
private static final int DEFAULT_AUDIO_RECORD_SOURCE = MediaRecorder.AudioSource.MIC;
private static final int DEFAULT_AUDIO_RECORD_SOURCE = AudioSource.MIC;
/** audio default minimum buffer size */
private static final int DEFAULT_AUDIO_MIN_BUFFER_SIZE = 1024;
/** The current max resolution of VideoCapture is set to FHD */
@ -934,24 +950,23 @@ public class VideoCapture extends UseCase {
static {
VideoCaptureConfig.Builder builder =
new VideoCaptureConfig.Builder()
.setCallbackHandler(DEFAULT_HANDLER)
.setVideoFrameRate(DEFAULT_VIDEO_FRAME_RATE)
.setBitRate(DEFAULT_BIT_RATE)
.setIFrameInterval(DEFAULT_INTRA_FRAME_INTERVAL)
.setAudioBitRate(DEFAULT_AUDIO_BIT_RATE)
.setAudioSampleRate(DEFAULT_AUDIO_SAMPLE_RATE)
.setAudioChannelCount(DEFAULT_AUDIO_CHANNEL_COUNT)
.setAudioRecordSource(DEFAULT_AUDIO_RECORD_SOURCE)
.setAudioMinBufferSize(DEFAULT_AUDIO_MIN_BUFFER_SIZE)
.setMaxResolution(DEFAULT_MAX_RESOLUTION)
.setSurfaceOccupancyPriority(DEFAULT_SURFACE_OCCUPANCY_PRIORITY);
new VideoCaptureConfig.Builder()
.setVideoFrameRate(DEFAULT_VIDEO_FRAME_RATE)
.setBitRate(DEFAULT_BIT_RATE)
.setIFrameInterval(DEFAULT_INTRA_FRAME_INTERVAL)
.setAudioBitRate(DEFAULT_AUDIO_BIT_RATE)
.setAudioSampleRate(DEFAULT_AUDIO_SAMPLE_RATE)
.setAudioChannelCount(DEFAULT_AUDIO_CHANNEL_COUNT)
.setAudioRecordSource(DEFAULT_AUDIO_RECORD_SOURCE)
.setAudioMinBufferSize(DEFAULT_AUDIO_MIN_BUFFER_SIZE)
.setMaxResolution(DEFAULT_MAX_RESOLUTION)
.setSurfaceOccupancyPriority(DEFAULT_SURFACE_OCCUPANCY_PRIORITY);
DEFAULT_CONFIG = builder.build();
}
@Override
public VideoCaptureConfig getConfig(CameraX.LensFacing lensFacing) {
public VideoCaptureConfig getConfig(LensFacing lensFacing) {
return DEFAULT_CONFIG;
}
}
@ -962,4 +977,39 @@ public class VideoCapture extends UseCase {
@Nullable
public Location location;
}
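// A small sketch of attaching location metadata to a recording; 'lastKnownLocation'
// is an illustrative placeholder and may be null, in which case no geo tag is written.
//
//   Metadata metadata = new Metadata();
//   metadata.location = lastKnownLocation;
//   videoCapture.startRecording(fd, metadata, executor, listener);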
private final class VideoSavedListenerWrapper implements OnVideoSavedListener {
@NonNull Executor mExecutor;
@NonNull OnVideoSavedListener mOnVideoSavedListener;
VideoSavedListenerWrapper(@NonNull Executor executor,
@NonNull OnVideoSavedListener onVideoSavedListener) {
mExecutor = executor;
mOnVideoSavedListener = onVideoSavedListener;
}
@Override
// Begin Signal Custom Code Block
public void onVideoSaved(@NonNull FileDescriptor file) {
// End Signal Custom Code Block
try {
mExecutor.execute(() -> mOnVideoSavedListener.onVideoSaved(file));
} catch (RejectedExecutionException e) {
Log.e(TAG, "Unable to post to the supplied executor.");
}
}
@Override
public void onError(@NonNull VideoCaptureError videoCaptureError, @NonNull String message,
@Nullable Throwable cause) {
try {
mExecutor.execute(
() -> mOnVideoSavedListener.onError(videoCaptureError, message, cause));
} catch (RejectedExecutionException e) {
Log.e(TAG, "Unable to post to the supplied executor.");
}
}
}
}
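// Wiring sketch: in CameraX alpha06 a use case only becomes active once it is bound
// to a lifecycle, e.g. CameraX.bindToLifecycle(lifecycleOwner, videoCapture); the
// actual call site for this class lives outside this file.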
View File
@ -24,11 +24,11 @@ dependencyVerification {
['androidx.asynclayoutinflater:asynclayoutinflater:1.0.0',
'f7eab60c57addd94bb06275832fe7600611beaaae1a1ec597c231956faf96c8b'],
['androidx.camera:camera-camera2:1.0.0-alpha04',
'b7897230aec96365d675712c92f5edcb8b464badfd61788c8f956ec2d6e49bfe'],
['androidx.camera:camera-camera2:1.0.0-alpha06',
'e50f20deb950ffebcd4d1de5408ef7a5404bec80ec77119e05663c890739b903'],
['androidx.camera:camera-core:1.0.0-alpha04',
'e1c70de55600a0caf826eb4f8a75c96c5ff8f0b626bf08413d31e80ffa55f8ba'],
['androidx.camera:camera-core:1.0.0-alpha06',
'0096cabe539d9b4288f406acfb44264b137ebd600e38e33504ff425c979016c9'],
['androidx.cardview:cardview:1.0.0',
'1193c04c22a3d6b5946dae9f4e8c59d6adde6a71b6bd5d87fb99d82dda1afec7'],