diff --git a/BUILD.md b/BUILD.md
index 451d8f1..d068994 100644
--- a/BUILD.md
+++ b/BUILD.md
@@ -45,16 +45,16 @@ $ git clone --depth=1 https://git.ffmpeg.org/ffmpeg.git
If you want Gocryptfs support, you need to download OpenSSL:
```
$ cd ../libgocryptfs
-$ wget https://www.openssl.org/source/openssl-1.1.1q.tar.gz
+$ wget https://www.openssl.org/source/openssl-1.1.1t.tar.gz
```
Verify OpenSSL signature:
```
-$ wget https://www.openssl.org/source/openssl-1.1.1q.tar.gz.asc
-$ gpg --verify openssl-1.1.1q.tar.gz.asc openssl-1.1.1q.tar.gz
+$ wget https://www.openssl.org/source/openssl-1.1.1t.tar.gz.asc
+$ gpg --verify openssl-1.1.1t.tar.gz.asc openssl-1.1.1t.tar.gz
```
Continue **ONLY** if the signature is **VALID**.
```
-$ tar -xzf openssl-1.1.1q.tar.gz
+$ tar -xzf openssl-1.1.1t.tar.gz
```
If you want CryFS support, initialize libcryfs:
```
@@ -76,7 +76,7 @@ $ ./build.sh ffmpeg
This step is only required if you want Gocryptfs support.
```
$ cd app/libgocryptfs
-$ OPENSSL_PATH="./openssl-1.1.1q" ./build.sh
+$ OPENSSL_PATH="./openssl-1.1.1t" ./build.sh
```
## Compile APKs
 Gradle builds libgocryptfs and libcryfs by default.
diff --git a/app/build.gradle b/app/build.gradle
index 8c1d714..1dc4977 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -81,16 +81,23 @@ android {
path file('CMakeLists.txt')
}
}
+
+ sourceSets {
+ main {
+ java {
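+ // Keep the unmodified reference sources under originals/ out of the compilation.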
+ exclude 'androidx/camera/video/originals/**'
+ }
+ }
+ }
}
dependencies {
implementation project(":libpdfviewer:app")
- implementation fileTree(dir: "libs", include: ["*.jar"])
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
implementation 'androidx.core:core-ktx:1.9.0'
implementation "androidx.appcompat:appcompat:1.6.1"
implementation "androidx.constraintlayout:constraintlayout:2.1.4"
- def lifecycle_version = "2.5.1"
+ def lifecycle_version = "2.6.0"
implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:$lifecycle_version"
implementation "androidx.lifecycle:lifecycle-process:$lifecycle_version"
@@ -101,15 +108,19 @@ dependencies {
implementation "com.github.bumptech.glide:glide:4.13.2"
implementation "androidx.biometric:biometric-ktx:1.2.0-alpha05"
- def exoplayer_version = "2.18.2"
+ def exoplayer_version = "2.18.4"
implementation "com.google.android.exoplayer:exoplayer-core:$exoplayer_version"
implementation "com.google.android.exoplayer:exoplayer-ui:$exoplayer_version"
implementation "androidx.concurrent:concurrent-futures:1.1.0"
- def camerax_version = "1.2.0-beta02"
+ def camerax_version = "1.3.0-alpha04"
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
implementation "androidx.camera:camera-view:$camerax_version"
implementation "androidx.camera:camera-extensions:$camerax_version"
+
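+ // AutoValue annotation processing is needed by the CameraX video classes copied into this
+ // module (e.g. SucklessRecorder imports com.google.auto.value.AutoValue).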
+ def autoValueVersion = "1.10.1"
+ implementation "com.google.auto.value:auto-value-annotations:$autoValueVersion"
+ annotationProcessor "com.google.auto.value:auto-value:$autoValueVersion"
}
diff --git a/app/src/main/java/androidx/camera/video/MediaMuxer.kt b/app/src/main/java/androidx/camera/video/MediaMuxer.kt
new file mode 100644
index 0000000..f4d0df9
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/MediaMuxer.kt
@@ -0,0 +1,14 @@
+package androidx.camera.video
+
+import android.media.MediaCodec
+import android.media.MediaFormat
+import java.nio.ByteBuffer
+
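+/**
+ * Minimal muxer abstraction whose methods mirror those of android.media.MediaMuxer, allowing the
+ * recorder to hand encoded samples to whatever container writer the caller supplies.
+ */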
+interface MediaMuxer {
+ fun setOrientationHint(degree: Int)
+ fun release()
+ fun addTrack(mediaFormat: MediaFormat): Int
+ fun start()
+ fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo)
+ fun stop()
+}
\ No newline at end of file
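
An implementation of this interface can simply delegate to the platform `android.media.MediaMuxer`, whose method signatures it mirrors. The sketch below is illustrative only and not part of this diff (the class name is hypothetical):
```
import android.media.MediaCodec
import android.media.MediaFormat
import androidx.camera.video.MediaMuxer
import java.nio.ByteBuffer

// Hypothetical adapter: forwards every call to the platform muxer.
class PlatformMediaMuxer(private val delegate: android.media.MediaMuxer) : MediaMuxer {
    override fun setOrientationHint(degree: Int) = delegate.setOrientationHint(degree)
    override fun release() = delegate.release()
    override fun addTrack(mediaFormat: MediaFormat): Int = delegate.addTrack(mediaFormat)
    override fun start() = delegate.start()
    override fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) =
        delegate.writeSampleData(trackIndex, buffer, bufferInfo)
    override fun stop() = delegate.stop()
}
```
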
diff --git a/app/src/main/java/androidx/camera/video/MuxerOutputOptions.kt b/app/src/main/java/androidx/camera/video/MuxerOutputOptions.kt
new file mode 100644
index 0000000..e3b0f4c
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/MuxerOutputOptions.kt
@@ -0,0 +1,16 @@
+package androidx.camera.video
+
+import android.location.Location
+
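+/**
+ * OutputOptions implementation that simply carries the caller-provided MediaMuxer: it declares no
+ * file size limit, no duration limit and no location, leaving all output handling to the muxer.
+ */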
+class MuxerOutputOptions(private val mediaMuxer: MediaMuxer): OutputOptions(MuxerOutputOptionsInternal()) {
+
+ private class MuxerOutputOptionsInternal: OutputOptionsInternal() {
+ override fun getFileSizeLimit(): Long = FILE_SIZE_UNLIMITED.toLong()
+
+ override fun getDurationLimitMillis(): Long = DURATION_UNLIMITED.toLong()
+
+ override fun getLocation(): Location? = null
+ }
+
+ fun getMediaMuxer(): MediaMuxer = mediaMuxer
+}
\ No newline at end of file
diff --git a/app/src/main/java/androidx/camera/video/SucklessPendingRecording.java b/app/src/main/java/androidx/camera/video/SucklessPendingRecording.java
new file mode 100644
index 0000000..4e17eb1
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/SucklessPendingRecording.java
@@ -0,0 +1,181 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video;
+
+import android.Manifest;
+import android.annotation.SuppressLint;
+import android.content.Context;
+
+import androidx.annotation.CheckResult;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.RequiresPermission;
+import androidx.camera.core.impl.utils.ContextUtil;
+import androidx.core.content.PermissionChecker;
+import androidx.core.util.Consumer;
+import androidx.core.util.Preconditions;
+
+import java.util.concurrent.Executor;
+
+/**
+ * A recording that can be started at a future time.
+ *
+ * <p>A pending recording allows for configuration of a recording before it is started. Once a
+ * pending recording is started with {@link #start(Executor, Consumer)}, any changes to the pending
+ * recording will not affect the actual recording; any modifications to the recording will need
+ * to occur through the controls of the {@link SucklessRecording} class returned by
+ * {@link #start(Executor, Consumer)}.
+ *
+ * <p>A pending recording can be created using one of the {@link Recorder} methods for starting a
+ * recording such as {@link Recorder#prepareRecording(Context, MediaStoreOutputOptions)}.
+
+ * <p>There may be more settings that can only be changed per-recorder instead of per-recording,
+ * because it requires expensive operations like reconfiguring the camera. For those settings, use
+ * the {@link Recorder.Builder} methods to configure before creating the {@link Recorder}
+ * instance, then create the pending recording with it.
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+@SuppressLint("RestrictedApi")
+public final class SucklessPendingRecording {
+
+ private final Context mContext;
+ private final SucklessRecorder mRecorder;
+ private final OutputOptions mOutputOptions;
+ private Consumer<VideoRecordEvent> mEventListener;
+ private Executor mListenerExecutor;
+
+ private boolean mAudioEnabled = false;
+
+ SucklessPendingRecording(@NonNull Context context, @NonNull SucklessRecorder recorder,
+ @NonNull OutputOptions options) {
+ // Application context is sufficient for all our needs, so store that to avoid leaking
+ // unused resources. For attribution, ContextUtil.getApplicationContext() will retain the
+ // attribution tag from the original context.
+ mContext = ContextUtil.getApplicationContext(context);
+ mRecorder = recorder;
+ mOutputOptions = options;
+ }
+
+ /**
+ * Returns an application context which was retrieved from the {@link Context} used to
+ * create this object.
+ */
+ @NonNull
+ Context getApplicationContext() {
+ return mContext;
+ }
+
+ @NonNull
+ SucklessRecorder getRecorder() {
+ return mRecorder;
+ }
+
+ @NonNull
+ OutputOptions getOutputOptions() {
+ return mOutputOptions;
+ }
+
+ @Nullable
+ Executor getListenerExecutor() {
+ return mListenerExecutor;
+ }
+
+ @Nullable
+ Consumer<VideoRecordEvent> getEventListener() {
+ return mEventListener;
+ }
+
+ boolean isAudioEnabled() {
+ return mAudioEnabled;
+ }
+
+ /**
+ * Enables audio to be recorded for this recording.
+ *
+ * This method must be called prior to {@link #start(Executor, Consumer)} to enable audio
+ * in the recording. If this method is not called, the {@link SucklessRecording} generated by
+ * {@link #start(Executor, Consumer)} will not contain audio, and
+ * {@link AudioStats#getAudioState()} will always return
+ * {@link AudioStats#AUDIO_STATE_DISABLED} for all {@link RecordingStats} sent to the listener
+ * set passed to {@link #start(Executor, Consumer)}.
+ *
+ * <p>Recording with audio requires the {@link android.Manifest.permission#RECORD_AUDIO}
+ * permission; without it, recording will fail at {@link #start(Executor, Consumer)} with an
+ * {@link IllegalStateException}.
+ *
+ * @return this pending recording
+ * @throws IllegalStateException if the {@link Recorder} this recording is associated to
+ * doesn't support audio.
+ * @throws SecurityException if the {@link Manifest.permission#RECORD_AUDIO} permission
+ * is denied for the current application.
+ */
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @NonNull
+ public SucklessPendingRecording withAudioEnabled() {
+ // Check permissions and throw a security exception if RECORD_AUDIO is not granted.
+ if (PermissionChecker.checkSelfPermission(mContext, Manifest.permission.RECORD_AUDIO)
+ == PermissionChecker.PERMISSION_DENIED) {
+ throw new SecurityException("Attempted to enable audio for recording but application "
+ + "does not have RECORD_AUDIO permission granted.");
+ }
+ Preconditions.checkState(mRecorder.isAudioSupported(), "The Recorder this recording is "
+ + "associated to doesn't support audio.");
+ mAudioEnabled = true;
+ return this;
+ }
+
+ /**
+ * Starts the recording, making it an active recording.
+ *
+ * <p>Only a single recording can be active at a time, so if another recording is active,
+ * this will throw an {@link IllegalStateException}.
+ *
+ * <p>If there are no errors starting the recording, the returned {@link SucklessRecording}
+ * can be used to {@link SucklessRecording#pause() pause}, {@link SucklessRecording#resume() resume},
+ * or {@link SucklessRecording#stop() stop} the recording.
+ *
+ * <p>Upon successfully starting the recording, a {@link VideoRecordEvent.Start} event will
+ * be the first event sent to the provided event listener.
+ *
+ * <p>If errors occur while starting the recording, a {@link VideoRecordEvent.Finalize} event
+ * will be the first event sent to the provided listener, and information about the error can
+ * be found in that event's {@link VideoRecordEvent.Finalize#getError()} method. The returned
+ * {@link SucklessRecording} will be in a finalized state, and all controls will be no-ops.
+ *
+ * <p>If the returned {@link SucklessRecording} is garbage collected, the recording will be
+ * automatically stopped. A reference to the active recording must be maintained as long as
+ * the recording needs to be active.
+ *
+ * @throws IllegalStateException if the associated Recorder currently has an unfinished
+ * active recording.
+ * @param listenerExecutor the executor that the event listener will be run on.
+ * @param listener the event listener to handle video record events.
+ */
+ @NonNull
+ @CheckResult
+ public SucklessRecording start(
+ @NonNull Executor listenerExecutor,
+ @NonNull Consumer<VideoRecordEvent> listener) {
+ Preconditions.checkNotNull(listenerExecutor, "Listener Executor can't be null.");
+ Preconditions.checkNotNull(listener, "Event listener can't be null");
+ mListenerExecutor = listenerExecutor;
+ mEventListener = listener;
+ return mRecorder.start(this);
+ }
+}
+
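
A minimal usage sketch, assuming a `SucklessRecorder` that is already attached to a `VideoCapture` use case (its `Builder` is not shown in this excerpt) and any `MediaMuxer` implementation; the function name and event handling are illustrative only:
```
import android.content.Context
import androidx.camera.video.MediaMuxer
import androidx.camera.video.MuxerOutputOptions
import androidx.camera.video.SucklessRecorder
import androidx.camera.video.SucklessRecording
import androidx.camera.video.VideoRecordEvent
import androidx.core.content.ContextCompat

// Keep a reference to the returned SucklessRecording: it is stopped automatically if garbage collected.
fun startMuxedRecording(context: Context, recorder: SucklessRecorder, muxer: MediaMuxer): SucklessRecording =
    recorder.prepareRecording(context, MuxerOutputOptions(muxer))
        .withAudioEnabled() // requires the RECORD_AUDIO runtime permission
        .start(ContextCompat.getMainExecutor(context)) { event: VideoRecordEvent ->
            if (event is VideoRecordEvent.Finalize) {
                // The recording has ended; event.error is ERROR_NONE on success.
            }
        }
```
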
diff --git a/app/src/main/java/androidx/camera/video/SucklessRecorder.java b/app/src/main/java/androidx/camera/video/SucklessRecorder.java
new file mode 100644
index 0000000..7693bf2
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/SucklessRecorder.java
@@ -0,0 +1,3004 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video;
+
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_DURATION_LIMIT_REACHED;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_ENCODING_FAILED;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_FILE_SIZE_LIMIT_REACHED;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_INVALID_OUTPUT_OPTIONS;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_NONE;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_NO_VALID_DATA;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_RECORDER_ERROR;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_SOURCE_INACTIVE;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_UNKNOWN;
+import static androidx.camera.video.VideoRecordEvent.Finalize.VideoRecordError;
+import static androidx.camera.video.internal.DebugUtils.readableUs;
+import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioEncoderConfig;
+import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioMimeInfo;
+import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioSourceSettings;
+
+import android.Manifest;
+import android.annotation.SuppressLint;
+import android.content.ContentValues;
+import android.content.Context;
+import android.media.MediaRecorder;
+import android.media.MediaScannerConnection;
+import android.net.Uri;
+import android.os.Build;
+import android.os.ParcelFileDescriptor;
+import android.provider.MediaStore;
+import android.util.Range;
+import android.util.Size;
+import android.view.Surface;
+
+import androidx.annotation.GuardedBy;
+import androidx.annotation.IntRange;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.RequiresPermission;
+import androidx.annotation.RestrictTo;
+import androidx.annotation.VisibleForTesting;
+import androidx.camera.core.AspectRatio;
+import androidx.camera.core.Logger;
+import androidx.camera.core.SurfaceRequest;
+import androidx.camera.core.impl.CamcorderProfileProxy;
+import androidx.camera.core.impl.MutableStateObservable;
+import androidx.camera.core.impl.Observable;
+import androidx.camera.core.impl.StateObservable;
+import androidx.camera.core.impl.Timebase;
+import androidx.camera.core.impl.annotation.ExecutedBy;
+import androidx.camera.core.impl.utils.CloseGuardHelper;
+import androidx.camera.core.impl.utils.executor.CameraXExecutors;
+import androidx.camera.core.impl.utils.futures.FutureCallback;
+import androidx.camera.core.impl.utils.futures.Futures;
+import androidx.camera.core.internal.utils.ArrayRingBuffer;
+import androidx.camera.core.internal.utils.RingBuffer;
+import androidx.camera.video.StreamInfo.StreamState;
+import androidx.camera.video.internal.AudioSource;
+import androidx.camera.video.internal.AudioSourceAccessException;
+import androidx.camera.video.internal.compat.quirk.DeactivateEncoderSurfaceBeforeStopEncoderQuirk;
+import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
+import androidx.camera.video.internal.compat.quirk.EncoderNotUsePersistentInputSurfaceQuirk;
+import androidx.camera.video.internal.config.MimeInfo;
+import androidx.camera.video.internal.encoder.AudioEncoderConfig;
+import androidx.camera.video.internal.encoder.BufferCopiedEncodedData;
+import androidx.camera.video.internal.encoder.EncodeException;
+import androidx.camera.video.internal.encoder.EncodedData;
+import androidx.camera.video.internal.encoder.Encoder;
+import androidx.camera.video.internal.encoder.EncoderCallback;
+import androidx.camera.video.internal.encoder.EncoderFactory;
+import androidx.camera.video.internal.encoder.SucklessEncoderImpl;
+import androidx.camera.video.internal.encoder.InvalidConfigException;
+import androidx.camera.video.internal.encoder.OutputConfig;
+import androidx.camera.video.internal.encoder.VideoEncoderInfo;
+import androidx.camera.video.internal.utils.OutputUtil;
+import androidx.concurrent.futures.CallbackToFutureAdapter;
+import androidx.core.util.Consumer;
+import androidx.core.util.Preconditions;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.util.concurrent.ListenableFuture;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * An implementation of {@link VideoOutput} for starting video recordings that are saved
+ * to a {@link File}, {@link ParcelFileDescriptor}, or {@link MediaStore}.
+ *
+ * A recorder can be used to save the video frames sent from the {@link VideoCapture} use case
+ * in common recording formats such as MPEG4.
+ *
+ * <p>Usage example of setting up {@link VideoCapture} with a recorder as output:
+ *
+ * ProcessCameraProvider cameraProvider = ...;
+ * CameraSelector cameraSelector = ...;
+ * ...
+ * // Create our preview to show on screen
+ * Preview preview = new Preview.Builder().build();
+ * // Create the video capture use case with a Recorder as the output
+ * VideoCapture<Recorder> videoCapture = VideoCapture.withOutput(new Recorder.Builder().build());
+ *
+ * // Bind use cases to Fragment/Activity lifecycle
+ * cameraProvider.bindToLifecycle(this, cameraSelector, preview, videoCapture);
+ *
+ *
+ * Once the recorder is attached to a video source as a {@link VideoOutput}, e.g. using it to
+ * create a {@link VideoCapture} by calling {@link VideoCapture#withOutput(VideoOutput)}, a new
+ * recording can be generated with one of the prepareRecording methods, such as
+ * {@link #prepareRecording(Context, MediaStoreOutputOptions)}. The {@link SucklessPendingRecording} class
+ * then can be used to adjust per-recording settings and to start the recording. It also requires
+ * passing a listener to {@link SucklessPendingRecording#start(Executor, Consumer)} to
+ * listen for {@link VideoRecordEvent}s such as {@link VideoRecordEvent.Start},
+ * {@link VideoRecordEvent.Pause}, {@link VideoRecordEvent.Resume}, and
+ * {@link VideoRecordEvent.Finalize}. This listener will also receive regular recording status
+ * updates via the {@link VideoRecordEvent.Status} event.
+ *
+ * <p>Attaching a single Recorder instance to multiple video sources at the same time may cause
+ * unexpected behaviors and is not recommended.
+ *
+ * <p>A recorder can also capture and save audio alongside video. The audio must be explicitly
+ * enabled with {@link SucklessPendingRecording#withAudioEnabled()} before starting the recording.
+ *
+ * @see VideoCapture#withOutput(VideoOutput)
+ * @see SucklessPendingRecording
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+@SuppressLint("RestrictedApi")
+public final class SucklessRecorder implements VideoOutput {
+
+ private static final String TAG = "Recorder";
+
+ enum State {
+ /**
+ * The Recorder is being configured.
+ *
+ * <p>The Recorder will reach this state whenever it is waiting for a surface request.
+ */
+ CONFIGURING,
+ /**
+ * There's a recording waiting for being started.
+ *
+ * <p>The Recorder will reach this state whenever a recording can not be serviced
+ * immediately.
+ */
+ PENDING_RECORDING,
+ /**
+ * There's a recording waiting for being paused.
+ *
+ * <p>The Recorder will reach this state whenever a recording can not be serviced
+ * immediately.
+ */
+ PENDING_PAUSED,
+ /**
+ * The Recorder is idling and ready to start a new recording.
+ */
+ IDLING,
+ /**
+ * There's a running recording and the Recorder is producing output.
+ */
+ RECORDING,
+ /**
+ * There's a running recording and it's paused.
+ */
+ PAUSED,
+ /**
+ * There's a recording being stopped.
+ */
+ STOPPING,
+ /**
+ * There's a running recording and the Recorder is being reset.
+ */
+ RESETTING,
+ /**
+ * The Recorder encountered errors and any attempted operation will throw an
+ * {@link IllegalStateException}. Users can handle the error by monitoring
+ * {@link VideoRecordEvent}.
+ */
+ ERROR
+ }
+
+ enum AudioState {
+ /**
+ * The audio is being initialized.
+ */
+ INITIALIZING,
+ /**
+ * The audio has been initialized and is waiting for a new recording to be started.
+ */
+ IDLING,
+ /**
+ * Audio recording is disabled for the running recording.
+ */
+ DISABLED,
+ /**
+ * The recording is being recorded with audio.
+ */
+ ACTIVE,
+ /**
+ * The audio encoder encountered errors.
+ */
+ ERROR_ENCODER,
+ /**
+ * The audio source encountered errors.
+ */
+ ERROR_SOURCE,
+ }
+
+ /**
+ * The subset of states considered pending states.
+ */
+ private static final Set<State> PENDING_STATES =
+ Collections.unmodifiableSet(EnumSet.of(State.PENDING_RECORDING, State.PENDING_PAUSED));
+
+ /**
+ * The subset of states which are valid non-pending states while in a pending state.
+ *
+ * All other states should not be possible if in a PENDING_* state. Pending states are
+ * meant to be transient states that occur while waiting for another operation to finish.
+ */
+ private static final Set<State> VALID_NON_PENDING_STATES_WHILE_PENDING =
+ Collections.unmodifiableSet(EnumSet.of(
+ State.CONFIGURING, // Waiting for camera before starting recording.
+ State.IDLING, // Waiting for sequential executor to start pending recording.
+ State.RESETTING, // Waiting for camera/encoders to reset before starting.
+ State.STOPPING, // Waiting for previous recording to finalize before starting.
+ State.ERROR // Waiting for re-initialization before starting.
+ ));
+
+ /**
+ * Default quality selector for recordings.
+ *
+ * The default quality selector chooses a video quality suitable for recordings based on
+ * device and compatibility constraints. It is equivalent to:
+ * <pre>{@code
+ * QualitySelector.fromOrderedList(Arrays.asList(Quality.FHD, Quality.HD, Quality.SD),
+ * FallbackStrategy.higherQualityOrLowerThan(Quality.FHD));
+ * }</pre>
+ *
+ * @see QualitySelector
+ */
+ public static final QualitySelector DEFAULT_QUALITY_SELECTOR =
+ QualitySelector.fromOrderedList(Arrays.asList(Quality.FHD, Quality.HD, Quality.SD),
+ FallbackStrategy.higherQualityOrLowerThan(Quality.FHD));
+
+ private static final VideoSpec VIDEO_SPEC_DEFAULT =
+ VideoSpec.builder()
+ .setQualitySelector(DEFAULT_QUALITY_SELECTOR)
+ .setAspectRatio(AspectRatio.RATIO_DEFAULT)
+ .build();
+ private static final MediaSpec MEDIA_SPEC_DEFAULT =
+ MediaSpec.builder()
+ .setOutputFormat(MediaSpec.OUTPUT_FORMAT_AUTO)
+ .setVideoSpec(VIDEO_SPEC_DEFAULT)
+ .build();
+ @SuppressWarnings("deprecation")
+ private static final String MEDIA_COLUMN = MediaStore.Video.Media.DATA;
+ private static final Exception PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE =
+ new RuntimeException("The video frame producer became inactive before any "
+ + "data was received.");
+ private static final int PENDING = 1;
+ private static final int NOT_PENDING = 0;
+ private static final long SOURCE_NON_STREAMING_TIMEOUT_MS = 1000L;
+ // The audio data is expected to be less than 1 kB, the value of the cache size is used to limit
+ // the memory used within an acceptable range.
+ private static final int AUDIO_CACHE_SIZE = 60;
+ @VisibleForTesting
+ static final EncoderFactory DEFAULT_ENCODER_FACTORY = SucklessEncoderImpl::new;
+ private static final Executor AUDIO_EXECUTOR =
+ CameraXExecutors.newSequentialExecutor(CameraXExecutors.ioExecutor());
+
+ private final MutableStateObservable<StreamInfo> mStreamInfo;
+ // Used only by getExecutor()
+ private final Executor mUserProvidedExecutor;
+ // May be equivalent to mUserProvidedExecutor or an internal executor if the user did not
+ // provide an executor.
+ private final Executor mExecutor;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ final Executor mSequentialExecutor;
+ private final EncoderFactory mVideoEncoderFactory;
+ private final EncoderFactory mAudioEncoderFactory;
+ private final Object mLock = new Object();
+ private final boolean mEncoderNotUsePersistentInputSurface = DeviceQuirks.get(
+ EncoderNotUsePersistentInputSurfaceQuirk.class) != null;
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ // Members only accessed when holding mLock //
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ @GuardedBy("mLock")
+ private State mState = State.CONFIGURING;
+ // Tracks the underlying state when in a PENDING_* state. When not in a PENDING_* state, this
+ // should be null.
+ @GuardedBy("mLock")
+ private State mNonPendingState = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @GuardedBy("mLock")
+ int mStreamId = StreamInfo.STREAM_ID_ANY;
+ @GuardedBy("mLock")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ RecordingRecord mActiveRecordingRecord = null;
+ // A recording that will be started once the previous recording has finalized or the
+ // recorder has finished initializing.
+ @GuardedBy("mLock")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ RecordingRecord mPendingRecordingRecord = null;
+ @GuardedBy("mLock")
+ private long mLastGeneratedRecordingId = 0L;
+ //--------------------------------------------------------------------------------------------//
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ // Members only accessed on mSequentialExecutor //
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ private RecordingRecord mInProgressRecording = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ boolean mInProgressRecordingStopping = false;
+ private SurfaceRequest.TransformationInfo mSurfaceTransformationInfo = null;
+ private CamcorderProfileProxy mResolvedCamcorderProfile = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ final List<ListenableFuture<Void>> mEncodingFutures = new ArrayList<>();
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Integer mAudioTrackIndex = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Integer mVideoTrackIndex = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ SurfaceRequest mLatestSurfaceRequest;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Timebase mVideoSourceTimebase;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Surface mLatestSurface = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Surface mActiveSurface = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ MediaMuxer mMediaMuxer = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ final MutableStateObservable<MediaSpec> mMediaSpec;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ AudioSource mAudioSource = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Encoder mVideoEncoder = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ OutputConfig mVideoOutputConfig = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Encoder mAudioEncoder = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ OutputConfig mAudioOutputConfig = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ AudioState mAudioState = AudioState.INITIALIZING;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @NonNull
+ Uri mOutputUri = Uri.EMPTY;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mRecordingBytes = 0L;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mRecordingDurationNs = 0L;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mFirstRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ int mFirstRecordingVideoBitrate = 0;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Range<Integer> mVideoEncoderBitrateRange = null;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mFirstRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mPreviousRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mPreviousRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mFileSizeLimitInBytes = OutputOptions.FILE_SIZE_UNLIMITED;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mDurationLimitNs = OutputOptions.DURATION_UNLIMITED;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @VideoRecordError
+ int mRecordingStopError = ERROR_UNKNOWN;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Throwable mRecordingStopErrorCause = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ EncodedData mPendingFirstVideoData = null;
+ // A cache that hold audio data created before the muxer starts to prevent A/V out of sync in
+ // the beginning of the recording.
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @NonNull
+ final RingBuffer<EncodedData> mPendingAudioRingBuffer = new ArrayRingBuffer<>(
+ AUDIO_CACHE_SIZE);
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Throwable mAudioErrorCause = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ boolean mIsAudioSourceSilenced = false;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ SourceState mSourceState = SourceState.INACTIVE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ ScheduledFuture<?> mSourceNonStreamingTimeout = null;
+ // The Recorder has to be reset first before being configured again.
+ private boolean mNeedsReset = false;
+ @NonNull
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ VideoEncoderSession mVideoEncoderSession;
+ @Nullable
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ VideoEncoderSession mVideoEncoderSessionToRelease = null;
+ //--------------------------------------------------------------------------------------------//
+
+ SucklessRecorder(@Nullable Executor executor, @NonNull MediaSpec mediaSpec,
+ @NonNull EncoderFactory videoEncoderFactory,
+ @NonNull EncoderFactory audioEncoderFactory) {
+ mUserProvidedExecutor = executor;
+ mExecutor = executor != null ? executor : CameraXExecutors.ioExecutor();
+ mSequentialExecutor = CameraXExecutors.newSequentialExecutor(mExecutor);
+
+ mMediaSpec = MutableStateObservable.withInitialState(composeRecorderMediaSpec(mediaSpec));
+ mStreamInfo = MutableStateObservable.withInitialState(
+ StreamInfo.of(mStreamId, internalStateToStreamState(mState)));
+ mVideoEncoderFactory = videoEncoderFactory;
+ mAudioEncoderFactory = audioEncoderFactory;
+ mVideoEncoderSession =
+ new VideoEncoderSession(mVideoEncoderFactory, mSequentialExecutor, mExecutor);
+ }
+
+ @Override
+ public void onSurfaceRequested(@NonNull SurfaceRequest request) {
+ onSurfaceRequested(request, Timebase.UPTIME);
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ public void onSurfaceRequested(@NonNull SurfaceRequest request, @NonNull Timebase timebase) {
+ synchronized (mLock) {
+ Logger.d(TAG, "Surface is requested in state: " + mState + ", Current surface: "
+ + mStreamId);
+ if (mState == State.ERROR) {
+ setState(State.CONFIGURING);
+ }
+ }
+ mSequentialExecutor.execute(() -> onSurfaceRequestedInternal(request, timebase));
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ @NonNull
+ public Observable<MediaSpec> getMediaSpec() {
+ return mMediaSpec;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ @NonNull
+ public Observable<StreamInfo> getStreamInfo() {
+ return mStreamInfo;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ public void onSourceStateChanged(@NonNull SourceState newState) {
+ mSequentialExecutor.execute(() -> onSourceStateChangedInternal(newState));
+ }
+
+ @NonNull
+ public SucklessPendingRecording prepareRecording(@NonNull Context context, @NonNull MuxerOutputOptions outputOptions) {
+ return prepareRecordingInternal(context, outputOptions);
+ }
+
+ @NonNull
+ private SucklessPendingRecording prepareRecordingInternal(@NonNull Context context,
+ @NonNull OutputOptions options) {
+ Preconditions.checkNotNull(options, "The OutputOptions cannot be null.");
+ return new SucklessPendingRecording(context, this, options);
+ }
+
+ /**
+ * Gets the quality selector of this Recorder.
+ *
+ * @return the {@link QualitySelector} provided to
+ * {@link Builder#setQualitySelector(QualitySelector)} on the builder used to create this
+ * recorder, or the default value of {@link Recorder#DEFAULT_QUALITY_SELECTOR} if no quality
+ * selector was provided.
+ */
+ @NonNull
+ public QualitySelector getQualitySelector() {
+ return getObservableData(mMediaSpec).getVideoSpec().getQualitySelector();
+ }
+
+ /**
+ * Gets the audio source of this Recorder.
+ *
+ * @return the value provided to {@link Builder#setAudioSource(int)} on the builder used to
+ * create this recorder, or the default value of {@link AudioSpec#SOURCE_AUTO} if no source was
+ * set.
+ */
+ @AudioSpec.Source
+ int getAudioSource() {
+ return getObservableData(mMediaSpec).getAudioSpec().getSource();
+ }
+
+ /**
+ * Returns the executor provided to the builder for this recorder.
+ *
+ * @return the {@link Executor} provided to {@link Builder#setExecutor(Executor)} on the
+ * builder used to create this recorder. If no executor was provided, returns {@code null}.
+ */
+ @Nullable
+ public Executor getExecutor() {
+ return mUserProvidedExecutor;
+ }
+
+ /**
+ * Gets the target video encoding bitrate of this Recorder.
+ *
+ * @return the value provided to {@link Builder#setTargetVideoEncodingBitRate(int)} on the
+ * builder used to create this recorder. Returns 0, if
+ * {@link Builder#setTargetVideoEncodingBitRate(int)} is not called.
+ */
+ public int getTargetVideoEncodingBitRate() {
+ return getObservableData(mMediaSpec).getVideoSpec().getBitrate().getLower();
+ }
+
+ /**
+ * Gets the aspect ratio of this Recorder.
+ *
+ * @return the value from {@link Builder#setAspectRatio(int)} or
+ * {@link AspectRatio#RATIO_DEFAULT} if not set.
+ */
+ @AspectRatio.Ratio
+ public int getAspectRatio() {
+ return getObservableData(mMediaSpec).getVideoSpec().getAspectRatio();
+ }
+
+ /**
+ * Starts a pending recording and returns an active recording instance.
+ *
+ * If the Recorder is already running a recording, an {@link IllegalStateException} will
+ * be thrown when calling this method.
+ *
+ * <p>If the video encoder hasn't been setup with {@link #onSurfaceRequested(SurfaceRequest)}
+ * , the {@link SucklessPendingRecording} specified will be started once the video encoder setup
+ * completes. The recording will be considered active, so before it's finalized, an
+ * {@link IllegalStateException} will be thrown if this method is called for a second time.
+ *
+ * <p>If the video producer stops sending frames to the provided surface, the recording will
+ * be automatically finalized with {@link VideoRecordEvent.Finalize#ERROR_SOURCE_INACTIVE}.
+ * This can happen, for example, when the {@link VideoCapture} this Recorder is associated
+ * with is detached from the camera.
+ *
+ * @throws IllegalStateException if there's an active recording, or the audio is
+ * {@link SucklessPendingRecording#withAudioEnabled() enabled} for the
+ * recording but
+ * {@link android.Manifest.permission#RECORD_AUDIO} is not
+ * granted.
+ */
+ @NonNull
+ SucklessRecording start(@NonNull SucklessPendingRecording pendingRecording) {
+ Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
+ RecordingRecord alreadyInProgressRecording = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ long recordingId;
+ synchronized (mLock) {
+ recordingId = ++mLastGeneratedRecordingId;
+ switch (mState) {
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ alreadyInProgressRecording = mActiveRecordingRecord;
+ break;
+ case PENDING_PAUSED:
+ // Fall-through
+ case PENDING_RECORDING:
+ // There is already a recording pending that hasn't been stopped.
+ alreadyInProgressRecording =
+ Preconditions.checkNotNull(mPendingRecordingRecord);
+ break;
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ // Fall-through
+ case CONFIGURING:
+ // Fall-through
+ case ERROR:
+ // Fall-through
+ case IDLING:
+ if (mState == State.IDLING) {
+ Preconditions.checkState(
+ mActiveRecordingRecord == null
+ && mPendingRecordingRecord == null,
+ "Expected recorder to be idle but a recording is either "
+ + "pending or in progress.");
+ }
+ try {
+ RecordingRecord recordingRecord = RecordingRecord.from(pendingRecording,
+ recordingId);
+ recordingRecord.initializeRecording(
+ pendingRecording.getApplicationContext());
+ mPendingRecordingRecord = recordingRecord;
+ if (mState == State.IDLING) {
+ setState(State.PENDING_RECORDING);
+ mSequentialExecutor.execute(this::tryServicePendingRecording);
+ } else if (mState == State.ERROR) {
+ setState(State.PENDING_RECORDING);
+ // Retry initialization.
+ mSequentialExecutor.execute(() -> {
+ if (mLatestSurfaceRequest == null) {
+ throw new AssertionError(
+ "surface request is required to retry "
+ + "initialization.");
+ }
+ configureInternal(mLatestSurfaceRequest, mVideoSourceTimebase);
+ });
+ } else {
+ setState(State.PENDING_RECORDING);
+ // The recording will automatically start once the initialization
+ // completes.
+ }
+ } catch (IOException e) {
+ error = ERROR_INVALID_OUTPUT_OPTIONS;
+ errorCause = e;
+ }
+ break;
+ }
+ }
+
+ if (alreadyInProgressRecording != null) {
+ throw new IllegalStateException("A recording is already in progress. Previous "
+ + "recordings must be stopped before a new recording can be started.");
+ } else if (error != ERROR_NONE) {
+ Logger.e(TAG,
+ "Recording was started when the Recorder had encountered error " + errorCause);
+ // Immediately update the listener if the Recorder encountered an error.
+ finalizePendingRecording(RecordingRecord.from(pendingRecording, recordingId),
+ error, errorCause);
+ return SucklessRecording.createFinalizedFrom(pendingRecording, recordingId);
+ }
+
+ return SucklessRecording.from(pendingRecording, recordingId);
+ }
+
+ void pause(@NonNull SucklessRecording activeRecording) {
+ synchronized (mLock) {
+ if (!isSameRecording(activeRecording, mPendingRecordingRecord) && !isSameRecording(
+ activeRecording, mActiveRecordingRecord)) {
+ // If this Recording is no longer active, log and treat as a no-op.
+ // This is not technically an error since the recording can be finalized
+ // asynchronously.
+ Logger.d(TAG,
+ "pause() called on a recording that is no longer active: "
+ + activeRecording.getOutputOptions());
+ return;
+ }
+
+ switch (mState) {
+ case PENDING_RECORDING:
+ // The recording will automatically pause once the initialization completes.
+ setState(State.PENDING_PAUSED);
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ throw new IllegalStateException("Called pause() from invalid state: " + mState);
+ case RECORDING:
+ setState(State.PAUSED);
+ RecordingRecord finalActiveRecordingRecord = mActiveRecordingRecord;
+ mSequentialExecutor.execute(() -> pauseInternal(finalActiveRecordingRecord));
+ break;
+ case PENDING_PAUSED:
+ // Fall-through
+ case PAUSED:
+ // No-op when the recording is already paused.
+ break;
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ // If recorder is resetting or stopping, then pause is a no-op.
+ break;
+ case ERROR:
+ // In an error state, the recording will already be finalized. Treat as a
+ // no-op in pause()
+ break;
+ }
+ }
+ }
+
+ void resume(@NonNull SucklessRecording activeRecording) {
+ synchronized (mLock) {
+ if (!isSameRecording(activeRecording, mPendingRecordingRecord) && !isSameRecording(
+ activeRecording, mActiveRecordingRecord)) {
+ // If this Recording is no longer active, log and treat as a no-op.
+ // This is not technically an error since the recording can be finalized
+ // asynchronously.
+ Logger.d(TAG,
+ "resume() called on a recording that is no longer active: "
+ + activeRecording.getOutputOptions());
+ return;
+ }
+ switch (mState) {
+ case PENDING_PAUSED:
+ // The recording will automatically start once the initialization completes.
+ setState(State.PENDING_RECORDING);
+ break;
+ case CONFIGURING:
+ // Should not be able to resume when initializing. Should be in a PENDING state.
+ // Fall-through
+ case IDLING:
+ throw new IllegalStateException("Called resume() from invalid state: "
+ + mState);
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ // If recorder is stopping or resetting, then resume is a no-op.
+ // Fall-through
+ case PENDING_RECORDING:
+ // Fall-through
+ case RECORDING:
+ // No-op when the recording is running.
+ break;
+ case PAUSED:
+ setState(State.RECORDING);
+ RecordingRecord finalActiveRecordingRecord = mActiveRecordingRecord;
+ mSequentialExecutor.execute(() -> resumeInternal(finalActiveRecordingRecord));
+ break;
+ case ERROR:
+ // In an error state, the recording will already be finalized. Treat as a
+ // no-op in resume()
+ break;
+ }
+ }
+ }
+
+ void stop(@NonNull SucklessRecording activeRecording) {
+ RecordingRecord pendingRecordingToFinalize = null;
+ synchronized (mLock) {
+ if (!isSameRecording(activeRecording, mPendingRecordingRecord) && !isSameRecording(
+ activeRecording, mActiveRecordingRecord)) {
+ // If this Recording is no longer active, log and treat as a no-op.
+ // This is not technically an error since the recording can be finalized
+ // asynchronously.
+ Logger.d(TAG,
+ "stop() called on a recording that is no longer active: "
+ + activeRecording.getOutputOptions());
+ return;
+ }
+ switch (mState) {
+ case PENDING_RECORDING:
+ // Fall-through
+ case PENDING_PAUSED:
+ // Immediately finalize pending recording since it never started.
+ Preconditions.checkState(isSameRecording(activeRecording,
+ mPendingRecordingRecord));
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ restoreNonPendingState(); // Equivalent to setState(mNonPendingState)
+ break;
+ case STOPPING:
+ // Fall-through
+ case RESETTING:
+ // We are already resetting, likely due to an error that stopped the recording.
+ // Ensure this is the current active recording and treat as a no-op. The
+ // active recording will be cleared once stop/reset is complete.
+ Preconditions.checkState(isSameRecording(activeRecording,
+ mActiveRecordingRecord));
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ throw new IllegalStateException("Calling stop() while idling or initializing "
+ + "is invalid.");
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ setState(State.STOPPING);
+ long explicitlyStopTimeUs = TimeUnit.NANOSECONDS.toMicros(System.nanoTime());
+ RecordingRecord finalActiveRecordingRecord = mActiveRecordingRecord;
+ mSequentialExecutor.execute(() -> stopInternal(finalActiveRecordingRecord,
+ explicitlyStopTimeUs, ERROR_NONE, null));
+ break;
+ case ERROR:
+ // In an error state, the recording will already be finalized. Treat as a
+ // no-op in stop()
+ break;
+ }
+ }
+
+ if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, ERROR_NO_VALID_DATA,
+ new RuntimeException("Recording was stopped before any data could be "
+ + "produced."));
+ }
+ }
+
+ private void finalizePendingRecording(@NonNull RecordingRecord recordingToFinalize,
+ @VideoRecordError int error, @Nullable Throwable cause) {
+ recordingToFinalize.finalizeRecording(Uri.EMPTY);
+ recordingToFinalize.updateVideoRecordEvent(
+ VideoRecordEvent.finalizeWithError(
+ recordingToFinalize.getOutputOptions(),
+ RecordingStats.of(/*duration=*/0L,
+ /*bytes=*/0L,
+ AudioStats.of(AudioStats.AUDIO_STATE_DISABLED, mAudioErrorCause)),
+ OutputResults.of(Uri.EMPTY),
+ error,
+ cause));
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void onSurfaceRequestedInternal(@NonNull SurfaceRequest request,
+ @NonNull Timebase timebase) {
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ mLatestSurfaceRequest.willNotProvideSurface();
+ }
+ configureInternal(mLatestSurfaceRequest = request, mVideoSourceTimebase = timebase);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ void onSourceStateChangedInternal(@NonNull SourceState newState) {
+ SourceState oldState = mSourceState;
+ mSourceState = newState;
+ if (oldState != newState) {
+ Logger.d(TAG, "Video source has transitioned to state: " + newState);
+ } else {
+ Logger.d(TAG, "Video source transitions to the same state: " + newState);
+ return;
+ }
+
+ if (newState == SourceState.INACTIVE) {
+ if (mActiveSurface == null) {
+ // If we're inactive and have no active surface, we'll reset the encoder directly.
+ // Otherwise, we'll wait for the active surface's surface request listener to
+ // reset the encoder.
+ requestReset(ERROR_SOURCE_INACTIVE, null);
+ } else {
+ // The source becomes inactive, the incoming new surface request has to be cached
+ // and be serviced after the Recorder is reset when receiving the previous
+ // surface request complete callback.
+ mNeedsReset = true;
+ if (mInProgressRecording != null) {
+ // Stop any in progress recording with "source inactive" error
+ onInProgressRecordingInternalError(mInProgressRecording, ERROR_SOURCE_INACTIVE,
+ null);
+ }
+ }
+ } else if (newState == SourceState.ACTIVE_NON_STREAMING) {
+ // We are expecting the source to transition to NON_STREAMING state.
+ if (mSourceNonStreamingTimeout != null && mSourceNonStreamingTimeout.cancel(false)
+ && mVideoEncoder != null) {
+ notifyEncoderSourceStopped(mVideoEncoder);
+ }
+ }
+ }
+
+ /**
+ * Requests the Recorder to be reset.
+ *
+ * <p>If a recording is in progress, it will be stopped asynchronously and reset once it has
+ * been finalized.
+ *
+ * <p>The Recorder is expected to be reset when there's no active surface. Otherwise, wait for
+ * the surface request complete callback first.
+ */
+ @ExecutedBy("mSequentialExecutor")
+ void requestReset(@VideoRecordError int errorCode, @Nullable Throwable errorCause) {
+ boolean shouldReset = false;
+ boolean shouldStop = false;
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_RECORDING:
+ // Fall-through
+ case PENDING_PAUSED:
+ // Fall-through
+ shouldReset = true;
+ updateNonPendingState(State.RESETTING);
+ break;
+ case ERROR:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case CONFIGURING:
+ shouldReset = true;
+ break;
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ if (mActiveRecordingRecord != mInProgressRecording) {
+ throw new AssertionError("In-progress recording does not match the active"
+ + " recording. Unable to reset encoder.");
+ }
+ // If there's an active recording, stop it first then release the resources
+ // at onRecordingFinalized().
+ shouldStop = true;
+ // Fall-through
+ case STOPPING:
+ // Already stopping. Set state to RESETTING so resources will be released once
+ // onRecordingFinalized() runs.
+ setState(State.RESETTING);
+ break;
+ case RESETTING:
+ // No-Op, the Recorder is already being reset.
+ break;
+ }
+ }
+
+ // These calls must not be posted to the executor to ensure they are executed inline on
+ // the sequential executor and the state changes above are correctly handled.
+ if (shouldReset) {
+ reset();
+ } else if (shouldStop) {
+ stopInternal(mInProgressRecording, Encoder.NO_TIMESTAMP, errorCode, errorCause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+
+ private void configureInternal(@NonNull SurfaceRequest surfaceRequest,
+ @NonNull Timebase videoSourceTimebase) {
+ if (surfaceRequest.isServiced()) {
+ Logger.w(TAG, "Ignore the SurfaceRequest since it is already served.");
+ return;
+ }
+ surfaceRequest.setTransformationInfoListener(mSequentialExecutor,
+ (transformationInfo) -> mSurfaceTransformationInfo = transformationInfo);
+ Size surfaceSize = surfaceRequest.getResolution();
+ // Fetch and cache nearest camcorder profile, if one exists.
+ VideoCapabilities capabilities =
+ VideoCapabilities.from(surfaceRequest.getCamera().getCameraInfo());
+ Quality highestSupportedQuality = capabilities.findHighestSupportedQualityFor(surfaceSize);
+ Logger.d(TAG, "Using supported quality of " + highestSupportedQuality
+ + " for surface size " + surfaceSize);
+ if (highestSupportedQuality != Quality.NONE) {
+ mResolvedCamcorderProfile = capabilities.getProfile(highestSupportedQuality);
+ if (mResolvedCamcorderProfile == null) {
+ throw new AssertionError("Camera advertised available quality but did not "
+ + "produce CamcorderProfile for advertised quality.");
+ }
+ }
+ setupVideo(surfaceRequest, videoSourceTimebase);
+ }
+
+ @SuppressWarnings("ObjectToString")
+ @ExecutedBy("mSequentialExecutor")
+ private void setupVideo(@NonNull SurfaceRequest request, @NonNull Timebase timebase) {
+ safeToCloseVideoEncoder().addListener(() -> {
+ if (request.isServiced() || mVideoEncoderSession.isConfiguredSurfaceRequest(request)) {
+ Logger.w(TAG, "Ignore the SurfaceRequest " + request + " isServiced: "
+ + request.isServiced() + " VideoEncoderSession: " + mVideoEncoderSession);
+ return;
+ }
+ VideoEncoderSession videoEncoderSession =
+ new VideoEncoderSession(mVideoEncoderFactory, mSequentialExecutor, mExecutor);
+ MediaSpec mediaSpec = getObservableData(mMediaSpec);
+ ListenableFuture<Encoder> configureFuture =
+ videoEncoderSession.configure(request, timebase, mediaSpec,
+ mResolvedCamcorderProfile);
+ mVideoEncoderSession = videoEncoderSession;
+ Futures.addCallback(configureFuture, new FutureCallback<Encoder>() {
+ @Override
+ public void onSuccess(@Nullable Encoder result) {
+ Logger.d(TAG, "VideoEncoder is created. " + result);
+ if (result == null) {
+ return;
+ }
+ Preconditions.checkState(mVideoEncoderSession == videoEncoderSession);
+ Preconditions.checkState(mVideoEncoder == null);
+ onVideoEncoderReady(videoEncoderSession);
+ onConfigured();
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, "VideoEncoder Setup error: " + t);
+ onEncoderSetupError(t);
+ }
+ }, mSequentialExecutor);
+ }, mSequentialExecutor);
+ }
+
+ @NonNull
+ @ExecutedBy("mSequentialExecutor")
+ private ListenableFuture<Void> safeToCloseVideoEncoder() {
+ Logger.d(TAG, "Try to safely release video encoder: " + mVideoEncoder);
+ return mVideoEncoderSession.signalTermination();
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void onVideoEncoderReady(@NonNull VideoEncoderSession videoEncoderSession) {
+ mVideoEncoder = videoEncoderSession.getVideoEncoder();
+ mVideoEncoderBitrateRange =
+ ((VideoEncoderInfo) mVideoEncoder.getEncoderInfo()).getSupportedBitrateRange();
+ mFirstRecordingVideoBitrate = mVideoEncoder.getConfiguredBitrate();
+ mActiveSurface = videoEncoderSession.getActiveSurface();
+ setLatestSurface(mActiveSurface);
+
+ videoEncoderSession.setOnSurfaceUpdateListener(mSequentialExecutor, this::setLatestSurface);
+
+ Futures.addCallback(videoEncoderSession.getReadyToReleaseFuture(),
+ new FutureCallback<Encoder>() {
+ @Override
+ public void onSuccess(@Nullable Encoder result) {
+ Logger.d(TAG, "VideoEncoder can be released: " + result);
+ if (result == null) {
+ return;
+ }
+ if (mSourceNonStreamingTimeout != null
+ && mSourceNonStreamingTimeout.cancel(false)
+ && mVideoEncoder != null && mVideoEncoder == result) {
+ notifyEncoderSourceStopped(mVideoEncoder);
+ }
+
+ mVideoEncoderSessionToRelease = videoEncoderSession;
+ setLatestSurface(null);
+ requestReset(ERROR_SOURCE_INACTIVE, null);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, "Error in ReadyToReleaseFuture: " + t);
+ }
+ }, mSequentialExecutor);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void onConfigured() {
+ RecordingRecord recordingToStart = null;
+ RecordingRecord pendingRecordingToFinalize = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ boolean startRecordingPaused = false;
+ synchronized (mLock) {
+ switch (mState) {
+ case IDLING:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case PAUSED:
+ // Fall-through
+ case RESETTING:
+ throw new AssertionError(
+ "Incorrectly invoke onConfigured() in state " + mState);
+ case STOPPING:
+ if (!mEncoderNotUsePersistentInputSurface) {
+ throw new AssertionError("Unexpectedly invoke onConfigured() in a "
+ + "STOPPING state when it's not waiting for a new surface.");
+ }
+ break;
+ case CONFIGURING:
+ setState(State.IDLING);
+ break;
+ case ERROR:
+ Logger.e(TAG,
+ "onConfigured() was invoked when the Recorder had encountered error");
+ break;
+ case PENDING_PAUSED:
+ startRecordingPaused = true;
+ // Fall through
+ case PENDING_RECORDING:
+ if (mActiveRecordingRecord != null) {
+ // Active recording is still finalizing. Pending recording will be
+ // serviced in onRecordingFinalized().
+ break;
+ }
+ if (mSourceState == SourceState.INACTIVE) {
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ restoreNonPendingState(); // Equivalent to setState(mNonPendingState)
+ error = ERROR_SOURCE_INACTIVE;
+ errorCause = PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE;
+ } else {
+ recordingToStart = makePendingRecordingActiveLocked(mState);
+ }
+ break;
+ }
+ }
+
+ if (recordingToStart != null) {
+ // Start new active recording inline on sequential executor (but unlocked).
+ startRecording(recordingToStart, startRecordingPaused);
+ } else if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, error, errorCause);
+ }
+ }
+
+ @NonNull
+ private MediaSpec composeRecorderMediaSpec(@NonNull MediaSpec mediaSpec) {
+ MediaSpec.Builder mediaSpecBuilder = mediaSpec.toBuilder();
+
+ // Append default video configurations
+ VideoSpec videoSpec = mediaSpec.getVideoSpec();
+ if (videoSpec.getAspectRatio() == AspectRatio.RATIO_DEFAULT) {
+ mediaSpecBuilder.configureVideo(
+ builder -> builder.setAspectRatio(VIDEO_SPEC_DEFAULT.getAspectRatio()));
+ }
+
+ return mediaSpecBuilder.build();
+ }
+
+ private static boolean isSameRecording(@NonNull SucklessRecording activeRecording,
+ @Nullable RecordingRecord recordingRecord) {
+ if (recordingRecord == null) {
+ return false;
+ }
+
+ return activeRecording.getRecordingId() == recordingRecord.getRecordingId();
+ }
+
+ /**
+ * Setup audio related resources.
+ *
+ * @throws AudioSourceAccessException if the audio source failed to be setup.
+ * @throws InvalidConfigException if the audio encoder failed to be setup.
+ */
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @ExecutedBy("mSequentialExecutor")
+ private void setupAudio(@NonNull RecordingRecord recordingToStart)
+ throws AudioSourceAccessException, InvalidConfigException {
+ MediaSpec mediaSpec = getObservableData(mMediaSpec);
+ // Resolve the audio mime info
+ MimeInfo audioMimeInfo = resolveAudioMimeInfo(mediaSpec, mResolvedCamcorderProfile);
+ Timebase audioSourceTimebase = Timebase.UPTIME;
+
+ // Select and create the audio source
+ AudioSource.Settings audioSourceSettings =
+ resolveAudioSourceSettings(audioMimeInfo, mediaSpec.getAudioSpec());
+ if (mAudioSource != null) {
+ releaseCurrentAudioSource();
+ }
+ // TODO: set audioSourceTimebase to AudioSource. Currently AudioSource hard code
+ // AudioTimestamp.TIMEBASE_MONOTONIC.
+ mAudioSource = setupAudioSource(recordingToStart, audioSourceSettings);
+ Logger.d(TAG, String.format("Set up new audio source: 0x%x", mAudioSource.hashCode()));
+
+ // Select and create the audio encoder
+ AudioEncoderConfig audioEncoderConfig = resolveAudioEncoderConfig(audioMimeInfo,
+ audioSourceTimebase, audioSourceSettings, mediaSpec.getAudioSpec());
+ mAudioEncoder = mAudioEncoderFactory.createEncoder(mExecutor, audioEncoderConfig);
+
+ // Connect the audio source to the audio encoder
+ Encoder.EncoderInput bufferProvider = mAudioEncoder.getInput();
+ if (!(bufferProvider instanceof Encoder.ByteBufferInput)) {
+ throw new AssertionError("The EncoderInput of audio isn't a ByteBufferInput.");
+ }
+ mAudioSource.setBufferProvider((Encoder.ByteBufferInput) bufferProvider);
+ }
+
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @NonNull
+ private AudioSource setupAudioSource(@NonNull RecordingRecord recordingToStart,
+ @NonNull AudioSource.Settings audioSourceSettings)
+ throws AudioSourceAccessException {
+ return recordingToStart.performOneTimeAudioSourceCreation(audioSourceSettings,
+ AUDIO_EXECUTOR);
+ }
+
+ private void releaseCurrentAudioSource() {
+ if (mAudioSource == null) {
+ throw new AssertionError("Cannot release null audio source.");
+ }
+ AudioSource audioSource = mAudioSource;
+ mAudioSource = null;
+ Logger.d(TAG, String.format("Releasing audio source: 0x%x", audioSource.hashCode()));
+ // Run callback on direct executor since it is only logging
+ Futures.addCallback(audioSource.release(), new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(@Nullable Void result) {
+ Logger.d(TAG, String.format("Released audio source successfully: 0x%x",
+ audioSource.hashCode()));
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, String.format("An error occurred while attempting to "
+ + "release audio source: 0x%x", audioSource.hashCode()));
+ }
+ }, CameraXExecutors.directExecutor());
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void onEncoderSetupError(@Nullable Throwable cause) {
+ RecordingRecord pendingRecordingToFinalize = null;
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_PAUSED:
+ // Fall-through
+ case PENDING_RECORDING:
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ // Fall-through
+ case CONFIGURING:
+ setStreamId(StreamInfo.STREAM_ID_ERROR);
+ setState(State.ERROR);
+ break;
+ case ERROR:
+ // Already in an error state. Ignore new error.
+ break;
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ throw new AssertionError("Encountered encoder setup error while in unexpected"
+ + " state " + mState + ": " + cause);
+ }
+ }
+
+ if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, ERROR_RECORDER_ERROR, cause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void setupAndStartMediaMuxer(@NonNull RecordingRecord recordingToStart) {
+ if (mMediaMuxer != null) {
+ throw new AssertionError("Unable to set up media muxer when one already exists.");
+ }
+
+ if (isAudioEnabled() && mPendingAudioRingBuffer.isEmpty()) {
+ throw new AssertionError("Audio is enabled but no audio sample is ready. Cannot start"
+ + " media muxer.");
+ }
+
+ if (mPendingFirstVideoData == null) {
+ throw new AssertionError("Media muxer cannot be started without an encoded video "
+ + "frame.");
+ }
+
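+ // Drain the cached first video frame and any matching cached audio, verify they fit
+ // within the file size limit, then create and start the muxer before writing them.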
+ try (EncodedData videoDataToWrite = mPendingFirstVideoData) {
+ mPendingFirstVideoData = null;
+ List<EncodedData> audioDataToWrite = getAudioDataToWriteAndClearCache(
+ videoDataToWrite.getPresentationTimeUs()
+ );
+ // Make sure we can write the first audio and video data without hitting the file size
+ // limit. Otherwise we will be left with a malformed (empty) track on stop.
+ long firstDataSize = videoDataToWrite.size();
+ for (EncodedData data : audioDataToWrite) {
+ firstDataSize += data.size();
+ }
+ if (mFileSizeLimitInBytes != OutputOptions.FILE_SIZE_UNLIMITED
+ && firstDataSize > mFileSizeLimitInBytes) {
+ Logger.d(TAG,
+ String.format("Initial data exceeds file size limit %d > %d", firstDataSize,
+ mFileSizeLimitInBytes));
+ onInProgressRecordingInternalError(recordingToStart,
+ ERROR_FILE_SIZE_LIMIT_REACHED, null);
+ return;
+ }
+
+ MediaMuxer mediaMuxer;
+ try {
+ MediaSpec mediaSpec = getObservableData(mMediaSpec);
+ int muxerOutputFormat =
+ mediaSpec.getOutputFormat() == MediaSpec.OUTPUT_FORMAT_AUTO
+ ? supportedMuxerFormatOrDefaultFrom(mResolvedCamcorderProfile,
+ MediaSpec.outputFormatToMuxerFormat(
+ MEDIA_SPEC_DEFAULT.getOutputFormat()))
+ : MediaSpec.outputFormatToMuxerFormat(mediaSpec.getOutputFormat());
+ mediaMuxer = recordingToStart.performOneTimeMediaMuxerCreation(muxerOutputFormat,
+ uri -> mOutputUri = uri);
+ } catch (IOException e) {
+ onInProgressRecordingInternalError(recordingToStart, ERROR_INVALID_OUTPUT_OPTIONS,
+ e);
+ return;
+ }
+
+ if (mSurfaceTransformationInfo != null) {
+ mediaMuxer.setOrientationHint(mSurfaceTransformationInfo.getRotationDegrees());
+ }
+
+ mVideoTrackIndex = mediaMuxer.addTrack(mVideoOutputConfig.getMediaFormat());
+ if (isAudioEnabled()) {
+ mAudioTrackIndex = mediaMuxer.addTrack(mAudioOutputConfig.getMediaFormat());
+ }
+ mediaMuxer.start();
+
+ // MediaMuxer is successfully initialized, transfer the ownership to Recorder.
+ mMediaMuxer = mediaMuxer;
+
+ // Write first data to ensure tracks are not empty
+ writeVideoData(videoDataToWrite, recordingToStart);
+ for (EncodedData data : audioDataToWrite) {
+ writeAudioData(data, recordingToStart);
+ }
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @NonNull
+ private List<EncodedData> getAudioDataToWriteAndClearCache(long firstVideoDataTimeUs) {
+ List<EncodedData> res = new ArrayList<>();
+
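+ // Drain the ring buffer completely; samples older than the first video frame are
+ // discarded so the audio and video tracks start aligned.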
+ while (!mPendingAudioRingBuffer.isEmpty()) {
+ EncodedData data = mPendingAudioRingBuffer.dequeue();
+
+ // Add all audio data that has timestamp greater than or equal to the first video data
+ // timestamp.
+ if (data.getPresentationTimeUs() >= firstVideoDataTimeUs) {
+ res.add(data);
+ }
+ }
+
+ return res;
+ }
+
+ @SuppressLint("MissingPermission")
+ @ExecutedBy("mSequentialExecutor")
+ private void startInternal(@NonNull RecordingRecord recordingToStart) {
+ if (mInProgressRecording != null) {
+ throw new AssertionError("Attempted to start a new recording while another was in "
+ + "progress.");
+ }
+
+ if (recordingToStart.getOutputOptions().getFileSizeLimit() > 0) {
+ // Use 95% of the given file size limit as the criteria, which refers to the
+ // MPEG4Writer.cpp in libstagefright.
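+ // For example, a requested limit of 100 MB is enforced internally as roughly 95 MB.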
+ mFileSizeLimitInBytes = Math.round(
+ recordingToStart.getOutputOptions().getFileSizeLimit() * 0.95);
+ Logger.d(TAG, "File size limit in bytes: " + mFileSizeLimitInBytes);
+ } else {
+ mFileSizeLimitInBytes = OutputOptions.FILE_SIZE_UNLIMITED;
+ }
+
+ if (recordingToStart.getOutputOptions().getDurationLimitMillis() > 0) {
+ mDurationLimitNs = TimeUnit.MILLISECONDS.toNanos(
+ recordingToStart.getOutputOptions().getDurationLimitMillis());
+ Logger.d(TAG, "Duration limit in nanoseconds: " + mDurationLimitNs);
+ } else {
+ mDurationLimitNs = OutputOptions.DURATION_UNLIMITED;
+ }
+
+ mInProgressRecording = recordingToStart;
+
+ // Configure audio based on the current audio state.
+ switch (mAudioState) {
+ case ERROR_ENCODER:
+ // Fall-through
+ case ERROR_SOURCE:
+ // Fall-through
+ case ACTIVE:
+ // Fall-through
+ case DISABLED:
+ throw new AssertionError(
+ "Incorrectly invoke startInternal in audio state " + mAudioState);
+ case IDLING:
+ setAudioState(recordingToStart.hasAudioEnabled() ? AudioState.ACTIVE
+ : AudioState.DISABLED);
+ break;
+ case INITIALIZING:
+ if (recordingToStart.hasAudioEnabled()) {
+ if (!isAudioSupported()) {
+ throw new AssertionError(
+ "The Recorder doesn't support recording with audio");
+ }
+ try {
+ setupAudio(recordingToStart);
+ setAudioState(AudioState.ACTIVE);
+ } catch (AudioSourceAccessException | InvalidConfigException e) {
+ Logger.e(TAG, "Unable to create audio resource with error: ", e);
+ AudioState audioState;
+ if (e instanceof InvalidConfigException) {
+ audioState = AudioState.ERROR_ENCODER;
+ } else {
+ audioState = AudioState.ERROR_SOURCE;
+ }
+ setAudioState(audioState);
+ mAudioErrorCause = e;
+ }
+ }
+ break;
+ }
+
+ initEncoderAndAudioSourceCallbacks(recordingToStart);
+ if (isAudioEnabled()) {
+ mAudioSource.start();
+ mAudioEncoder.start();
+ }
+ mVideoEncoder.start();
+
+ mInProgressRecording.updateVideoRecordEvent(VideoRecordEvent.start(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void initEncoderAndAudioSourceCallbacks(@NonNull RecordingRecord recordingToStart) {
+ mEncodingFutures.add(CallbackToFutureAdapter.getFuture(
+ completer -> {
+ mVideoEncoder.setEncoderCallback(new EncoderCallback() {
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStart() {
+ // No-op.
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStop() {
+ completer.set(null);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeError(@NonNull EncodeException e) {
+ completer.setException(e);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodedData(@NonNull EncodedData encodedData) {
+ // If the media muxer doesn't yet exist, we may need to create and
+ // start it. Otherwise we can write the data.
+ if (mMediaMuxer == null) {
+ if (!mInProgressRecordingStopping) {
+ // Clear any previously pending video data since we now
+ // have newer data.
+ boolean cachedDataDropped = false;
+ if (mPendingFirstVideoData != null) {
+ cachedDataDropped = true;
+ mPendingFirstVideoData.close();
+ mPendingFirstVideoData = null;
+ }
+
+ if (true) { // Let custom Muxers receive all frames
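+ // Unlike the upstream Recorder, this branch is taken for every encoded frame (not
+ // only keyframes), so the non-keyframe fallback below is effectively unused.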
+ // We have a keyframe. Cache it in case we need to wait
+ // for audio data.
+ mPendingFirstVideoData = encodedData;
+ // If first pending audio data exists or audio is
+ // disabled, we can start the muxer.
+ if (!isAudioEnabled()
+ || !mPendingAudioRingBuffer.isEmpty()) {
+ Logger.d(TAG, "Received video keyframe. Starting "
+ + "muxer...");
+ setupAndStartMediaMuxer(recordingToStart);
+ } else {
+ if (cachedDataDropped) {
+ Logger.d(TAG, "Replaced cached video keyframe "
+ + "with newer keyframe.");
+ } else {
+ Logger.d(TAG, "Cached video keyframe while we wait "
+ + "for first audio sample before starting "
+ + "muxer.");
+ }
+ }
+ } else {
+ // If the video data is not a key frame,
+ // MediaMuxer#writeSampleData will drop it. It will
+ // cause incorrect estimated record bytes and should
+ // be dropped.
+ if (cachedDataDropped) {
+ Logger.d(TAG, "Dropped cached keyframe since we have "
+ + "new video data and have not yet received "
+ + "audio data.");
+ }
+ Logger.d(TAG, "Dropped video data since muxer has not yet "
+ + "started and data is not a keyframe.");
+ mVideoEncoder.requestKeyFrame();
+ encodedData.close();
+ }
+ } else {
+ // Recording is stopping before muxer has been started.
+ Logger.d(TAG, "Drop video data since recording is stopping.");
+ encodedData.close();
+ }
+ } else {
+ // MediaMuxer is already started, write the data.
+ try (EncodedData videoDataToWrite = encodedData) {
+ writeVideoData(videoDataToWrite, recordingToStart);
+ }
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onOutputConfigUpdate(@NonNull OutputConfig outputConfig) {
+ mVideoOutputConfig = outputConfig;
+ }
+ }, mSequentialExecutor);
+ return "videoEncodingFuture";
+ }));
+
+ if (isAudioEnabled()) {
+ mEncodingFutures.add(CallbackToFutureAdapter.getFuture(
+ completer -> {
+ Consumer<Throwable> audioErrorConsumer = throwable -> {
+ if (mAudioErrorCause == null) {
+ // If the audio source or encoder encounters error, update the
+ // status event to notify users. Then continue recording without
+ // audio data.
+ if (throwable instanceof EncodeException) {
+ setAudioState(AudioState.ERROR_ENCODER);
+ } else {
+ setAudioState(AudioState.ERROR_SOURCE);
+ }
+ mAudioErrorCause = throwable;
+ updateInProgressStatusEvent();
+ completer.set(null);
+ }
+ };
+
+ mAudioSource.setAudioSourceCallback(mSequentialExecutor,
+ new AudioSource.AudioSourceCallback() {
+ @Override
+ public void onSilenced(boolean silenced) {
+ if (mIsAudioSourceSilenced != silenced) {
+ mIsAudioSourceSilenced = silenced;
+ mAudioErrorCause = silenced ? new IllegalStateException(
+ "The audio source has been silenced.") : null;
+ updateInProgressStatusEvent();
+ } else {
+ Logger.w(TAG, "Audio source silenced transitions"
+ + " to the same state " + silenced);
+ }
+ }
+
+ @Override
+ public void onError(@NonNull Throwable throwable) {
+ Logger.e(TAG, "Error occurred after audio source started.",
+ throwable);
+ if (throwable instanceof AudioSourceAccessException) {
+ audioErrorConsumer.accept(throwable);
+ }
+ }
+ });
+
+ mAudioEncoder.setEncoderCallback(new EncoderCallback() {
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStart() {
+ // No-op.
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStop() {
+ completer.set(null);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeError(@NonNull EncodeException e) {
+ if (mAudioErrorCause == null) {
+ audioErrorConsumer.accept(e);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodedData(@NonNull EncodedData encodedData) {
+ if (mAudioState == AudioState.DISABLED) {
+ throw new AssertionError(
+ "Audio is not enabled but audio encoded data is "
+ + "produced.");
+ }
+
+ // If the media muxer doesn't yet exist, we may need to create and
+ // start it. Otherwise we can write the data.
+ if (mMediaMuxer == null) {
+ if (!mInProgressRecordingStopping) {
+ // BufferCopiedEncodedData is used to copy the content of
+ // the encoded data, preventing byte buffers of the media
+ // codec from being occupied. Also, since the resources of
+ // BufferCopiedEncodedData will be automatically released
+ // by garbage collection, there is no need to call its
+ // close() function.
+ mPendingAudioRingBuffer.enqueue(
+ new BufferCopiedEncodedData(encodedData));
+
+ if (mPendingFirstVideoData != null) {
+ // Both audio and data are ready. Start the muxer.
+ Logger.d(TAG, "Received audio data. Starting muxer...");
+ setupAndStartMediaMuxer(recordingToStart);
+ } else {
+ Logger.d(TAG, "Cached audio data while we wait"
+ + " for video keyframe before starting muxer.");
+ }
+ } else {
+ // Recording is stopping before muxer has been started.
+ Logger.d(TAG,
+ "Drop audio data since recording is stopping.");
+ }
+ encodedData.close();
+ } else {
+ try (EncodedData audioDataToWrite = encodedData) {
+ writeAudioData(audioDataToWrite, recordingToStart);
+ }
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onOutputConfigUpdate(@NonNull OutputConfig outputConfig) {
+ mAudioOutputConfig = outputConfig;
+ }
+ }, mSequentialExecutor);
+ return "audioEncodingFuture";
+ }));
+ }
+
+ Futures.addCallback(Futures.allAsList(mEncodingFutures),
+ new FutureCallback<List<Void>>() {
+ @Override
+ public void onSuccess(@Nullable List<Void> result) {
+ Logger.d(TAG, "Encodings end successfully.");
+ finalizeInProgressRecording(mRecordingStopError, mRecordingStopErrorCause);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, "Encodings end with error: " + t);
+ // If the media muxer hasn't been set up, assume the encoding fails
+ // because no valid data has been produced.
+ finalizeInProgressRecording(
+ mMediaMuxer == null ? ERROR_NO_VALID_DATA : ERROR_ENCODING_FAILED,
+ t);
+ }
+ },
+ // Can use direct executor since completers are always completed on sequential
+ // executor.
+ CameraXExecutors.directExecutor());
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void writeVideoData(@NonNull EncodedData encodedData,
+ @NonNull RecordingRecord recording) {
+ if (mVideoTrackIndex == null) {
+ // Throw an exception if the data comes before the track is added.
+ throw new AssertionError(
+ "Video data comes before the track is added to MediaMuxer.");
+ }
+
+ long newRecordingBytes = mRecordingBytes + encodedData.size();
+ if (mFileSizeLimitInBytes != OutputOptions.FILE_SIZE_UNLIMITED
+ && newRecordingBytes > mFileSizeLimitInBytes) {
+ Logger.d(TAG,
+ String.format("Reach file size limit %d > %d", newRecordingBytes,
+ mFileSizeLimitInBytes));
+ onInProgressRecordingInternalError(recording, ERROR_FILE_SIZE_LIMIT_REACHED, null);
+ return;
+ }
+
+ long newRecordingDurationNs = 0L;
+ long currentPresentationTimeUs = encodedData.getPresentationTimeUs();
+
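+ // Long.MAX_VALUE is the "no data yet" sentinel; the timestamp fields are reset to it
+ // in finalizeInProgressRecording().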
+ if (mFirstRecordingVideoDataTimeUs == Long.MAX_VALUE) {
+ mFirstRecordingVideoDataTimeUs = currentPresentationTimeUs;
+ Logger.d(TAG, String.format("First video time: %d (%s)", mFirstRecordingVideoDataTimeUs,
+ readableUs(mFirstRecordingVideoDataTimeUs)));
+ } else {
+ newRecordingDurationNs = TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - Math.min(mFirstRecordingVideoDataTimeUs,
+ mFirstRecordingAudioDataTimeUs));
+ Preconditions.checkState(mPreviousRecordingVideoDataTimeUs != Long.MAX_VALUE, "There "
+ + "should be a previous data for adjusting the duration.");
+ // We currently don't send an additional empty buffer (bufferInfo.size = 0) with
+ // MediaCodec.BUFFER_FLAG_END_OF_STREAM to let the muxer know the duration of the
+ // last data, so it will be assumed to have the same duration as the data before it. So
+ // add the estimated value to the duration to ensure the final duration will not
+ // exceed the limit.
+ long adjustedDurationNs = newRecordingDurationNs + TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - mPreviousRecordingVideoDataTimeUs);
+ if (mDurationLimitNs != OutputOptions.DURATION_UNLIMITED
+ && adjustedDurationNs > mDurationLimitNs) {
+ Logger.d(TAG, String.format("Video data reaches duration limit %d > %d",
+ adjustedDurationNs, mDurationLimitNs));
+ onInProgressRecordingInternalError(recording, ERROR_DURATION_LIMIT_REACHED, null);
+ return;
+ }
+ }
+
+ mMediaMuxer.writeSampleData(mVideoTrackIndex, encodedData.getByteBuffer(),
+ encodedData.getBufferInfo());
+
+ mRecordingBytes = newRecordingBytes;
+ mRecordingDurationNs = newRecordingDurationNs;
+ mPreviousRecordingVideoDataTimeUs = currentPresentationTimeUs;
+
+ updateInProgressStatusEvent();
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void writeAudioData(@NonNull EncodedData encodedData,
+ @NonNull RecordingRecord recording) {
+
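+ // Mirrors writeVideoData(): the same file size and duration limits are enforced
+ // before handing the sample to the muxer.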
+ long newRecordingBytes = mRecordingBytes + encodedData.size();
+ if (mFileSizeLimitInBytes != OutputOptions.FILE_SIZE_UNLIMITED
+ && newRecordingBytes > mFileSizeLimitInBytes) {
+ Logger.d(TAG,
+ String.format("Reach file size limit %d > %d",
+ newRecordingBytes,
+ mFileSizeLimitInBytes));
+ onInProgressRecordingInternalError(recording, ERROR_FILE_SIZE_LIMIT_REACHED, null);
+ return;
+ }
+
+ long newRecordingDurationNs = 0L;
+ long currentPresentationTimeUs = encodedData.getPresentationTimeUs();
+ if (mFirstRecordingAudioDataTimeUs == Long.MAX_VALUE) {
+ mFirstRecordingAudioDataTimeUs = currentPresentationTimeUs;
+ Logger.d(TAG, String.format("First audio time: %d (%s)", mFirstRecordingAudioDataTimeUs,
+ readableUs(mFirstRecordingAudioDataTimeUs)));
+ } else {
+ newRecordingDurationNs = TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - Math.min(mFirstRecordingVideoDataTimeUs,
+ mFirstRecordingAudioDataTimeUs));
+ Preconditions.checkState(mPreviousRecordingAudioDataTimeUs != Long.MAX_VALUE, "There "
+ + "should be a previous data for adjusting the duration.");
+ // We currently don't send an additional empty buffer (bufferInfo.size = 0) with
+ // MediaCodec.BUFFER_FLAG_END_OF_STREAM to let the muxer know the duration of the
+ // last data, so it will be assumed to have the same duration as the data before it. So
+ // add the estimated value to the duration to ensure the final duration will not
+ // exceed the limit.
+ long adjustedDurationNs = newRecordingDurationNs + TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - mPreviousRecordingAudioDataTimeUs);
+ if (mDurationLimitNs != OutputOptions.DURATION_UNLIMITED
+ && adjustedDurationNs > mDurationLimitNs) {
+ Logger.d(TAG, String.format("Audio data reaches duration limit %d > %d",
+ adjustedDurationNs, mDurationLimitNs));
+ onInProgressRecordingInternalError(recording, ERROR_DURATION_LIMIT_REACHED, null);
+ return;
+ }
+ }
+
+ mMediaMuxer.writeSampleData(mAudioTrackIndex,
+ encodedData.getByteBuffer(),
+ encodedData.getBufferInfo());
+
+ mRecordingBytes = newRecordingBytes;
+ mPreviousRecordingAudioDataTimeUs = currentPresentationTimeUs;
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void pauseInternal(@NonNull RecordingRecord recordingToPause) {
+ // Only pause recording if recording is in-progress and it is not stopping.
+ if (mInProgressRecording == recordingToPause && !mInProgressRecordingStopping) {
+ if (isAudioEnabled()) {
+ mAudioEncoder.pause();
+ }
+ mVideoEncoder.pause();
+
+ mInProgressRecording.updateVideoRecordEvent(VideoRecordEvent.pause(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void resumeInternal(@NonNull RecordingRecord recordingToResume) {
+ // Only resume recording if recording is in-progress and it is not stopping.
+ if (mInProgressRecording == recordingToResume && !mInProgressRecordingStopping) {
+ if (isAudioEnabled()) {
+ mAudioEncoder.start();
+ }
+ mVideoEncoder.start();
+
+ mInProgressRecording.updateVideoRecordEvent(VideoRecordEvent.resume(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void stopInternal(@NonNull RecordingRecord recordingToStop,
+ long explicitlyStopTime, @VideoRecordError int stopError,
+ @Nullable Throwable errorCause) {
+ // Only stop recording if recording is in-progress and it is not already stopping.
+ if (mInProgressRecording == recordingToStop && !mInProgressRecordingStopping) {
+ mInProgressRecordingStopping = true;
+ mRecordingStopError = stopError;
+ mRecordingStopErrorCause = errorCause;
+ if (isAudioEnabled()) {
+ clearPendingAudioRingBuffer();
+ mAudioEncoder.stop(explicitlyStopTime);
+ }
+ if (mPendingFirstVideoData != null) {
+ mPendingFirstVideoData.close();
+ mPendingFirstVideoData = null;
+ }
+
+ if (mSourceState != SourceState.ACTIVE_NON_STREAMING) {
+ // As b/197047288, if the source is still ACTIVE, we will wait for the source to
+ // become non-streaming before notifying the encoder the source has stopped.
+ // Similarly, if the source is already INACTIVE, we won't know that the source
+ // has stopped until the surface request callback, so we'll wait for that.
+ // In both cases, we set a timeout to ensure the source is always signalled on
+ // devices that require it and to act as a flag that we need to signal the source
+ // stopped.
+ Encoder finalVideoEncoder = mVideoEncoder;
+ mSourceNonStreamingTimeout = CameraXExecutors.mainThreadExecutor().schedule(
+ () -> mSequentialExecutor.execute(() -> {
+ Logger.d(TAG, "The source didn't become non-streaming "
+ + "before timeout. Waited " + SOURCE_NON_STREAMING_TIMEOUT_MS
+ + "ms");
+ if (DeviceQuirks.get(
+ DeactivateEncoderSurfaceBeforeStopEncoderQuirk.class)
+ != null) {
+ // Even in the case of timeout, we tell the encoder the source has
+ // stopped because devices with this quirk require that the codec
+ // produce a new surface.
+ notifyEncoderSourceStopped(finalVideoEncoder);
+ }
+ }), SOURCE_NON_STREAMING_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ } else {
+ // Source is already non-streaming. Signal source is stopped right away.
+ notifyEncoderSourceStopped(mVideoEncoder);
+ }
+
+ // Stop the encoder. This will tell the encoder to stop encoding new data. We'll notify
+ // the encoder when the source has actually stopped in the FutureCallback.
+ // If the recording is explicitly stopped by the user, pass the stop timestamp to the
+ // encoder so that the encoding can be stopped as close as possible to the actual stop time.
+ mVideoEncoder.stop(explicitlyStopTime);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ static void notifyEncoderSourceStopped(@NonNull Encoder encoder) {
+ if (encoder instanceof SucklessEncoderImpl) {
+ ((SucklessEncoderImpl) encoder).signalSourceStopped();
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void clearPendingAudioRingBuffer() {
+ while (!mPendingAudioRingBuffer.isEmpty()) {
+ mPendingAudioRingBuffer.dequeue();
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void reset() {
+ if (mAudioEncoder != null) {
+ Logger.d(TAG, "Releasing audio encoder.");
+ mAudioEncoder.release();
+ mAudioEncoder = null;
+ mAudioOutputConfig = null;
+ }
+ tryReleaseVideoEncoder();
+ if (mAudioSource != null) {
+ releaseCurrentAudioSource();
+ }
+
+ setAudioState(AudioState.INITIALIZING);
+ onReset();
+ }
+
+ @SuppressWarnings("FutureReturnValueIgnored")
+ @ExecutedBy("mSequentialExecutor")
+ private void tryReleaseVideoEncoder() {
+ if (mVideoEncoderSessionToRelease != null) {
+ Preconditions.checkState(
+ mVideoEncoderSessionToRelease.getVideoEncoder() == mVideoEncoder);
+
+ Logger.d(TAG, "Releasing video encoder: " + mVideoEncoder);
+ mVideoEncoderSessionToRelease.terminateNow();
+ mVideoEncoderSessionToRelease = null;
+ mVideoEncoder = null;
+ mVideoOutputConfig = null;
+ setLatestSurface(null);
+ } else {
+ safeToCloseVideoEncoder();
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void onReset() {
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_PAUSED:
+ // Fall-through
+ case PENDING_RECORDING:
+ updateNonPendingState(State.CONFIGURING);
+ break;
+ case ERROR:
+ // Fall-through
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ setState(State.CONFIGURING);
+ break;
+ case CONFIGURING:
+ // No-op
+ break;
+ }
+ }
+
+ mNeedsReset = false;
+
+ // If the latest surface request hasn't been serviced, use it to re-configure the Recorder.
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ configureInternal(mLatestSurfaceRequest, mVideoSourceTimebase);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private int internalAudioStateToAudioStatsState(@NonNull AudioState audioState) {
+ switch (audioState) {
+ case DISABLED:
+ // Fall-through
+ case INITIALIZING:
+ // Audio will not be initialized until the first recording with audio enabled is
+ // started. So if the audio state is INITIALIZING, consider the audio is disabled.
+ return AudioStats.AUDIO_STATE_DISABLED;
+ case ACTIVE:
+ if (mIsAudioSourceSilenced) {
+ return AudioStats.AUDIO_STATE_SOURCE_SILENCED;
+ } else {
+ return AudioStats.AUDIO_STATE_ACTIVE;
+ }
+ case ERROR_ENCODER:
+ return AudioStats.AUDIO_STATE_ENCODER_ERROR;
+ case ERROR_SOURCE:
+ return AudioStats.AUDIO_STATE_SOURCE_ERROR;
+ case IDLING:
+ // AudioStats should not be produced when audio is in IDLING state.
+ break;
+ }
+ // Should not reach.
+ throw new AssertionError("Invalid internal audio state: " + audioState);
+ }
+
+ @NonNull
+ private StreamState internalStateToStreamState(@NonNull State state) {
+ // Stopping state should be treated as inactive on certain chipsets. See b/196039619.
+ DeactivateEncoderSurfaceBeforeStopEncoderQuirk quirk =
+ DeviceQuirks.get(DeactivateEncoderSurfaceBeforeStopEncoderQuirk.class);
+ return state == State.RECORDING || (state == State.STOPPING && quirk == null)
+ ? StreamState.ACTIVE : StreamState.INACTIVE;
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ boolean isAudioEnabled() {
+ return mAudioState == AudioState.ACTIVE;
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void finalizeInProgressRecording(@VideoRecordError int error, @Nullable Throwable throwable) {
+ if (mInProgressRecording == null) {
+ throw new AssertionError("Attempted to finalize in-progress recording, but no "
+ + "recording is in progress.");
+ }
+
+ @VideoRecordError int errorToSend = error;
+ if (mMediaMuxer != null) {
+ try {
+ mMediaMuxer.stop();
+ mMediaMuxer.release();
+ } catch (IllegalStateException e) {
+ Logger.e(TAG, "MediaMuxer failed to stop or release with error: " + e.getMessage());
+ if (errorToSend == ERROR_NONE) {
+ errorToSend = ERROR_UNKNOWN;
+ }
+ }
+ mMediaMuxer = null;
+ } else if (errorToSend == ERROR_NONE) {
+ // Muxer was never started, so recording has no data.
+ errorToSend = ERROR_NO_VALID_DATA;
+ }
+
+ mInProgressRecording.finalizeRecording(mOutputUri);
+
+ OutputOptions outputOptions = mInProgressRecording.getOutputOptions();
+ RecordingStats stats = getInProgressRecordingStats();
+ OutputResults outputResults = OutputResults.of(mOutputUri);
+ mInProgressRecording.updateVideoRecordEvent(errorToSend == ERROR_NONE
+ ? VideoRecordEvent.finalize(
+ outputOptions,
+ stats,
+ outputResults)
+ : VideoRecordEvent.finalizeWithError(
+ outputOptions,
+ stats,
+ outputResults,
+ errorToSend,
+ throwable));
+
+ RecordingRecord finalizedRecording = mInProgressRecording;
+ mInProgressRecording = null;
+ mInProgressRecordingStopping = false;
+ mAudioTrackIndex = null;
+ mVideoTrackIndex = null;
+ mEncodingFutures.clear();
+ mOutputUri = Uri.EMPTY;
+ mRecordingBytes = 0L;
+ mRecordingDurationNs = 0L;
+ mFirstRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ mFirstRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ mPreviousRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ mPreviousRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ mRecordingStopError = ERROR_UNKNOWN;
+ mRecordingStopErrorCause = null;
+ mAudioErrorCause = null;
+ clearPendingAudioRingBuffer();
+
+ switch (mAudioState) {
+ case IDLING:
+ throw new AssertionError(
+ "Incorrectly finalize recording when audio state is IDLING");
+ case INITIALIZING:
+ // No-op, the audio hasn't been initialized. Keep it in INITIALIZING state.
+ break;
+ case DISABLED:
+ // Fall-through
+ case ACTIVE:
+ setAudioState(AudioState.IDLING);
+ mAudioSource.stop();
+ break;
+ case ERROR_ENCODER:
+ // Fall-through
+ case ERROR_SOURCE:
+ // Reset audio state to INITIALIZING if the audio encoder encountered error, so
+ // that it can be setup again when the next recording with audio enabled is started.
+ setAudioState(AudioState.INITIALIZING);
+ break;
+ }
+
+ onRecordingFinalized(finalizedRecording);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void onRecordingFinalized(@NonNull RecordingRecord finalizedRecording) {
+ boolean needsReset = false;
+ boolean startRecordingPaused = false;
+ boolean needsConfigure = false;
+ RecordingRecord recordingToStart = null;
+ RecordingRecord pendingRecordingToFinalize = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ synchronized (mLock) {
+ if (mActiveRecordingRecord != finalizedRecording) {
+ throw new AssertionError("Active recording did not match finalized recording on "
+ + "finalize.");
+ }
+
+ mActiveRecordingRecord = null;
+ switch (mState) {
+ case RESETTING:
+ needsReset = true;
+ break;
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ // If finalized while in a RECORDING or PAUSED state, then the recording was
+ // likely finalized due to an error.
+ // Fall-through
+ case STOPPING:
+ if (mEncoderNotUsePersistentInputSurface) {
+ // If the encoder doesn't use persistent input surface, the active
+ // surface will become invalid after a recording is finalized. If there's
+ // an unserviced surface request, configure with it directly, otherwise
+ // wait for a new surface update.
+ mActiveSurface = null;
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ needsConfigure = true;
+ }
+ setState(State.CONFIGURING);
+ } else {
+ setState(State.IDLING);
+ }
+ break;
+ case PENDING_PAUSED:
+ startRecordingPaused = true;
+ // Fall-through
+ case PENDING_RECORDING:
+ if (mSourceState == SourceState.INACTIVE) {
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ setState(State.CONFIGURING);
+ error = ERROR_SOURCE_INACTIVE;
+ errorCause = PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE;
+ } else if (mEncoderNotUsePersistentInputSurface) {
+ // If the encoder doesn't use persistent input surface, the active
+ // surface will become invalid after a recording is finalized. If there's
+ // an unserviced surface request, configure with it directly, otherwise
+ // wait for a new surface update.
+ mActiveSurface = null;
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ needsConfigure = true;
+ }
+ updateNonPendingState(State.CONFIGURING);
+ } else if (mVideoEncoder != null) {
+ // If there's no VideoEncoder, it may need to wait for the new
+ // VideoEncoder to be configured.
+ recordingToStart = makePendingRecordingActiveLocked(mState);
+ }
+ break;
+ case ERROR:
+ // Error state is non-recoverable. Nothing to do here.
+ break;
+ case CONFIGURING:
+ // No-op, the Recorder has been reset before the recording is finalized. So
+ // keep the state in CONFIGURING.
+ break;
+ case IDLING:
+ throw new AssertionError("Unexpected state on finalize of recording: "
+ + mState);
+ }
+ }
+
+ // Perform required actions from state changes inline on sequential executor but unlocked.
+ if (needsConfigure) {
+ configureInternal(mLatestSurfaceRequest, mVideoSourceTimebase);
+ } else if (needsReset) {
+ reset();
+ } else if (recordingToStart != null) {
+ // A pending recording will only be started if we're not waiting for a new surface.
+ // Otherwise the recording will be started after receiving a new surface request.
+ if (mEncoderNotUsePersistentInputSurface) {
+ throw new AssertionError("Attempt to start a pending recording while the Recorder"
+ + " is waiting for a new surface request.");
+ }
+ startRecording(recordingToStart, startRecordingPaused);
+ } else if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, error, errorCause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ void onInProgressRecordingInternalError(@NonNull RecordingRecord recording,
+ @VideoRecordError int error, @Nullable Throwable cause) {
+ if (recording != mInProgressRecording) {
+ throw new AssertionError("Internal error occurred on recording that is not the current "
+ + "in-progress recording.");
+ }
+
+ boolean needsStop = false;
+ synchronized (mLock) {
+ switch (mState) {
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ setState(State.STOPPING);
+ needsStop = true;
+ // Fall-through
+ case STOPPING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case PENDING_RECORDING:
+ // Fall-through
+ case PENDING_PAUSED:
+ // Fall-through
+ if (recording != mActiveRecordingRecord) {
+ throw new AssertionError("Internal error occurred for recording but it is"
+ + " not the active recording.");
+ }
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case ERROR:
+ throw new AssertionError("In-progress recording error occurred while in "
+ + "unexpected state: " + mState);
+ }
+ }
+
+ if (needsStop) {
+ stopInternal(recording, Encoder.NO_TIMESTAMP, error, cause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ void tryServicePendingRecording() {
+ boolean startRecordingPaused = false;
+ RecordingRecord recordingToStart = null;
+ RecordingRecord pendingRecordingToFinalize = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_PAUSED:
+ startRecordingPaused = true;
+ // Fall-through
+ case PENDING_RECORDING:
+ if (mActiveRecordingRecord != null || mNeedsReset) {
+ // Active recording is still finalizing or the Recorder is expected to be
+ // reset. Pending recording will be serviced in onRecordingFinalized() or
+ // in onReset().
+ break;
+ }
+ if (mSourceState == SourceState.INACTIVE) {
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ restoreNonPendingState(); // Equivalent to setState(mNonPendingState)
+ error = ERROR_SOURCE_INACTIVE;
+ errorCause = PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE;
+ } else if (mVideoEncoder != null) {
+ // If there's no VideoEncoder, it may need to wait for the new
+ // VideoEncoder to be configured.
+ recordingToStart = makePendingRecordingActiveLocked(mState);
+ }
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case PAUSED:
+ // Fall-through
+ case STOPPING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case ERROR:
+ break;
+ }
+ }
+
+ if (recordingToStart != null) {
+ // Start new active recording inline on sequential executor (but unlocked).
+ startRecording(recordingToStart, startRecordingPaused);
+ } else if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, error, errorCause);
+ }
+ }
+
+ /**
+ * Makes the pending recording active and returns the new active recording.
+ *
+ * This method will not actually start the recording. It is up to the caller to start the
+ * returned recording. However, the Recorder.State will be updated to reflect what the state
+ * should be after the recording is started. This allows the recording to be started when no
+ * longer under lock.
+ */
+ @GuardedBy("mLock")
+ @NonNull
+ private RecordingRecord makePendingRecordingActiveLocked(@NonNull State state) {
+ boolean startRecordingPaused = false;
+ if (state == State.PENDING_PAUSED) {
+ startRecordingPaused = true;
+ } else if (state != State.PENDING_RECORDING) {
+ throw new AssertionError("makePendingRecordingActiveLocked() can only be called from "
+ + "a pending state.");
+ }
+ if (mActiveRecordingRecord != null) {
+ throw new AssertionError("Cannot make pending recording active because another "
+ + "recording is already active.");
+ }
+ if (mPendingRecordingRecord == null) {
+ throw new AssertionError("Pending recording should exist when in a PENDING"
+ + " state.");
+ }
+ // Swap the pending recording to the active recording and start it
+ RecordingRecord recordingToStart = mActiveRecordingRecord = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ // Start recording if start() has been called before video encoder is setup.
+ if (startRecordingPaused) {
+ setState(State.PAUSED);
+ } else {
+ setState(State.RECORDING);
+ }
+
+ return recordingToStart;
+ }
+
+ /**
+ * Actually starts a recording on the sequential executor.
+ *
+ * <p>This is intended to be called while unlocked on the sequential executor. It should only
+ * be called immediately after a pending recording has just been made active. The recording
+ * passed to this method should be the newly-made-active recording.
+ */
+ @ExecutedBy("mSequentialExecutor")
+ private void startRecording(@NonNull RecordingRecord recordingToStart,
+ boolean startRecordingPaused) {
+ // Start pending recording inline since we are already on sequential executor.
+ startInternal(recordingToStart);
+ if (startRecordingPaused) {
+ pauseInternal(recordingToStart);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void updateInProgressStatusEvent() {
+ if (mInProgressRecording != null) {
+ mInProgressRecording.updateVideoRecordEvent(
+ VideoRecordEvent.status(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ @NonNull
+ RecordingStats getInProgressRecordingStats() {
+ return RecordingStats.of(mRecordingDurationNs, mRecordingBytes,
+ AudioStats.of(internalAudioStateToAudioStatsState(mAudioState), mAudioErrorCause));
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ <T> T getObservableData(@NonNull StateObservable<T> observable) {
+ ListenableFuture<T> future = observable.fetchData();
+ try {
+ // A StateObservable always has a state available and the future got from fetchData()
+ // will complete immediately.
+ return future.get();
+ } catch (ExecutionException | InterruptedException e) {
+ throw new IllegalStateException(e);
+ }
+ }
+
+ boolean isAudioSupported() {
+ return getObservableData(mMediaSpec).getAudioSpec().getChannelCount()
+ != AudioSpec.CHANNEL_COUNT_NONE;
+ }
+
+ @GuardedBy("mLock")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void setState(@NonNull State state) {
+ // If we attempt to transition to the same state, then we likely have a logic error.
+ // All state transitions should be intentional, so throw an AssertionError here.
+ if (mState == state) {
+ throw new AssertionError("Attempted to transition to state " + state + ", but "
+ + "Recorder is already in state " + state);
+ }
+
+ Logger.d(TAG, "Transitioning Recorder internal state: " + mState + " --> " + state);
+ // If we are transitioning from a non-pending state to a pending state, we need to store
+ // the non-pending state so we can transition back if the pending recording is stopped
+ // before it becomes active.
+ StreamInfo.StreamState streamState = null;
+ if (PENDING_STATES.contains(state)) {
+ if (!PENDING_STATES.contains(mState)) {
+ if (!VALID_NON_PENDING_STATES_WHILE_PENDING.contains(mState)) {
+ throw new AssertionError(
+ "Invalid state transition. Should not be transitioning "
+ + "to a PENDING state from state " + mState);
+ }
+ mNonPendingState = mState;
+ streamState = internalStateToStreamState(mNonPendingState);
+ }
+ } else if (mNonPendingState != null) {
+ // Transitioning out of a pending state. Clear the non-pending state.
+ mNonPendingState = null;
+ }
+
+ mState = state;
+ if (streamState == null) {
+ streamState = internalStateToStreamState(mState);
+ }
+ mStreamInfo.setState(StreamInfo.of(mStreamId, streamState));
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void setLatestSurface(@Nullable Surface surface) {
+ if (mLatestSurface == surface) {
+ return;
+ }
+ mLatestSurface = surface;
+ synchronized (mLock) {
+ setStreamId(surface != null ? surface.hashCode() : StreamInfo.STREAM_ID_ANY);
+ }
+ }
+
+ @GuardedBy("mLock")
+ private void setStreamId(int streamId) {
+ if (mStreamId == streamId) {
+ return;
+ }
+ Logger.d(TAG, "Transitioning streamId: " + mStreamId + " --> " + streamId);
+ mStreamId = streamId;
+ mStreamInfo.setState(StreamInfo.of(streamId, internalStateToStreamState(mState)));
+ }
+
+ /**
+ * Updates the non-pending state while in a pending state.
+ *
+ * If called from a non-pending state, an assertion error will be thrown.
+ */
+ @GuardedBy("mLock")
+ private void updateNonPendingState(@NonNull State state) {
+ if (!PENDING_STATES.contains(mState)) {
+ throw new AssertionError("Can only updated non-pending state from a pending state, "
+ + "but state is " + mState);
+ }
+
+ if (!VALID_NON_PENDING_STATES_WHILE_PENDING.contains(state)) {
+ throw new AssertionError(
+ "Invalid state transition. State is not a valid non-pending state while in a "
+ + "pending state: " + state);
+ }
+
+ if (mNonPendingState != state) {
+ mNonPendingState = state;
+ mStreamInfo.setState(
+ StreamInfo.of(mStreamId, internalStateToStreamState(state)));
+ }
+ }
+
+ /**
+ * Convenience for restoring the state to the non-pending state.
+ *
+ * <p>This is equivalent to calling setState(mNonPendingState), but performs a few safety
+ * checks. This can only be called while in a pending state.
+ */
+ @GuardedBy("mLock")
+ private void restoreNonPendingState() {
+ if (!PENDING_STATES.contains(mState)) {
+ throw new AssertionError("Cannot restore non-pending state when in state " + mState);
+ }
+
+ setState(mNonPendingState);
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void setAudioState(@NonNull AudioState audioState) {
+ Logger.d(TAG, "Transitioning audio state: " + mAudioState + " --> " + audioState);
+ mAudioState = audioState;
+ }
+
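+ // Maps the resolved CamcorderProfile file format to a MediaMuxer output format, falling
+ // back to defaultMuxerFormat when no profile is available or the format is unrecognized.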
+ private static int supportedMuxerFormatOrDefaultFrom(
+ @Nullable CamcorderProfileProxy profileProxy, int defaultMuxerFormat) {
+ if (profileProxy != null) {
+ switch (profileProxy.getFileFormat()) {
+ case MediaRecorder.OutputFormat.MPEG_4:
+ return android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+ case MediaRecorder.OutputFormat.WEBM:
+ return android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM;
+ case MediaRecorder.OutputFormat.THREE_GPP:
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
+ // MediaMuxer does not support 3GPP on pre-Android O (API 26) devices.
+ return android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+ } else {
+ return android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP;
+ }
+ default:
+ break;
+ }
+ }
+ return defaultMuxerFormat;
+ }
+
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ @AutoValue
+ abstract static class RecordingRecord implements AutoCloseable {
+
+ private final CloseGuardHelper mCloseGuard = CloseGuardHelper.create();
+
+ private final AtomicBoolean mInitialized = new AtomicBoolean(false);
+
+ private final AtomicReference<MediaMuxerSupplier> mMediaMuxerSupplier =
+ new AtomicReference<>(null);
+
+ private final AtomicReference<AudioSourceSupplier> mAudioSourceSupplier =
+ new AtomicReference<>(null);
+
+ private final AtomicReference<Consumer<Uri>> mRecordingFinalizer =
+ new AtomicReference<>(ignored -> {
+ /* no-op by default */
+ });
+
+ @NonNull
+ static RecordingRecord from(@NonNull SucklessPendingRecording pendingRecording, long recordingId) {
+ return new AutoValue_SucklessRecorder_RecordingRecord(
+ pendingRecording.getOutputOptions(),
+ pendingRecording.getListenerExecutor(),
+ pendingRecording.getEventListener(),
+ pendingRecording.isAudioEnabled(),
+ recordingId
+ );
+ }
+
+ @NonNull
+ abstract OutputOptions getOutputOptions();
+
+ @Nullable
+ abstract Executor getCallbackExecutor();
+
+ @Nullable
+ abstract Consumer<VideoRecordEvent> getEventListener();
+
+ abstract boolean hasAudioEnabled();
+
+ abstract long getRecordingId();
+
+ /**
+ * Performs initialization for this recording.
+ *
+ * @throws AssertionError if this recording has already been initialized.
+ * @throws IOException if it fails to duplicate the file descriptor when the
+ * {@link #getOutputOptions() OutputOptions} is {@link FileDescriptorOutputOptions}.
+ */
+ void initializeRecording(@NonNull Context context) throws IOException {
+ if (mInitialized.getAndSet(true)) {
+ throw new AssertionError("Recording " + this + " has already been initialized");
+ }
+ OutputOptions outputOptions = getOutputOptions();
+
+ final ParcelFileDescriptor dupedParcelFileDescriptor;
+ if (outputOptions instanceof FileDescriptorOutputOptions) {
+ // Duplicate the ParcelFileDescriptor so that the input descriptor can be safely closed,
+ // or throw an IOException if it fails.
+ dupedParcelFileDescriptor =
+ ((FileDescriptorOutputOptions) outputOptions)
+ .getParcelFileDescriptor().dup();
+ } else {
+ dupedParcelFileDescriptor = null;
+ }
+
+ mCloseGuard.open("finalizeRecording");
+
+ MediaMuxerSupplier mediaMuxerSupplier =
+ (muxerOutputFormat, outputUriCreatedCallback) -> {
+ MediaMuxer mediaMuxer;
+ Uri outputUri = Uri.EMPTY;
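+ // Only MuxerOutputOptions is supported here: the MediaMuxer instance is supplied
+ // by the caller, so no output Uri is produced and Uri.EMPTY is reported back.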
+ if (outputOptions instanceof MuxerOutputOptions) {
+ mediaMuxer = ((MuxerOutputOptions) outputOptions).getMediaMuxer();
+ } else {
+ throw new AssertionError(
+ "Invalid output options type: "
+ + outputOptions.getClass().getSimpleName());
+ }
+ outputUriCreatedCallback.accept(outputUri);
+ return mediaMuxer;
+ };
+ mMediaMuxerSupplier.set(mediaMuxerSupplier);
+
+ Consumer<Uri> recordingFinalizer = null;
+ if (hasAudioEnabled()) {
+ if (Build.VERSION.SDK_INT >= 31) {
+ // Use anonymous inner class instead of lambda since we need to propagate
+ // permission requirements
+ @SuppressWarnings("Convert2Lambda")
+ AudioSourceSupplier audioSourceSupplier = new AudioSourceSupplier() {
+ @NonNull
+ @Override
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ public AudioSource get(@NonNull AudioSource.Settings settings,
+ @NonNull Executor executor)
+ throws AudioSourceAccessException {
+ // Context will only be held in local scope of the supplier so it will
+ // not be retained after performOneTimeAudioSourceCreation() is called.
+ return new AudioSource(settings, executor, context);
+ }
+ };
+ mAudioSourceSupplier.set(audioSourceSupplier);
+ } else {
+ // Use anonymous inner class instead of lambda since we need to propagate
+ // permission requirements
+ @SuppressWarnings("Convert2Lambda")
+ AudioSourceSupplier audioSourceSupplier = new AudioSourceSupplier() {
+ @NonNull
+ @Override
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ public AudioSource get(@NonNull AudioSource.Settings settings,
+ @NonNull Executor executor)
+ throws AudioSourceAccessException {
+ // Do not set (or retain) context on other API levels
+ return new AudioSource(settings, executor, null);
+ }
+ };
+ mAudioSourceSupplier.set(audioSourceSupplier);
+ }
+ }
+
+ if (outputOptions instanceof MediaStoreOutputOptions) {
+ MediaStoreOutputOptions mediaStoreOutputOptions =
+ (MediaStoreOutputOptions) outputOptions;
+ // TODO(b/201946954): Investigate whether we should add a setting to disable
+ // scan/update to allow users to perform it themselves.
+ if (Build.VERSION.SDK_INT >= 29) {
+ recordingFinalizer = outputUri -> {
+ if (outputUri.equals(Uri.EMPTY)) {
+ return;
+ }
+ ContentValues contentValues = new ContentValues();
+ contentValues.put(MediaStore.Video.Media.IS_PENDING, NOT_PENDING);
+ mediaStoreOutputOptions.getContentResolver().update(outputUri,
+ contentValues, null, null);
+ };
+ } else {
+ // Context will only be held in local scope of the consumer so it will not be
+ // retained after finalizeOutputFile() is called.
+ recordingFinalizer = outputUri -> {
+ if (outputUri.equals(Uri.EMPTY)) {
+ return;
+ }
+ String filePath = OutputUtil.getAbsolutePathFromUri(
+ mediaStoreOutputOptions.getContentResolver(), outputUri,
+ MEDIA_COLUMN);
+ if (filePath != null) {
+ // Use null mime type list to have MediaScanner derive mime type from
+ // extension
+ MediaScannerConnection.scanFile(context,
+ new String[]{filePath}, /*mimeTypes=*/null, (path, uri) -> {
+ if (uri == null) {
+ Logger.e(TAG, String.format("File scanning operation "
+ + "failed [path: %s]", path));
+ } else {
+ Logger.d(TAG, String.format("File scan completed "
+ + "successfully [path: %s, URI: %s]", path,
+ uri));
+ }
+ });
+ } else {
+ Logger.d(TAG,
+ "Skipping media scanner scan. Unable to retrieve file path "
+ + "from URI: " + outputUri);
+ }
+ };
+ }
+ } else if (outputOptions instanceof FileDescriptorOutputOptions) {
+ recordingFinalizer = ignored -> {
+ try {
+ // dupedParcelFileDescriptor should be non-null.
+ dupedParcelFileDescriptor.close();
+ } catch (IOException e) {
+ // IOException is not expected to be thrown while closing
+ // ParcelFileDescriptor.
+ Logger.e(TAG, "Failed to close dup'd ParcelFileDescriptor", e);
+ }
+ };
+ }
+
+ if (recordingFinalizer != null) {
+ mRecordingFinalizer.set(recordingFinalizer);
+ }
+ }
+
+ /**
+ * Updates the recording status and callback to users.
+ */
+ void updateVideoRecordEvent(@NonNull VideoRecordEvent event) {
+ if (!Objects.equals(event.getOutputOptions(), getOutputOptions())) {
+ throw new AssertionError("Attempted to update event listener with event from "
+ + "incorrect recording [Recording: " + event.getOutputOptions()
+ + ", Expected: " + getOutputOptions() + "]");
+ }
+ String message = "Sending VideoRecordEvent " + event.getClass().getSimpleName();
+ if (event instanceof VideoRecordEvent.Finalize) {
+ VideoRecordEvent.Finalize finalizeEvent = (VideoRecordEvent.Finalize) event;
+ if (finalizeEvent.hasError()) {
+ message += String.format(" [error: %s]",
+ VideoRecordEvent.Finalize.errorToString(
+ finalizeEvent.getError()));
+ }
+ }
+ Logger.d(TAG, message);
+ if (getCallbackExecutor() != null && getEventListener() != null) {
+ try {
+ getCallbackExecutor().execute(() -> getEventListener().accept(event));
+ } catch (RejectedExecutionException e) {
+ Logger.e(TAG, "The callback executor is invalid.", e);
+ }
+ }
+ }
+
+ /**
+ * Creates an {@link AudioSource} for this recording.
+ *
+ * An audio source can only be created once per recording, so subsequent calls to this
+ * method will throw an {@link AssertionError}.
+ *
+ * <p>Calling this method when audio is not enabled for this recording will also throw an
+ * {@link AssertionError}.
+ */
+ @NonNull
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ AudioSource performOneTimeAudioSourceCreation(
+ @NonNull AudioSource.Settings settings, @NonNull Executor audioSourceExecutor)
+ throws AudioSourceAccessException {
+ if (!hasAudioEnabled()) {
+ throw new AssertionError("Recording does not have audio enabled. Unable to create"
+ + " audio source for recording " + this);
+ }
+
+ AudioSourceSupplier audioSourceSupplier = mAudioSourceSupplier.getAndSet(null);
+ if (audioSourceSupplier == null) {
+ throw new AssertionError("One-time audio source creation has already occurred for"
+ + " recording " + this);
+ }
+
+ return audioSourceSupplier.get(settings, audioSourceExecutor);
+ }
+
+ /**
+ * Creates a {@link MediaMuxer} for this recording.
+ *
+ * <p>A media muxer can only be created once per recording, so subsequent calls to this
+ * method will throw an {@link AssertionError}.
+ *
+ * @param muxerOutputFormat the output file format.
+ * @param outputUriCreatedCallback A callback that will send the returned media muxer's
+ * output {@link Uri}. It will be {@link Uri#EMPTY} if the
+ * {@link #getOutputOptions() OutputOptions} is
+ * {@link FileDescriptorOutputOptions}.
+ * Note: This callback will be called inline.
+ * @return the media muxer.
+ * @throws IOException if the creation of the media muxer fails.
+ * @throws AssertionError if the recording is not initialized or this method has already
+ * been called.
+ */
+ @NonNull
+ MediaMuxer performOneTimeMediaMuxerCreation(int muxerOutputFormat,
+ @NonNull Consumer<Uri> outputUriCreatedCallback) throws IOException {
+ if (!mInitialized.get()) {
+ throw new AssertionError("Recording " + this + " has not been initialized");
+ }
+ MediaMuxerSupplier mediaMuxerSupplier = mMediaMuxerSupplier.getAndSet(null);
+ if (mediaMuxerSupplier == null) {
+ throw new AssertionError("One-time media muxer creation has already occurred for"
+ + " recording " + this);
+ }
+ return mediaMuxerSupplier.get(muxerOutputFormat, outputUriCreatedCallback);
+ }
+
+ /**
+ * Performs final operations required to finalize this recording.
+ *
+ * Recording finalization can only occur once. Any subsequent calls to this method or
+ * {@link #close()} will throw an {@link AssertionError}.
+ *
+ * <p>Finalizing an uninitialized recording is a no-op.
+ *
+ * @param uri The uri of the output file.
+ */
+ void finalizeRecording(@NonNull Uri uri) {
+ if (!mInitialized.get()) {
+ return;
+ }
+ finalizeRecordingInternal(mRecordingFinalizer.getAndSet(null), uri);
+ }
+
+ /**
+ * Close this recording, as if calling {@link #finalizeRecording(Uri)} with parameter
+ * {@link Uri#EMPTY}.
+ *
+ * <p>This method is equivalent to calling {@link #finalizeRecording(Uri)} with parameter
+ * {@link Uri#EMPTY}.
+ *
+ * <p>Recording finalization can only occur once. Any subsequent calls to this method or
+ * {@link #finalizeRecording(Uri)} will throw an {@link AssertionError}.
+ *
+ * <p>Closing an uninitialized recording is a no-op.
+ */
+ @Override
+ public void close() {
+ finalizeRecording(Uri.EMPTY);
+ }
+
+ @Override
+ @SuppressWarnings("GenericException") // super.finalize() throws Throwable
+ protected void finalize() throws Throwable {
+ try {
+ mCloseGuard.warnIfOpen();
+ Consumer<Uri> finalizer = mRecordingFinalizer.getAndSet(null);
+ if (finalizer != null) {
+ finalizeRecordingInternal(finalizer, Uri.EMPTY);
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private void finalizeRecordingInternal(@Nullable Consumer<Uri> finalizer,
+ @NonNull Uri uri) {
+ if (finalizer == null) {
+ throw new AssertionError(
+ "Recording " + this + " has already been finalized");
+ }
+ mCloseGuard.close();
+ finalizer.accept(uri);
+ }
+
+ private interface MediaMuxerSupplier {
+ @NonNull
+ MediaMuxer get(int muxerOutputFormat, @NonNull Consumer<Uri> outputUriCreatedCallback)
+ throws IOException;
+ }
+
+ private interface AudioSourceSupplier {
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @NonNull
+ AudioSource get(@NonNull AudioSource.Settings settings,
+ @NonNull Executor audioSourceExecutor) throws AudioSourceAccessException;
+ }
+ }
+
+ /**
+ * Builder class for {@link Recorder} objects.
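+ *
+ * <p>A minimal, illustrative configuration (values are arbitrary; only methods declared
+ * below are used):
+ * <pre>{@code
+ * SucklessRecorder.Builder builder = new SucklessRecorder.Builder()
+ *         .setQualitySelector(QualitySelector.from(Quality.HD))
+ *         .setTargetVideoEncodingBitRate(5_000_000);
+ * }</pre>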
+ */
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ public static final class Builder {
+
+ private final MediaSpec.Builder mMediaSpecBuilder;
+ private Executor mExecutor = null;
+ private EncoderFactory mVideoEncoderFactory = DEFAULT_ENCODER_FACTORY;
+ private EncoderFactory mAudioEncoderFactory = DEFAULT_ENCODER_FACTORY;
+
+ /**
+ * Constructor for {@code Recorder.Builder}.
+ *
+ * Creates a builder which is pre-populated with appropriate default configuration
+ * options.
+ */
+ public Builder() {
+ mMediaSpecBuilder = MediaSpec.builder();
+ }
+
+ /**
+ * Sets the {@link Executor} that runs the Recorder background task.
+ *
+ * The executor is used to run the Recorder tasks, the audio encoding and the video
+ * encoding. For the best performance, it's recommended to be an {@link Executor} that is
+ * capable of running at least two tasks concurrently, such as a
+ * {@link java.util.concurrent.ThreadPoolExecutor} backed by 2 or more threads.
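+ *
+ * <p>For example (illustrative; {@code builder} is a {@code SucklessRecorder.Builder}):
+ * <pre>{@code
+ * builder.setExecutor(Executors.newFixedThreadPool(2));
+ * }</pre>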
+ *
+ * If not set, the Recorder will be run on the IO executor internally managed by CameraX.
+ */
+ @NonNull
+ public Builder setExecutor(@NonNull Executor executor) {
+ Preconditions.checkNotNull(executor, "The specified executor can't be null.");
+ mExecutor = executor;
+ return this;
+ }
+
+ // Usually users can use the CameraX predefined configuration for creating a recorder. We
+ // may see which options of MediaSpec to be exposed.
+
+ /**
+ * Sets the {@link QualitySelector} of this Recorder.
+ *
+ * The provided quality selector is used to select the resolution of the recording
+ * depending on the resolutions supported by the camera and codec capabilities.
+ *
+ * If no quality selector is provided, the default is
+ * {@link Recorder#DEFAULT_QUALITY_SELECTOR}.
+ *
+ * {@link #setAspectRatio(int)} can be used to specify the intended video aspect
+ * ratio.
+ *
+ * @see QualitySelector
+ * @see #setAspectRatio(int)
+ */
+ @NonNull
+ public Builder setQualitySelector(@NonNull QualitySelector qualitySelector) {
+ Preconditions.checkNotNull(qualitySelector,
+ "The specified quality selector can't be null.");
+ mMediaSpecBuilder.configureVideo(
+ builder -> builder.setQualitySelector(qualitySelector));
+ return this;
+ }
+
+ /**
+ * Sets the intended video encoding bitrate for recording.
+ *
+ * The target video encoding bitrate attempts to keep the actual video encoding
+ * bitrate close to the requested {@code bitrate}. Bitrate may vary during a recording
+ * depending on the scene
+ * being recorded.
+ *
+ * Additional checks will be performed on the requested {@code bitrate} to make sure the
+ * specified bitrate is applicable, and sometimes the passed bitrate will be changed
+ * internally to ensure the video recording can proceed smoothly based on the
+ * capabilities of the platform.
+ *
+ * This API only affects the video stream and should not be considered the
+ * target for the entire recording. The audio stream's bitrate is not affected by this API.
+ *
+ * If this method isn't called, an appropriate bitrate for normal video
+ * recording is selected by default. Only call this method if a custom bitrate is desired.
+ *
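+ * <p>For example (illustrative value; {@code builder} is a {@code SucklessRecorder.Builder}):
+ * <pre>{@code
+ * builder.setTargetVideoEncodingBitRate(5_000_000); // request roughly 5 Mbps for the video stream
+ * }</pre>
+ *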
+ * @param bitrate the target video encoding bitrate in bits per second.
+ * @throws IllegalArgumentException if bitrate is 0 or less.
+ */
+ @NonNull
+ public Builder setTargetVideoEncodingBitRate(@IntRange(from = 1) int bitrate) {
+ if (bitrate <= 0) {
+ throw new IllegalArgumentException("The requested target bitrate " + bitrate
+ + " is not supported. Target bitrate must be greater than 0.");
+ }
+
+ mMediaSpecBuilder.configureVideo(
+ builder -> builder.setBitrate(new Range<>(bitrate, bitrate)));
+ return this;
+ }
+
+ /**
+ * Sets the video aspect ratio of this Recorder.
+ *
+ * The final video resolution will be based on the input aspect ratio and the
+ * QualitySelector in {@link #setQualitySelector(QualitySelector)}. Both settings will be
+ * respected. For example, if the aspect ratio is 4:3 and the preferred quality in
+ * QualitySelector is HD, then a HD quality resolution with 4:3 aspect ratio such as
+ * 1280x960 or 960x720 will be used. CameraX will choose an appropriate one depending on
+ * the resolutions supported by the camera and the codec capabilities. With this setting,
+ * no other aspect ratios (such as 16:9) will be used, nor any other qualities (such as
+ * UHD, FHD and SD). If no resolution with the settings can be found, it will fail to
+ * bind VideoCapture. Therefore, a recommended way is to provide a flexible
+ * QualitySelector if there is no specific video quality requirement, such as the setting
+ * in {@link Recorder#DEFAULT_QUALITY_SELECTOR}.
+ *
+ * The default value is {@link AspectRatio#RATIO_DEFAULT}. If no aspect ratio is set, the
+ * selected resolution will be based only on the QualitySelector.
+ *
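+ * <p>For example (illustrative combination; {@code builder} is a {@code SucklessRecorder.Builder}):
+ * <pre>{@code
+ * builder.setQualitySelector(QualitySelector.from(Quality.HD))
+ *        .setAspectRatio(AspectRatio.RATIO_4_3); // e.g. selects 1280x960 or 960x720
+ * }</pre>
+ *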
+ * @param aspectRatio the aspect ratio. Possible values are {@link AspectRatio#RATIO_4_3}
+ * and {@link AspectRatio#RATIO_16_9}.
+ *
+ * @see #setQualitySelector(QualitySelector)
+ */
+ @NonNull
+ public Builder setAspectRatio(@AspectRatio.Ratio int aspectRatio) {
+ mMediaSpecBuilder.configureVideo(builder -> builder.setAspectRatio(aspectRatio));
+ return this;
+ }
+
+ /**
+ * Sets the audio source for recordings with audio enabled.
+ *
+ * This will only set the source of audio for recordings, but audio must still be
+ * enabled on a per-recording basis with {@link SucklessPendingRecording#withAudioEnabled()}
+ * before starting the recording.
+ *
+ * @param source The audio source to use. One of {@link AudioSpec#SOURCE_AUTO} or
+ * {@link AudioSpec#SOURCE_CAMCORDER}. Default is
+ * {@link AudioSpec#SOURCE_AUTO}.
+ */
+ @NonNull
+ Builder setAudioSource(@AudioSpec.Source int source) {
+ mMediaSpecBuilder.configureAudio(builder -> builder.setSource(source));
+ return this;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @NonNull
+ Builder setVideoEncoderFactory(@NonNull EncoderFactory videoEncoderFactory) {
+ mVideoEncoderFactory = videoEncoderFactory;
+ return this;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @NonNull
+ Builder setAudioEncoderFactory(@NonNull EncoderFactory audioEncoderFactory) {
+ mAudioEncoderFactory = audioEncoderFactory;
+ return this;
+ }
+
+ /**
+ * Builds the {@link Recorder} instance.
+ *
+ * The {@code build()} method can be called multiple times, generating a new
+ * {@link Recorder} instance each time. The returned instance is configured with the
+ * options set on this builder.
+ */
+ @NonNull
+ public SucklessRecorder build() {
+ return new SucklessRecorder(mExecutor, mMediaSpecBuilder.build(), mVideoEncoderFactory,
+ mAudioEncoderFactory);
+ }
+ }
+}
diff --git a/app/src/main/java/androidx/camera/video/SucklessRecording.java b/app/src/main/java/androidx/camera/video/SucklessRecording.java
new file mode 100644
index 0000000..89d62c8
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/SucklessRecording.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video;
+
+import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
+
+import android.annotation.SuppressLint;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.RestrictTo;
+import androidx.camera.core.impl.utils.CloseGuardHelper;
+import androidx.core.util.Consumer;
+import androidx.core.util.Preconditions;
+
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Provides controls for the currently active recording.
+ *
+ * An active recording is created by starting a pending recording with
+ * {@link PendingRecording#start(Executor, Consumer)}. If there are no errors starting the
+ * recording, upon creation, an active recording will provide controls to pause, resume or stop a
+ * recording. If errors occur while starting the recording, the active recording will be
+ * instantiated in a {@link VideoRecordEvent.Finalize finalized} state, and all controls will be
+ * no-ops. The state of the recording can be observed by the video record event listener provided
+ * to {@link PendingRecording#start(Executor, Consumer)} when starting the recording.
+ *
+ * Either {@link #stop()} or {@link #close()} can be called when it is desired to
+ * stop the recording. If {@link #stop()} or {@link #close()} are not called on this object
+ * before it is no longer referenced, it will be automatically stopped at a future point in time
+ * when the object is garbage collected, and no new recordings can be started from the same
+ * {@link Recorder} that generated the object until that occurs.
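+ *
+ * <p>A minimal usage sketch (illustrative only; it assumes {@code SucklessPendingRecording#start}
+ * mirrors {@link PendingRecording#start(Executor, Consumer)}):
+ * <pre>{@code
+ * SucklessRecording recording = pendingRecording.start(executor, videoRecordEventListener);
+ * recording.pause();
+ * recording.resume();
+ * recording.stop(); // equivalent to close()
+ * }</pre>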
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+@SuppressLint("RestrictedApi")
+public final class SucklessRecording implements AutoCloseable {
+
+ // Indicates the recording has been explicitly stopped by users.
+ private final AtomicBoolean mIsClosed = new AtomicBoolean(false);
+ private final SucklessRecorder mRecorder;
+ private final long mRecordingId;
+ private final OutputOptions mOutputOptions;
+ private final CloseGuardHelper mCloseGuard = CloseGuardHelper.create();
+
+ SucklessRecording(@NonNull SucklessRecorder recorder, long recordingId, @NonNull OutputOptions options,
+ boolean finalizedOnCreation) {
+ mRecorder = recorder;
+ mRecordingId = recordingId;
+ mOutputOptions = options;
+
+ if (finalizedOnCreation) {
+ mIsClosed.set(true);
+ } else {
+ mCloseGuard.open("stop");
+ }
+ }
+
+ /**
+ * Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID.
+ *
+ * The recording ID is expected to be unique to the recorder that generated the pending
+ * recording.
+ */
+ @NonNull
+ static SucklessRecording from(@NonNull SucklessPendingRecording pendingRecording, long recordingId) {
+ Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
+ return new SucklessRecording(pendingRecording.getRecorder(),
+ recordingId,
+ pendingRecording.getOutputOptions(),
+ /*finalizedOnCreation=*/false);
+ }
+
+ /**
+ * Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID in a
+ * finalized state.
+ *
+ * This can be used if there was an error setting up the active recording and it would not
+ * be able to be started.
+ *
+ * The recording ID is expected to be unique to the recorder that generated the pending
+ * recording.
+ */
+ @NonNull
+ static SucklessRecording createFinalizedFrom(@NonNull SucklessPendingRecording pendingRecording,
+ long recordingId) {
+ Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
+ return new SucklessRecording(pendingRecording.getRecorder(),
+ recordingId,
+ pendingRecording.getOutputOptions(),
+ /*finalizedOnCreation=*/true);
+ }
+
+ @NonNull
+ OutputOptions getOutputOptions() {
+ return mOutputOptions;
+ }
+
+ /**
+ * Pauses the current recording if active.
+ *
+ * Successful pausing of a recording will generate a {@link VideoRecordEvent.Pause} event
+ * which will be sent to the listener passed to
+ * {@link PendingRecording#start(Executor, Consumer)}.
+ *
+ * If the recording has already been paused or has been finalized internally, this is a
+ * no-op.
+ *
+ * @throws IllegalStateException if the recording has been stopped with
+ * {@link #close()} or {@link #stop()}.
+ */
+ public void pause() {
+ if (mIsClosed.get()) {
+ throw new IllegalStateException("The recording has been stopped.");
+ }
+ mRecorder.pause(this);
+ }
+
+ /**
+ * Resumes the current recording if paused.
+ *
+ * Successful resuming of a recording will generate a {@link VideoRecordEvent.Resume} event
+ * which will be sent to the listener passed to
+ * {@link PendingRecording#start(Executor, Consumer)}.
+ *
+ * If the recording is active or has been finalized internally, this is a no-op.
+ *
+ * @throws IllegalStateException if the recording has been stopped with
+ * {@link #close()} or {@link #stop()}.
+ */
+ public void resume() {
+ if (mIsClosed.get()) {
+ throw new IllegalStateException("The recording has been stopped.");
+ }
+ mRecorder.resume(this);
+ }
+
+ /**
+ * Stops the recording, as if calling {@link #close()}.
+ *
+ * This method is equivalent to calling {@link #close()}.
+ */
+ public void stop() {
+ close();
+ }
+
+ /**
+ * Close this recording.
+ *
+ * Once {@link #stop()} or {@code close()} is called, all methods for controlling the state of
+ * this recording besides {@link #stop()} or {@code close()} will throw an
+ * {@link IllegalStateException}.
+ *
+ * Once an active recording has been closed, the next recording can be started with
+ * {@link PendingRecording#start(Executor, Consumer)}.
+ *
+ * This method is idempotent; if the recording has already been closed or has been
+ * finalized internally, calling {@link #stop()} or {@code close()} is a no-op.
+ *
+ * This method is invoked automatically on active recording instances managed by the {@code
+ * try-with-resources} statement.
+ */
+ @Override
+ public void close() {
+ mCloseGuard.close();
+ if (mIsClosed.getAndSet(true)) {
+ return;
+ }
+ mRecorder.stop(this);
+ }
+
+ @Override
+ @SuppressWarnings("GenericException") // super.finalize() throws Throwable
+ protected void finalize() throws Throwable {
+ try {
+ mCloseGuard.warnIfOpen();
+ stop();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /** Returns the recording ID which is unique to the recorder that generated this recording. */
+ long getRecordingId() {
+ return mRecordingId;
+ }
+
+ /**
+ * Returns whether the recording is closed.
+ *
+ * The returned value does not reflect the state of the recording; it only reflects
+ * whether {@link #stop()} or {@link #close()} was called on this object.
+ *
+ * The state of the recording should be checked from the listener passed to
+ * {@link PendingRecording#start(Executor, Consumer)}. Once the active recording is
+ * stopped, a {@link VideoRecordEvent.Finalize} event will be sent to the listener.
+ *
+ * @hide
+ */
+ @RestrictTo(LIBRARY_GROUP)
+ public boolean isClosed() {
+ return mIsClosed.get();
+ }
+}
+
diff --git a/app/src/main/java/androidx/camera/video/internal/encoder/SucklessEncoderImpl.java b/app/src/main/java/androidx/camera/video/internal/encoder/SucklessEncoderImpl.java
new file mode 100644
index 0000000..556fc8a
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/internal/encoder/SucklessEncoderImpl.java
@@ -0,0 +1,1690 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video.internal.encoder;
+
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.CONFIGURED;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.ERROR;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.PAUSED;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.PENDING_RELEASE;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.PENDING_START;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.PENDING_START_PAUSED;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.RELEASED;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.STARTED;
+import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.STOPPING;
+
+import static java.util.Objects.requireNonNull;
+
+import android.annotation.SuppressLint;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.util.Range;
+import android.view.Surface;
+
+import androidx.annotation.DoNotInline;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.camera.core.Logger;
+import androidx.camera.core.impl.Timebase;
+import androidx.camera.core.impl.annotation.ExecutedBy;
+import androidx.camera.core.impl.utils.executor.CameraXExecutors;
+import androidx.camera.core.impl.utils.futures.FutureCallback;
+import androidx.camera.core.impl.utils.futures.Futures;
+import androidx.camera.video.internal.DebugUtils;
+import androidx.camera.video.internal.compat.quirk.AudioEncoderIgnoresInputTimestampQuirk;
+import androidx.camera.video.internal.compat.quirk.CameraUseInconsistentTimebaseQuirk;
+import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
+import androidx.camera.video.internal.compat.quirk.EncoderNotUsePersistentInputSurfaceQuirk;
+import androidx.camera.video.internal.compat.quirk.VideoEncoderSuspendDoesNotIncludeSuspendTimeQuirk;
+import androidx.camera.video.internal.workaround.EncoderFinder;
+import androidx.camera.video.internal.workaround.VideoTimebaseConverter;
+import androidx.concurrent.futures.CallbackToFutureAdapter;
+import androidx.concurrent.futures.CallbackToFutureAdapter.Completer;
+import androidx.core.util.Preconditions;
+
+import com.google.common.util.concurrent.ListenableFuture;
+
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * The encoder implementation.
+ *
+ * An encoder could be either a video encoder or an audio encoder.
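+ *
+ * <p>Illustrative construction only (the executor and {@link EncoderConfig} are assumed to come
+ * from the surrounding CameraX code):
+ * <pre>{@code
+ * Encoder videoEncoder = new SucklessEncoderImpl(backgroundExecutor, videoEncoderConfig);
+ * }</pre>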
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+@SuppressLint("RestrictedApi")
+public class SucklessEncoderImpl implements Encoder {
+
+ enum InternalState {
+ /**
+ * The initial state.
+ */
+ CONFIGURED,
+
+ /**
+ * The state is when encoder is in {@link InternalState#CONFIGURED} state and {@link #start}
+ * is called.
+ */
+ STARTED,
+
+ /**
+ * The state is when encoder is in {@link InternalState#STARTED} state and {@link #pause}
+ * is called.
+ */
+ PAUSED,
+
+ /**
+ * The state is when encoder is in {@link InternalState#STARTED} state and {@link #stop} is
+ * called.
+ */
+ STOPPING,
+
+ /**
+ * The state is when the encoder is in {@link InternalState#STOPPING} state and a
+ * {@link #start} is called. It is an extension of {@link InternalState#STOPPING}.
+ */
+ PENDING_START,
+
+ /**
+ * The state is when the encoder is in {@link InternalState#STOPPING} state, then
+ * {@link #start} and {@link #pause} is called. It is an extension of
+ * {@link InternalState#STOPPING}.
+ */
+ PENDING_START_PAUSED,
+
+ /**
+ * The state is when the encoder is in {@link InternalState#STOPPING} state and a
+ * {@link #release} is called. It is an extension of {@link InternalState#STOPPING}.
+ */
+ PENDING_RELEASE,
+
+ /**
+ * The state is when the encoder encounters an error. Error state is a transitional state
+ * where the encoder user is supposed to wait for {@link EncoderCallback#onEncodeStop} or
+ * {@link EncoderCallback#onEncodeError}. Any method call during this state should be
+ * ignored except {@link #release}.
+ */
+ ERROR,
+
+ /** The state is when the encoder is released. */
+ RELEASED,
+ }
+
+ private static final boolean DEBUG = false;
+ private static final long NO_LIMIT_LONG = Long.MAX_VALUE;
+ private static final Range<Long> NO_RANGE = Range.create(NO_LIMIT_LONG, NO_LIMIT_LONG);
+ private static final long STOP_TIMEOUT_MS = 1000L;
+ private static final int FAKE_BUFFER_INDEX = -9999;
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final String mTag;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Object mLock = new Object();
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final boolean mIsVideoEncoder;
+ private final MediaFormat mMediaFormat;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final MediaCodec mMediaCodec;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final EncoderInput mEncoderInput;
+ private final EncoderInfo mEncoderInfo;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Executor mEncoderExecutor;
+ private final ListenableFuture<Void> mReleasedFuture;
+ private final Completer<Void> mReleasedCompleter;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Queue<Integer> mFreeInputBufferIndexQueue = new ArrayDeque<>();
+ private final Queue<Completer<InputBuffer>> mAcquisitionQueue = new ArrayDeque<>();
+ private final Set<InputBuffer> mInputBufferSet = new HashSet<>();
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Set<EncodedDataImpl> mEncodedDataSet = new HashSet<>();
+ /*
+ * mActivePauseResumeTimeRanges is a queue used to track all active pause/resume time ranges.
+ * An active pause/resume range means the latest output buffer still has not exceeded this
+ * range, so this range is still needed to check for later output buffers. The first element
+ * in the queue is the oldest range and the last element is the newest.
+ */
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Deque<Range<Long>> mActivePauseResumeTimeRanges = new ArrayDeque<>();
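+ /*
+ * Example with illustrative timestamps: pausing at t=10s and resuming at t=12s yields the
+ * range [10s, 12s]. Once an output buffer newer than 12s is seen, updateTotalPausedDuration()
+ * removes the range and adds 2s to mTotalPausedDurationUs, which getAdjustedTimeUs() then
+ * subtracts from later buffer timestamps.
+ */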
+ final Timebase mInputTimebase;
+ final TimeProvider mTimeProvider = new SystemTimeProvider();
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @GuardedBy("mLock")
+ EncoderCallback mEncoderCallback = EncoderCallback.EMPTY;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @GuardedBy("mLock")
+ Executor mEncoderCallbackExecutor = CameraXExecutors.directExecutor();
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ InternalState mState;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ Range<Long> mStartStopTimeRangeUs = NO_RANGE;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ long mTotalPausedDurationUs = 0L;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ boolean mPendingCodecStop = false;
+ // The data timestamp that an encoding stops at. If this timestamp is null, it means the
+ // encoding hasn't received enough data to be stopped.
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ Long mLastDataStopTimestamp = null;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ Future<?> mStopTimeoutFuture = null;
+ private MediaCodecCallback mMediaCodecCallback = null;
+
+ private boolean mIsFlushedAfterEndOfStream = false;
+ private boolean mSourceStoppedSignalled = false;
+ boolean mMediaCodecEosSignalled = false;
+
+ final EncoderFinder mEncoderFinder = new EncoderFinder();
+
+ /**
+ * Creates the encoder with a {@link EncoderConfig}
+ *
+ * @param executor the executor suitable for background task
+ * @param encoderConfig the encoder config
+ * @throws InvalidConfigException when the encoder cannot be configured.
+ */
+ public SucklessEncoderImpl(@NonNull Executor executor, @NonNull EncoderConfig encoderConfig)
+ throws InvalidConfigException {
+ Preconditions.checkNotNull(executor);
+ Preconditions.checkNotNull(encoderConfig);
+
+ mEncoderExecutor = CameraXExecutors.newSequentialExecutor(executor);
+
+ if (encoderConfig instanceof AudioEncoderConfig) {
+ mTag = "AudioEncoder";
+ mIsVideoEncoder = false;
+ mEncoderInput = new ByteBufferInput();
+ } else if (encoderConfig instanceof VideoEncoderConfig) {
+ mTag = "VideoEncoder";
+ mIsVideoEncoder = true;
+ mEncoderInput = new SurfaceInput();
+ } else {
+ throw new InvalidConfigException("Unknown encoder config type");
+ }
+
+ mInputTimebase = encoderConfig.getInputTimebase();
+ Logger.d(mTag, "mInputTimebase = " + mInputTimebase);
+ mMediaFormat = encoderConfig.toMediaFormat();
+ Logger.d(mTag, "mMediaFormat = " + mMediaFormat);
+ mMediaCodec = mEncoderFinder.findEncoder(mMediaFormat);
+ clampVideoBitrateIfNotSupported(mMediaCodec.getCodecInfo(), mMediaFormat);
+ Logger.i(mTag, "Selected encoder: " + mMediaCodec.getName());
+ mEncoderInfo = createEncoderInfo(mIsVideoEncoder, mMediaCodec.getCodecInfo(),
+ encoderConfig.getMimeType());
+ try {
+ reset();
+ } catch (MediaCodec.CodecException e) {
+ throw new InvalidConfigException(e);
+ }
+
+ AtomicReference<Completer<Void>> releaseFutureRef = new AtomicReference<>();
+ mReleasedFuture = Futures.nonCancellationPropagating(
+ CallbackToFutureAdapter.getFuture(completer -> {
+ releaseFutureRef.set(completer);
+ return "mReleasedFuture";
+ }));
+ mReleasedCompleter = Preconditions.checkNotNull(releaseFutureRef.get());
+
+ setState(CONFIGURED);
+ }
+
+ /**
+ * If video bitrate in MediaFormat is not supported by supplied MediaCodecInfo,
+ * clamp bitrate in MediaFormat
+ *
+ * @param mediaCodecInfo MediaCodecInfo object
+ * @param mediaFormat MediaFormat object
+ */
+ private void clampVideoBitrateIfNotSupported(@NonNull MediaCodecInfo mediaCodecInfo,
+ @NonNull MediaFormat mediaFormat) {
+
+ if (!mediaCodecInfo.isEncoder() || !mIsVideoEncoder) {
+ return;
+ }
+
+ try {
+ String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
+ MediaCodecInfo.CodecCapabilities caps = mediaCodecInfo.getCapabilitiesForType(mime);
+ Preconditions.checkArgument(caps != null,
+ "MIME type is not supported");
+
+ if (mediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
+ // We only handle video bitrate issues at this moment.
+ MediaCodecInfo.VideoCapabilities videoCaps = caps.getVideoCapabilities();
+ Preconditions.checkArgument(videoCaps != null,
+ "Not video codec");
+
+ int origBitrate = mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
+ int newBitrate = videoCaps.getBitrateRange().clamp(origBitrate);
+ if (origBitrate != newBitrate) {
+ mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, newBitrate);
+ Logger.d(mTag, "updated bitrate from " + origBitrate
+ + " to " + newBitrate);
+ }
+ }
+ } catch (IllegalArgumentException e) {
+ Logger.w(mTag, "Unexpected error while validating video bitrate", e);
+ }
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void reset() {
+ mStartStopTimeRangeUs = NO_RANGE;
+ mTotalPausedDurationUs = 0L;
+ mActivePauseResumeTimeRanges.clear();
+ mFreeInputBufferIndexQueue.clear();
+
+ // Cancel incomplete acquisitions if exists.
+ for (Completer<InputBuffer> completer : mAcquisitionQueue) {
+ completer.setCancelled();
+ }
+ mAcquisitionQueue.clear();
+
+ mMediaCodec.reset();
+ mIsFlushedAfterEndOfStream = false;
+ mSourceStoppedSignalled = false;
+ mMediaCodecEosSignalled = false;
+ mPendingCodecStop = false;
+ if (mStopTimeoutFuture != null) {
+ mStopTimeoutFuture.cancel(true);
+ mStopTimeoutFuture = null;
+ }
+ if (mMediaCodecCallback != null) {
+ mMediaCodecCallback.stop();
+ }
+ mMediaCodecCallback = new MediaCodecCallback();
+ mMediaCodec.setCallback(mMediaCodecCallback);
+
+ mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (mEncoderInput instanceof SurfaceInput) {
+ ((SurfaceInput) mEncoderInput).resetSurface();
+ }
+ }
+
+ /** Gets the {@link EncoderInput} of the encoder */
+ @Override
+ @NonNull
+ public EncoderInput getInput() {
+ return mEncoderInput;
+ }
+
+ @NonNull
+ @Override
+ public EncoderInfo getEncoderInfo() {
+ return mEncoderInfo;
+ }
+
+ @Override
+ public int getConfiguredBitrate() {
+ int configuredBitrate = 0;
+ if (mMediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
+ configuredBitrate = mMediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
+ }
+ return configuredBitrate;
+ }
+
+ /**
+ * Starts the encoder.
+ *
+ * If the encoder is not started yet, it will first trigger
+ * {@link EncoderCallback#onEncodeStart}. Then continually invoke the
+ * {@link EncoderCallback#onEncodedData} callback until the encoder is paused, stopped or
+ * released. It can call {@link #pause} to pause the encoding after started. If the encoder is
+ * in paused state, then calling this method will resume the encoding.
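+ *
+ * <p>Illustrative call order (not taken from the original sources):
+ * <pre>{@code
+ * encoder.setEncoderCallback(callback, callbackExecutor);
+ * encoder.start();
+ * encoder.pause();  // optional
+ * encoder.start();  // resumes a paused encoder
+ * encoder.stop();
+ * encoder.release();
+ * }</pre>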
+ */
+ @SuppressWarnings("StatementWithEmptyBody") // to better organize the logic and comments
+ @Override
+ public void start() {
+ final long startTriggerTimeUs = generatePresentationTimeUs();
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ mLastDataStopTimestamp = null;
+
+ Logger.d(mTag, "Start on " + DebugUtils.readableUs(startTriggerTimeUs));
+ try {
+ if (mIsFlushedAfterEndOfStream) {
+ // If the codec is flushed after an end-of-stream, it was never
+ // signalled that the source stopped, so we will reset the codec
+ // before starting it again.
+ reset();
+ }
+ mStartStopTimeRangeUs = Range.create(startTriggerTimeUs, NO_LIMIT_LONG);
+ mMediaCodec.start();
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(true);
+ }
+ setState(STARTED);
+ break;
+ case PAUSED:
+ // Resume
+
+ // The Encoder has been resumed, so reset the stop timestamp flags.
+ mLastDataStopTimestamp = null;
+
+ final Range<Long> pauseRange = mActivePauseResumeTimeRanges.removeLast();
+ Preconditions.checkState(
+ pauseRange != null && pauseRange.getUpper() == NO_LIMIT_LONG,
+ "There should be a \"pause\" before \"resume\"");
+ final long pauseTimeUs = pauseRange.getLower();
+ mActivePauseResumeTimeRanges.addLast(
+ Range.create(pauseTimeUs, startTriggerTimeUs));
+ // Do not update total paused duration here since current output buffer may
+ // still before the pause range.
+
+ Logger.d(mTag, "Resume on " + DebugUtils.readableUs(startTriggerTimeUs)
+ + "\nPaused duration = " + DebugUtils.readableUs(
+ (startTriggerTimeUs - pauseTimeUs))
+ );
+
+ if (!mIsVideoEncoder && DeviceQuirks.get(
+ AudioEncoderIgnoresInputTimestampQuirk.class) != null) {
+ // Do nothing. Since we keep handling audio data in the codec after
+ // paused, we don't have to resume the codec and the input source.
+ } else if (mIsVideoEncoder && DeviceQuirks.get(
+ VideoEncoderSuspendDoesNotIncludeSuspendTimeQuirk.class) != null) {
+ // Do nothing. Since we don't pause the codec when paused, we don't have
+ // to resume the codec.
+ } else {
+ setMediaCodecPaused(false);
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(true);
+ }
+ }
+ // If this is a video encoder, then request a key frame in order to complete
+ // the resume process as soon as possible in MediaCodec.Callback
+ // .onOutputBufferAvailable().
+ if (mIsVideoEncoder) {
+ requestKeyFrameToMediaCodec();
+ }
+ setState(STARTED);
+ break;
+ case STARTED:
+ case ERROR:
+ case PENDING_START:
+ // Do nothing
+ break;
+ case STOPPING:
+ case PENDING_START_PAUSED:
+ setState(PENDING_START);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ throw new IllegalStateException("Encoder is released");
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void stop() {
+ stop(NO_TIMESTAMP);
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void stop(long expectedStopTimeUs) {
+ final long stopTriggerTimeUs = generatePresentationTimeUs();
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ case STOPPING:
+ case ERROR:
+ // Do nothing
+ break;
+ case STARTED:
+ case PAUSED:
+ InternalState currentState = mState;
+ setState(STOPPING);
+ final long startTimeUs = mStartStopTimeRangeUs.getLower();
+ if (startTimeUs == NO_LIMIT_LONG) {
+ throw new AssertionError("There should be a \"start\" before \"stop\"");
+ }
+ long stopTimeUs;
+ if (expectedStopTimeUs == NO_TIMESTAMP) {
+ stopTimeUs = stopTriggerTimeUs;
+ } else if (expectedStopTimeUs < startTimeUs) {
+ // If the recording is stopped immediately after started, it's possible
+ // that the expected stop time is less than the start time because the
+ // encoder is run on a different executor. Ignore the expected stop time in
+ // this case so that the recording can be stopped correctly.
+ Logger.w(mTag, "The expected stop time is less than the start time. Use "
+ + "current time as stop time.");
+ stopTimeUs = stopTriggerTimeUs;
+ } else {
+ stopTimeUs = expectedStopTimeUs;
+ }
+ if (stopTimeUs < startTimeUs) {
+ throw new AssertionError("The start time should be before the stop time.");
+ }
+ // Store the stop time. The codec will be stopped after receiving the data
+ // that has a timestamp equal or greater than the stop time.
+ mStartStopTimeRangeUs = Range.create(startTimeUs, stopTimeUs);
+ Logger.d(mTag, "Stop on " + DebugUtils.readableUs(stopTimeUs));
+ // If the Encoder is paused and has received enough data, directly signal
+ // the codec to stop.
+ if (currentState == PAUSED && mLastDataStopTimestamp != null) {
+ signalCodecStop();
+ } else {
+ mPendingCodecStop = true;
+ // If somehow the data doesn't reach the expected timestamp before it
+ // times out, stop the codec so that the Encoder can at least be stopped.
+ // Set mDataStopTimeStamp to be null in order to catch this issue in test.
+ mStopTimeoutFuture =
+ CameraXExecutors.mainThreadExecutor().schedule(
+ () -> mEncoderExecutor.execute(() -> {
+ if (mPendingCodecStop) {
+ Logger.w(mTag,
+ "The data didn't reach the expected "
+ + "timestamp before timeout, stop"
+ + " the codec.");
+ mLastDataStopTimestamp = null;
+ signalCodecStop();
+ mPendingCodecStop = false;
+ }
+ }), STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ }
+ break;
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ setState(CONFIGURED);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ throw new IllegalStateException("Encoder is released");
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void signalCodecStop() {
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(false);
+ // Wait for all issued input buffers to complete to avoid input loss.
+ List<ListenableFuture<Void>> futures = new ArrayList<>();
+ for (InputBuffer inputBuffer : mInputBufferSet) {
+ futures.add(inputBuffer.getTerminationFuture());
+ }
+ Futures.successfulAsList(futures).addListener(this::signalEndOfInputStream,
+ mEncoderExecutor);
+ } else if (mEncoderInput instanceof SurfaceInput) {
+ try {
+ mMediaCodec.signalEndOfInputStream();
+ // On some devices, MediaCodec#signalEndOfInputStream() doesn't work.
+ // See b/255209101.
+ mMediaCodecEosSignalled = true;
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ }
+ }
+ }
+
+ /**
+ * Pauses the encoder.
+ *
+ * {@code pause} only works between {@link #start} and {@link #stop}. Once the encoder is
+ * paused, it will drop the input data until {@link #start} is invoked again.
+ */
+ @Override
+ public void pause() {
+ final long pauseTriggerTimeUs = generatePresentationTimeUs();
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ case PAUSED:
+ case ERROR:
+ case STOPPING:
+ case PENDING_START_PAUSED:
+ // Do nothing
+ break;
+ case PENDING_START:
+ setState(PENDING_START_PAUSED);
+ break;
+ case STARTED:
+ // Create and insert a pause/resume range.
+ Logger.d(mTag, "Pause on " + DebugUtils.readableUs(pauseTriggerTimeUs));
+ mActivePauseResumeTimeRanges.addLast(
+ Range.create(pauseTriggerTimeUs, NO_LIMIT_LONG));
+ setState(PAUSED);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ throw new IllegalStateException("Encoder is released");
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /**
+ * Releases the encoder.
+ *
+ * Once the encoder is released, it cannot be used anymore. Any other method call after
+ * the encoder is released will get {@link IllegalStateException}. If it is encoding, make
+ * sure to call {@link #stop} before {@code release} to end the stream normally; otherwise
+ * calling {@code release} while encoding may give an uncertain result.
+ */
+ @Override
+ public void release() {
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ case STARTED:
+ case PAUSED:
+ case ERROR:
+ releaseInternal();
+ break;
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ setState(PENDING_RELEASE);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /** {@inheritDoc} */
+ @NonNull
+ @Override
+ public ListenableFuture getReleasedFuture() {
+ return mReleasedFuture;
+ }
+
+ /**
+ * Sends a hint to the encoder that the source has stopped producing data.
+ *
+ * This will allow the encoder to reset when it is stopped and no more input data is
+ * incoming. This can optimize the time needed to start the next session with
+ * {@link #start()} and can regenerate a {@link Surface} on devices that don't support
+ * persistent input surfaces.
+ */
+ public void signalSourceStopped() {
+ mEncoderExecutor.execute(() -> {
+ mSourceStoppedSignalled = true;
+ if (mIsFlushedAfterEndOfStream) {
+ mMediaCodec.stop();
+ reset();
+ }
+ });
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void releaseInternal() {
+ if (mIsFlushedAfterEndOfStream) {
+ mMediaCodec.stop();
+ mIsFlushedAfterEndOfStream = false;
+ }
+
+ mMediaCodec.release();
+
+ if (mEncoderInput instanceof SurfaceInput) {
+ ((SurfaceInput) mEncoderInput).releaseSurface();
+ }
+
+ setState(RELEASED);
+
+ mReleasedCompleter.set(null);
+ }
+
+ /**
+ * Sets callback to encoder.
+ *
+ * @param encoderCallback the encoder callback
+ * @param executor the callback executor
+ */
+ @Override
+ public void setEncoderCallback(
+ @NonNull EncoderCallback encoderCallback,
+ @NonNull Executor executor) {
+ synchronized (mLock) {
+ mEncoderCallback = encoderCallback;
+ mEncoderCallbackExecutor = executor;
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void requestKeyFrame() {
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case STARTED:
+ requestKeyFrameToMediaCodec();
+ break;
+ case CONFIGURED:
+ case PAUSED:
+ case ERROR:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ // No-op
+ break;
+ case RELEASED:
+ case PENDING_RELEASE:
+ throw new IllegalStateException("Encoder is released");
+ }
+ });
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void setState(InternalState state) {
+ if (mState == state) {
+ return;
+ }
+ Logger.d(mTag, "Transitioning encoder internal state: " + mState + " --> " + state);
+ mState = state;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void setMediaCodecPaused(boolean paused) {
+ Bundle bundle = new Bundle();
+ bundle.putInt(MediaCodec.PARAMETER_KEY_SUSPEND, paused ? 1 : 0);
+ mMediaCodec.setParameters(bundle);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void requestKeyFrameToMediaCodec() {
+ Bundle bundle = new Bundle();
+ bundle.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mMediaCodec.setParameters(bundle);
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void signalEndOfInputStream() {
+ Futures.addCallback(acquireInputBuffer(),
+ new FutureCallback<InputBuffer>() {
+ @Override
+ public void onSuccess(InputBuffer inputBuffer) {
+ inputBuffer.setPresentationTimeUs(generatePresentationTimeUs());
+ inputBuffer.setEndOfStream(true);
+ inputBuffer.submit();
+
+ Futures.addCallback(inputBuffer.getTerminationFuture(),
+ new FutureCallback<Void>() {
+ @ExecutedBy("mEncoderExecutor")
+ @Override
+ public void onSuccess(@Nullable Void result) {
+ // Do nothing.
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ if (t instanceof MediaCodec.CodecException) {
+ handleEncodeError(
+ (MediaCodec.CodecException) t);
+ } else {
+ handleEncodeError(EncodeException.ERROR_UNKNOWN,
+ t.getMessage(), t);
+ }
+ }
+ }, mEncoderExecutor);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ handleEncodeError(EncodeException.ERROR_UNKNOWN,
+ "Unable to acquire InputBuffer.", t);
+ }
+ }, mEncoderExecutor);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void handleEncodeError(@NonNull MediaCodec.CodecException e) {
+ handleEncodeError(EncodeException.ERROR_CODEC, e.getMessage(), e);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void handleEncodeError(@EncodeException.ErrorType int error, @Nullable String message,
+ @Nullable Throwable throwable) {
+ switch (mState) {
+ case CONFIGURED:
+ // Unable to start MediaCodec. This is a fatal error. Try to reset the encoder.
+ notifyError(error, message, throwable);
+ reset();
+ break;
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START_PAUSED:
+ case PENDING_START:
+ case PENDING_RELEASE:
+ setState(ERROR);
+ stopMediaCodec(() -> notifyError(error, message, throwable));
+ break;
+ case ERROR:
+ //noinspection ConstantConditions
+ Logger.w(mTag, "Get more than one error: " + message + "(" + error + ")",
+ throwable);
+ break;
+ case RELEASED:
+ // Do nothing
+ break;
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ void notifyError(@EncodeException.ErrorType int error, @Nullable String message,
+ @Nullable Throwable throwable) {
+ EncoderCallback callback;
+ Executor executor;
+ synchronized (mLock) {
+ callback = mEncoderCallback;
+ executor = mEncoderCallbackExecutor;
+ }
+ try {
+ executor.execute(
+ () -> callback.onEncodeError(new EncodeException(error, message, throwable)));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void stopMediaCodec(@Nullable Runnable afterStop) {
+ /*
+ * MediaCodec#stop will free all its input/output ByteBuffers. Therefore, before calling
+ * MediaCodec#stop, it must ensure all dispatched EncodedData(output ByteBuffers) and
+ * InputBuffer(input ByteBuffers) are complete. Otherwise, the ByteBuffer receiver will
+ * get buffer overflow when accessing the ByteBuffers.
+ */
+ List<ListenableFuture<Void>> futures = new ArrayList<>();
+ for (EncodedDataImpl dataToClose : mEncodedDataSet) {
+ futures.add(dataToClose.getClosedFuture());
+ }
+ for (InputBuffer inputBuffer : mInputBufferSet) {
+ futures.add(inputBuffer.getTerminationFuture());
+ }
+ if (!futures.isEmpty()) {
+ Logger.d(mTag, "Waiting for resources to return."
+ + " encoded data = " + mEncodedDataSet.size()
+ + ", input buffers = " + mInputBufferSet.size());
+ }
+ Futures.successfulAsList(futures).addListener(() -> {
+ // If the encoder is not in ERROR state, stop the codec first before resetting.
+ // Otherwise, reset directly.
+ if (mState != ERROR) {
+ if (!futures.isEmpty()) {
+ Logger.d(mTag, "encoded data and input buffers are returned");
+ }
+ if (mEncoderInput instanceof SurfaceInput && !mSourceStoppedSignalled) {
+ // For a SurfaceInput, the codec is in control of de-queuing buffers from the
+ // underlying BufferQueue. If we stop the codec, then it will stop de-queuing
+ // buffers and the BufferQueue may run out of input buffers, causing the camera
+ // pipeline to stall. Instead of stopping, we will flush the codec. Since the
+ // codec is operating in asynchronous mode, this will cause the codec to
+ // continue to discard buffers. We should have already received the
+ // end-of-stream signal on an output buffer at this point, so those buffers
+ // are not needed anyways. We will defer resetting the codec until just
+ // before starting the codec again.
+ mMediaCodec.flush();
+ mIsFlushedAfterEndOfStream = true;
+ } else {
+ // Non-SurfaceInputs give us more control over input buffers. We can directly
+ // stop the codec instead of flushing.
+ // Additionally, if we already received a signal that the source is stopped,
+ // then there shouldn't be new buffers being produced, and we don't need to
+ // flush.
+ mMediaCodec.stop();
+ }
+ }
+ if (afterStop != null) {
+ afterStop.run();
+ }
+ handleStopped();
+ }, mEncoderExecutor);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void handleStopped() {
+ if (mState == PENDING_RELEASE) {
+ releaseInternal();
+ } else {
+ InternalState oldState = mState;
+ if (!mIsFlushedAfterEndOfStream) {
+ // Only reset if the codec is stopped (not flushed). If the codec is flushed, we
+ // want it to continue to discard buffers. We will reset before starting the
+ // codec again.
+ reset();
+ }
+ setState(CONFIGURED);
+ if (oldState == PENDING_START || oldState == PENDING_START_PAUSED) {
+ start();
+ if (oldState == PENDING_START_PAUSED) {
+ pause();
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void updateTotalPausedDuration(long bufferPresentationTimeUs) {
+ while (!mActivePauseResumeTimeRanges.isEmpty()) {
+ Range<Long> pauseRange = mActivePauseResumeTimeRanges.getFirst();
+ if (bufferPresentationTimeUs > pauseRange.getUpper()) {
+ // Later than current pause, remove this pause and update total paused duration.
+ mActivePauseResumeTimeRanges.removeFirst();
+ mTotalPausedDurationUs += (pauseRange.getUpper() - pauseRange.getLower());
+ Logger.d(mTag,
+ "Total paused duration = " + DebugUtils.readableUs(mTotalPausedDurationUs));
+ } else {
+ break;
+ }
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ long getAdjustedTimeUs(@NonNull BufferInfo bufferInfo) {
+ long adjustedTimeUs;
+ if (mTotalPausedDurationUs > 0L) {
+ adjustedTimeUs = bufferInfo.presentationTimeUs - mTotalPausedDurationUs;
+ } else {
+ adjustedTimeUs = bufferInfo.presentationTimeUs;
+ }
+ return adjustedTimeUs;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ boolean isInPauseRange(long timeUs) {
+ for (Range<Long> range : mActivePauseResumeTimeRanges) {
+ if (range.contains(timeUs)) {
+ return true;
+ } else if (timeUs < range.getLower()) {
+ // Earlier than pause range.
+ return false;
+ }
+ // Later than current pause, keep searching.
+ }
+ return false;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ @NonNull
+ ListenableFuture<InputBuffer> acquireInputBuffer() {
+ switch (mState) {
+ case CONFIGURED:
+ return Futures.immediateFailedFuture(new IllegalStateException(
+ "Encoder is not started yet."));
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ AtomicReference<Completer<InputBuffer>> ref = new AtomicReference<>();
+ ListenableFuture<InputBuffer> future = CallbackToFutureAdapter.getFuture(
+ completer -> {
+ ref.set(completer);
+ return "acquireInputBuffer";
+ });
+ Completer<InputBuffer> completer = Preconditions.checkNotNull(ref.get());
+ mAcquisitionQueue.offer(completer);
+ completer.addCancellationListener(() -> mAcquisitionQueue.remove(completer),
+ mEncoderExecutor);
+ matchAcquisitionsAndFreeBufferIndexes();
+ return future;
+ case ERROR:
+ return Futures.immediateFailedFuture(new IllegalStateException(
+ "Encoder is in error state."));
+ case RELEASED:
+ return Futures.immediateFailedFuture(new IllegalStateException(
+ "Encoder is released."));
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void matchAcquisitionsAndFreeBufferIndexes() {
+ while (!mAcquisitionQueue.isEmpty() && !mFreeInputBufferIndexQueue.isEmpty()) {
+ Completer<InputBuffer> completer = requireNonNull(mAcquisitionQueue.poll());
+ int bufferIndex = requireNonNull(mFreeInputBufferIndexQueue.poll());
+
+ InputBufferImpl inputBuffer;
+ try {
+ inputBuffer = new InputBufferImpl(mMediaCodec, bufferIndex);
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ if (completer.set(inputBuffer)) {
+ mInputBufferSet.add(inputBuffer);
+ inputBuffer.getTerminationFuture().addListener(
+ () -> mInputBufferSet.remove(inputBuffer), mEncoderExecutor);
+ } else {
+ inputBuffer.cancel();
+ }
+ }
+ }
+
+ @NonNull
+ private static EncoderInfo createEncoderInfo(boolean isVideoEncoder,
+ @NonNull MediaCodecInfo codecInfo, @NonNull String mime) throws InvalidConfigException {
+ return isVideoEncoder ? new VideoEncoderInfoImpl(codecInfo, mime)
+ : new AudioEncoderInfoImpl(codecInfo, mime);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ long generatePresentationTimeUs() {
+ return mTimeProvider.uptimeUs();
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ static boolean isKeyFrame(@NonNull BufferInfo bufferInfo) {
+ return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ static boolean hasEndOfStreamFlag(@NonNull BufferInfo bufferInfo) {
+ return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ class MediaCodecCallback extends MediaCodec.Callback {
+ @Nullable
+ private final VideoTimebaseConverter mVideoTimestampConverter;
+
+ private boolean mHasSendStartCallback = false;
+ private boolean mHasFirstData = false;
+ private boolean mHasEndData = false;
+ /** The last presentation time of BufferInfo without modified. */
+ private long mLastPresentationTimeUs = 0L;
+ /**
+ * The last sent presentation time of BufferInfo. The value could be adjusted by total
+ * pause duration.
+ */
+ private long mLastSentAdjustedTimeUs = 0L;
+ private boolean mIsOutputBufferInPauseState = false;
+ private boolean mIsKeyFrameRequired = false;
+ private boolean mStopped = false;
+
+ MediaCodecCallback() {
+ if (mIsVideoEncoder) {
+ Timebase inputTimebase;
+ if (DeviceQuirks.get(CameraUseInconsistentTimebaseQuirk.class) != null) {
+ inputTimebase = null;
+ } else {
+ inputTimebase = mInputTimebase;
+ }
+ mVideoTimestampConverter = new VideoTimebaseConverter(mTimeProvider, inputTimebase);
+ } else {
+ mVideoTimestampConverter = null;
+ }
+ }
+
+ @Override
+ public void onInputBufferAvailable(MediaCodec mediaCodec, int index) {
+ mEncoderExecutor.execute(() -> {
+ if (mStopped) {
+ Logger.w(mTag, "Receives input frame after codec is reset.");
+ return;
+ }
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ mFreeInputBufferIndexQueue.offer(index);
+ matchAcquisitionsAndFreeBufferIndexes();
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @Override
+ public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int index,
+ @NonNull BufferInfo bufferInfo) {
+ mEncoderExecutor.execute(() -> {
+ if (mStopped) {
+ Logger.w(mTag, "Receives frame after codec is reset.");
+ return;
+ }
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ final EncoderCallback encoderCallback;
+ final Executor executor;
+ synchronized (mLock) {
+ encoderCallback = mEncoderCallback;
+ executor = mEncoderCallbackExecutor;
+ }
+
+ if (DEBUG) {
+ Logger.d(mTag, DebugUtils.readableBufferInfo(bufferInfo));
+ }
+
+ // Handle start of stream
+ if (!mHasSendStartCallback) {
+ mHasSendStartCallback = true;
+ try {
+ executor.execute(encoderCallback::onEncodeStart);
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+
+ if (checkBufferInfo(bufferInfo)) {
+ if (!mHasFirstData) {
+ mHasFirstData = true;
+ }
+ BufferInfo outBufferInfo = resolveOutputBufferInfo(bufferInfo);
+ mLastSentAdjustedTimeUs = outBufferInfo.presentationTimeUs;
+ try {
+ EncodedDataImpl encodedData = new EncodedDataImpl(mediaCodec, index,
+ outBufferInfo);
+ sendEncodedData(encodedData, encoderCallback, executor);
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ } else {
+ // Not necessary to return fake buffer
+ if (index != FAKE_BUFFER_INDEX) {
+ try {
+ mMediaCodec.releaseOutputBuffer(index, false);
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ }
+ }
+
+ // Handle end of stream
+ if (!mHasEndData && isEndOfStream(bufferInfo)) {
+ mHasEndData = true;
+ stopMediaCodec(() -> {
+ if (mState == ERROR) {
+ // An error occurred during stopping.
+ return;
+ }
+ try {
+ executor.execute(encoderCallback::onEncodeStop);
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ });
+ }
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ @NonNull
+ private BufferInfo resolveOutputBufferInfo(@NonNull BufferInfo bufferInfo) {
+ long adjustedTimeUs = getAdjustedTimeUs(bufferInfo);
+ if (bufferInfo.presentationTimeUs == adjustedTimeUs) {
+ return bufferInfo;
+ }
+
+ // If adjusted time <= last sent time, the buffer should have been detected and
+ // dropped in checkBufferInfo().
+ Preconditions.checkState(adjustedTimeUs > mLastSentAdjustedTimeUs);
+ if (DEBUG) {
+ Logger.d(mTag, "Adjust bufferInfo.presentationTimeUs to "
+ + DebugUtils.readableUs(adjustedTimeUs));
+ }
+ BufferInfo newBufferInfo = new BufferInfo();
+ newBufferInfo.set(bufferInfo.offset, bufferInfo.size, adjustedTimeUs, bufferInfo.flags);
+ return newBufferInfo;
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void sendEncodedData(@NonNull EncodedDataImpl encodedData,
+ @NonNull EncoderCallback callback, @NonNull Executor executor) {
+ mEncodedDataSet.add(encodedData);
+ Futures.addCallback(encodedData.getClosedFuture(),
+ new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(@Nullable Void result) {
+ mEncodedDataSet.remove(encodedData);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ mEncodedDataSet.remove(encodedData);
+ if (t instanceof MediaCodec.CodecException) {
+ handleEncodeError(
+ (MediaCodec.CodecException) t);
+ } else {
+ handleEncodeError(EncodeException.ERROR_UNKNOWN,
+ t.getMessage(), t);
+ }
+ }
+ }, mEncoderExecutor);
+ try {
+ executor.execute(() -> callback.onEncodedData(encodedData));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ encodedData.close();
+ }
+ }
+
+ /**
+ * Checks the {@link BufferInfo} and updates related states.
+ *
+ * @return {@code true} if the buffer is valid, otherwise {@code false}.
+ */
+ @ExecutedBy("mEncoderExecutor")
+ private boolean checkBufferInfo(@NonNull BufferInfo bufferInfo) {
+ if (mHasEndData) {
+ Logger.d(mTag, "Drop buffer by already reach end of stream.");
+ return false;
+ }
+
+ if (bufferInfo.size <= 0) {
+ Logger.d(mTag, "Drop buffer by invalid buffer size.");
+ return false;
+ }
+
+ if (mVideoTimestampConverter != null) {
+ bufferInfo.presentationTimeUs =
+ mVideoTimestampConverter.convertToUptimeUs(bufferInfo.presentationTimeUs);
+ }
+
+ // MediaCodec may send out-of-order buffers
+ if (bufferInfo.presentationTimeUs <= mLastPresentationTimeUs) {
+ Logger.d(mTag, "Drop buffer by out of order buffer from MediaCodec.");
+ return false;
+ }
+ mLastPresentationTimeUs = bufferInfo.presentationTimeUs;
+
+ // Ignore buffers that are not in the start/stop range. One situation is to ignore outdated
+ // frames when using the Surface of MediaCodec#createPersistentInputSurface. After
+ // the persistent Surface stops, it will keep a small number of old frames in its
+ // buffer, and send those old frames in the next startup.
+ if (!mStartStopTimeRangeUs.contains(bufferInfo.presentationTimeUs)) {
+ Logger.d(mTag, "Drop buffer by not in start-stop range.");
+ // If data hasn't reached the expected stop timestamp, set the stop timestamp.
+ if (mPendingCodecStop
+ && bufferInfo.presentationTimeUs >= mStartStopTimeRangeUs.getUpper()) {
+ if (mStopTimeoutFuture != null) {
+ mStopTimeoutFuture.cancel(true);
+ }
+ mLastDataStopTimestamp = bufferInfo.presentationTimeUs;
+ signalCodecStop();
+ mPendingCodecStop = false;
+ }
+ return false;
+ }
+
+ if (updatePauseRangeStateAndCheckIfBufferPaused(bufferInfo)) {
+ Logger.d(mTag, "Drop buffer by pause.");
+ return false;
+ }
+
+ // We should check if the adjusted time is valid. see b/189114207.
+ if (getAdjustedTimeUs(bufferInfo) <= mLastSentAdjustedTimeUs) {
+ Logger.d(mTag, "Drop buffer by adjusted time is less than the last sent time.");
+ if (mIsVideoEncoder && isKeyFrame(bufferInfo)) {
+ mIsKeyFrameRequired = true;
+ }
+ return false;
+ }
+
+ if (!mHasFirstData && !mIsKeyFrameRequired && mIsVideoEncoder) {
+ mIsKeyFrameRequired = true;
+ }
+
+ if (mIsKeyFrameRequired) {
+ if (!isKeyFrame(bufferInfo)) {
+ requestKeyFrameToMediaCodec();
+ }
+ mIsKeyFrameRequired = false;
+ }
+
+ return true;
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private boolean isEndOfStream(@NonNull BufferInfo bufferInfo) {
+ return hasEndOfStreamFlag(bufferInfo) || isEosSignalledAndStopTimeReached(bufferInfo);
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private boolean isEosSignalledAndStopTimeReached(@NonNull BufferInfo bufferInfo) {
+ return mMediaCodecEosSignalled
+ && bufferInfo.presentationTimeUs > mStartStopTimeRangeUs.getUpper();
+ }
+
+ @SuppressWarnings("StatementWithEmptyBody") // to better organize the logic and comments
+ @ExecutedBy("mEncoderExecutor")
+ private boolean updatePauseRangeStateAndCheckIfBufferPaused(
+ @NonNull BufferInfo bufferInfo) {
+ updateTotalPausedDuration(bufferInfo.presentationTimeUs);
+ boolean isInPauseRange = isInPauseRange(bufferInfo.presentationTimeUs);
+ if (!mIsOutputBufferInPauseState && isInPauseRange) {
+ Logger.d(mTag, "Switch to pause state");
+ // From resume to pause
+ mIsOutputBufferInPauseState = true;
+
+ // Invoke paused callback
+ Executor executor;
+ EncoderCallback encoderCallback;
+ synchronized (mLock) {
+ executor = mEncoderCallbackExecutor;
+ encoderCallback = mEncoderCallback;
+ }
+ executor.execute(encoderCallback::onEncodePaused);
+
+ // We must ensure that the current state is PAUSED before we stop the input
+ // source and pause the codec. This is because start() may be called before the
+ // output buffer reaches the pause range.
+ if (mState == PAUSED) {
+ if (!mIsVideoEncoder && DeviceQuirks.get(
+ AudioEncoderIgnoresInputTimestampQuirk.class) != null) {
+ // Do nothing, which means keep handling audio data in the codec.
+ } else if (mIsVideoEncoder && DeviceQuirks.get(
+ VideoEncoderSuspendDoesNotIncludeSuspendTimeQuirk.class) != null) {
+ // Do nothing, which means don't pause the codec.
+ } else {
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(false);
+ }
+ setMediaCodecPaused(true);
+ }
+ }
+
+ // An encoding session can be paused and resumed multiple times, so a later pause
+ // should overwrite the previous data stop time.
+ mLastDataStopTimestamp = bufferInfo.presentationTimeUs;
+ // If the encoder has been stopped before the data enters pause period, stop the
+ // codec directly.
+ if (mPendingCodecStop) {
+ if (mStopTimeoutFuture != null) {
+ mStopTimeoutFuture.cancel(true);
+ }
+ signalCodecStop();
+ mPendingCodecStop = false;
+ }
+ } else if (mIsOutputBufferInPauseState && !isInPauseRange) {
+ // From pause to resume
+ Logger.d(mTag, "Switch to resume state");
+ mIsOutputBufferInPauseState = false;
+ if (mIsVideoEncoder && !isKeyFrame(bufferInfo)) {
+ mIsKeyFrameRequired = true;
+ }
+ }
+
+ return mIsOutputBufferInPauseState;
+ }
+
+ @Override
+ public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ handleEncodeError(e);
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @Override
+ public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec,
+ @NonNull MediaFormat mediaFormat) {
+ mEncoderExecutor.execute(() -> {
+ if (mStopped) {
+ Logger.w(mTag, "Receives onOutputFormatChanged after codec is reset.");
+ return;
+ }
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ EncoderCallback encoderCallback;
+ Executor executor;
+ synchronized (mLock) {
+ encoderCallback = mEncoderCallback;
+ executor = mEncoderCallbackExecutor;
+ }
+ try {
+ executor.execute(
+ () -> encoderCallback.onOutputConfigUpdate(() -> mediaFormat));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /** Stop processing further frame output. */
+ @ExecutedBy("mEncoderExecutor")
+ void stop() {
+ mStopped = true;
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ class SurfaceInput implements Encoder.SurfaceInput {
+
+ private final Object mLock = new Object();
+
+ @GuardedBy("mLock")
+ private Surface mSurface;
+
+ @GuardedBy("mLock")
+ private final Set<Surface> mObsoleteSurfaces = new HashSet<>();
+
+ @GuardedBy("mLock")
+ private OnSurfaceUpdateListener mSurfaceUpdateListener;
+
+ @GuardedBy("mLock")
+ private Executor mSurfaceUpdateExecutor;
+
+ /**
+ * Sets the surface update listener.
+ *
+ * @param executor the executor to invoke the listener
+ * @param listener the surface update listener
+ */
+ @Override
+ public void setOnSurfaceUpdateListener(@NonNull Executor executor,
+ @NonNull OnSurfaceUpdateListener listener) {
+ Surface surface;
+ synchronized (mLock) {
+ mSurfaceUpdateListener = Preconditions.checkNotNull(listener);
+ mSurfaceUpdateExecutor = Preconditions.checkNotNull(executor);
+ surface = mSurface;
+ }
+ if (surface != null) {
+ notifySurfaceUpdate(executor, listener, surface);
+ }
+ }
+
+ @SuppressLint("NewApi")
+ void resetSurface() {
+ Surface surface;
+ Executor executor;
+ OnSurfaceUpdateListener listener;
+ EncoderNotUsePersistentInputSurfaceQuirk quirk = DeviceQuirks.get(
+ EncoderNotUsePersistentInputSurfaceQuirk.class);
+ synchronized (mLock) {
+ if (quirk == null) {
+ if (mSurface == null) {
+ mSurface = Api23Impl.createPersistentInputSurface();
+ surface = mSurface;
+ } else {
+ surface = null;
+ }
+ Api23Impl.setInputSurface(mMediaCodec, mSurface);
+ } else {
+ if (mSurface != null) {
+ mObsoleteSurfaces.add(mSurface);
+ }
+ mSurface = mMediaCodec.createInputSurface();
+ surface = mSurface;
+ }
+ listener = mSurfaceUpdateListener;
+ executor = mSurfaceUpdateExecutor;
+ }
+ if (surface != null && listener != null && executor != null) {
+ notifySurfaceUpdate(executor, listener, surface);
+ }
+ }
+
+ void releaseSurface() {
+ Surface surface;
+ Set<Surface> obsoleteSurfaces;
+ synchronized (mLock) {
+ surface = mSurface;
+ mSurface = null;
+ obsoleteSurfaces = new HashSet<>(mObsoleteSurfaces);
+ mObsoleteSurfaces.clear();
+ }
+ if (surface != null) {
+ surface.release();
+ }
+ for (Surface obsoleteSurface : obsoleteSurfaces) {
+ obsoleteSurface.release();
+ }
+ }
+
+ private void notifySurfaceUpdate(@NonNull Executor executor,
+ @NonNull OnSurfaceUpdateListener listener, @NonNull Surface surface) {
+ try {
+ executor.execute(() -> listener.onSurfaceUpdate(surface));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ class ByteBufferInput implements Encoder.ByteBufferInput {
+
+ private final Map<Observer<? super State>, Executor> mStateObservers =
+ new LinkedHashMap<>();
+
+ private State mBufferProviderState = State.INACTIVE;
+
+ private final List<ListenableFuture<InputBuffer>> mAcquisitionList = new ArrayList<>();
+
+ /** {@inheritDoc} */
+ @NonNull
+ @Override
+ public ListenableFuture<State> fetchData() {
+ return CallbackToFutureAdapter.getFuture(completer -> {
+ mEncoderExecutor.execute(() -> completer.set(mBufferProviderState));
+ return "fetchData";
+ });
+ }
+
+ /** {@inheritDoc} */
+ @NonNull
+ @Override
+ public ListenableFuture<InputBuffer> acquireBuffer() {
+ return CallbackToFutureAdapter.getFuture(completer -> {
+ mEncoderExecutor.execute(() -> {
+ if (mBufferProviderState == State.ACTIVE) {
+ ListenableFuture<InputBuffer> future = acquireInputBuffer();
+ Futures.propagate(future, completer);
+ // Cancel by outer, also cancel internal future.
+ completer.addCancellationListener(() -> cancelInputBuffer(future),
+ CameraXExecutors.directExecutor());
+
+ // Keep tracking the acquisition by internal future. Once the provider state
+ // transition to inactive, cancel the internal future can also send signal
+ // to outer future since we propagate the internal result to the completer.
+ mAcquisitionList.add(future);
+ future.addListener(() -> mAcquisitionList.remove(future), mEncoderExecutor);
+ } else if (mBufferProviderState == State.INACTIVE) {
+ completer.setException(
+ new IllegalStateException("BufferProvider is not active."));
+ } else {
+ completer.setException(
+ new IllegalStateException(
+ "Unknown state: " + mBufferProviderState));
+ }
+ });
+ return "acquireBuffer";
+ });
+ }
+
+ private void cancelInputBuffer(@NonNull ListenableFuture<InputBuffer> inputBufferFuture) {
+ if (!inputBufferFuture.cancel(true)) {
+ // Unable to cancel the future, so cancel the already-acquired input buffer if possible.
+ Preconditions.checkState(inputBufferFuture.isDone());
+ try {
+ inputBufferFuture.get().cancel();
+ } catch (ExecutionException | InterruptedException | CancellationException e) {
+ Logger.w(mTag, "Unable to cancel the input buffer: " + e);
+ }
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void addObserver(@NonNull Executor executor,
+ @NonNull Observer<? super State> observer) {
+ mEncoderExecutor.execute(() -> {
+ mStateObservers.put(Preconditions.checkNotNull(observer),
+ Preconditions.checkNotNull(executor));
+ final State state = mBufferProviderState;
+ executor.execute(() -> observer.onNewData(state));
+ });
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void removeObserver(@NonNull Observer<? super State> observer) {
+ mEncoderExecutor.execute(
+ () -> mStateObservers.remove(Preconditions.checkNotNull(observer)));
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ void setActive(boolean isActive) {
+ final State newState = isActive ? State.ACTIVE : State.INACTIVE;
+ if (mBufferProviderState == newState) {
+ return;
+ }
+ mBufferProviderState = newState;
+
+ if (newState == State.INACTIVE) {
+ for (ListenableFuture<InputBuffer> future : mAcquisitionList) {
+ future.cancel(true);
+ }
+ mAcquisitionList.clear();
+ }
+
+ for (Map.Entry<Observer<? super State>, Executor> entry : mStateObservers.entrySet()) {
+ try {
+ entry.getValue().execute(() -> entry.getKey().onNewData(newState));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+ }
+ }
+
+ /**
+ * Nested class to avoid verification errors for methods introduced in Android 6.0 (API 23).
+ */
+ @RequiresApi(23)
+ private static class Api23Impl {
+
+ private Api23Impl() {
+ }
+
+ @DoNotInline
+ @NonNull
+ static Surface createPersistentInputSurface() {
+ return MediaCodec.createPersistentInputSurface();
+ }
+
+ @DoNotInline
+ static void setInputSurface(@NonNull MediaCodec mediaCodec, @NonNull Surface surface) {
+ mediaCodec.setInputSurface(surface);
+ }
+ }
+}
diff --git a/app/src/main/java/androidx/camera/video/originals/EncoderImpl.java b/app/src/main/java/androidx/camera/video/originals/EncoderImpl.java
new file mode 100644
index 0000000..1749057
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/originals/EncoderImpl.java
@@ -0,0 +1,1699 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video.internal.encoder;
+
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.CONFIGURED;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.ERROR;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.PAUSED;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.PENDING_RELEASE;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.PENDING_START;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.PENDING_START_PAUSED;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.RELEASED;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.STARTED;
+import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.STOPPING;
+
+import static java.util.Objects.requireNonNull;
+
+import android.annotation.SuppressLint;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.os.Bundle;
+import android.util.Range;
+import android.view.Surface;
+
+import androidx.annotation.DoNotInline;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.camera.core.Logger;
+import androidx.camera.core.impl.Timebase;
+import androidx.camera.core.impl.annotation.ExecutedBy;
+import androidx.camera.core.impl.utils.executor.CameraXExecutors;
+import androidx.camera.core.impl.utils.futures.FutureCallback;
+import androidx.camera.core.impl.utils.futures.Futures;
+import androidx.camera.video.internal.DebugUtils;
+import androidx.camera.video.internal.compat.quirk.AudioEncoderIgnoresInputTimestampQuirk;
+import androidx.camera.video.internal.compat.quirk.CameraUseInconsistentTimebaseQuirk;
+import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
+import androidx.camera.video.internal.compat.quirk.EncoderNotUsePersistentInputSurfaceQuirk;
+import androidx.camera.video.internal.compat.quirk.VideoEncoderSuspendDoesNotIncludeSuspendTimeQuirk;
+import androidx.camera.video.internal.workaround.EncoderFinder;
+import androidx.camera.video.internal.workaround.VideoTimebaseConverter;
+import androidx.concurrent.futures.CallbackToFutureAdapter;
+import androidx.concurrent.futures.CallbackToFutureAdapter.Completer;
+import androidx.core.util.Preconditions;
+
+import com.google.common.util.concurrent.ListenableFuture;
+
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Deque;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.CancellationException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * The encoder implementation.
+ *
+ * An encoder could be either a video encoder or an audio encoder.
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+public class EncoderImpl implements Encoder {
+
+ enum InternalState {
+ /**
+ * The initial state.
+ */
+ CONFIGURED,
+
+ /**
+ * The state is when encoder is in {@link InternalState#CONFIGURED} state and {@link #start}
+ * is called.
+ */
+ STARTED,
+
+ /**
+ * The state is when encoder is in {@link InternalState#STARTED} state and {@link #pause}
+ * is called.
+ */
+ PAUSED,
+
+ /**
+ * The state is when encoder is in {@link InternalState#STARTED} state and {@link #stop} is
+ * called.
+ */
+ STOPPING,
+
+ /**
+ * The state is when the encoder is in {@link InternalState#STOPPING} state and a
+ * {@link #start} is called. It is an extension of {@link InternalState#STOPPING}.
+ */
+ PENDING_START,
+
+ /**
+ * The state is when the encoder is in {@link InternalState#STOPPING} state, then
+ * {@link #start} and {@link #pause} are called. It is an extension of
+ * {@link InternalState#STOPPING}.
+ */
+ PENDING_START_PAUSED,
+
+ /**
+ * The state is when the encoder is in {@link InternalState#STOPPING} state and a
+ * {@link #release} is called. It is an extension of {@link InternalState#STOPPING}.
+ */
+ PENDING_RELEASE,
+
+ /**
+ * The state is when the encoder encounters an error. Error state is a transitional state
+ * where the encoder user is supposed to wait for {@link EncoderCallback#onEncodeStop} or
+ * {@link EncoderCallback#onEncodeError}. Any method call during this state should be
+ * ignored except {@link #release}.
+ */
+ ERROR,
+
+ /** The state is when the encoder is released. */
+ RELEASED,
+ }
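+
+ // A rough sketch of the common transitions described by the javadoc above (not exhaustive;
+ // ERROR can be entered from most active states, and release() may interpose PENDING_RELEASE):
+ //   CONFIGURED --start()--> STARTED --pause()--> PAUSED --start()--> STARTED
+ //   STARTED/PAUSED --stop()--> STOPPING --(codec stopped)--> CONFIGURED
+ //   STOPPING --start()--> PENDING_START --pause()--> PENDING_START_PAUSED
+ //   STOPPING --release()--> PENDING_RELEASE --> RELEASED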
+
+ private static final boolean DEBUG = false;
+ private static final long NO_LIMIT_LONG = Long.MAX_VALUE;
+ private static final Range<Long> NO_RANGE = Range.create(NO_LIMIT_LONG, NO_LIMIT_LONG);
+ private static final long STOP_TIMEOUT_MS = 1000L;
+ private static final int FAKE_BUFFER_INDEX = -9999;
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final String mTag;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Object mLock = new Object();
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final boolean mIsVideoEncoder;
+ private final MediaFormat mMediaFormat;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final MediaCodec mMediaCodec;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final EncoderInput mEncoderInput;
+ private final EncoderInfo mEncoderInfo;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Executor mEncoderExecutor;
+ private final ListenableFuture<Void> mReleasedFuture;
+ private final Completer<Void> mReleasedCompleter;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Queue<Integer> mFreeInputBufferIndexQueue = new ArrayDeque<>();
+ private final Queue<Completer<InputBuffer>> mAcquisitionQueue = new ArrayDeque<>();
+ private final Set<InputBuffer> mInputBufferSet = new HashSet<>();
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Set<EncodedDataImpl> mEncodedDataSet = new HashSet<>();
+ /*
+ * mActivePauseResumeTimeRanges is a queue used to track all active pause/resume time ranges.
+ * An active pause/resume range means the latest output buffer still has not exceeded this
+ * range, so this range is still needed to check for later output buffers. The first element
+ * in the queue is the oldest range and the last element is the newest.
+ */
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ final Deque<Range<Long>> mActivePauseResumeTimeRanges = new ArrayDeque<>();
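+ // Illustrative example (values are made up): pause() at t = 2s adds [2s, NO_LIMIT]; a later
+ // start() at t = 5s rewrites it to [2s, 5s]. Output buffers whose timestamps fall inside
+ // [2s, 5s] are dropped, and once a buffer passes 5s, updateTotalPausedDuration() folds the
+ // 3s into mTotalPausedDurationUs so that getAdjustedTimeUs() shifts later timestamps back.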
+ final Timebase mInputTimebase;
+ final TimeProvider mTimeProvider = new SystemTimeProvider();
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @GuardedBy("mLock")
+ EncoderCallback mEncoderCallback = EncoderCallback.EMPTY;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @GuardedBy("mLock")
+ Executor mEncoderCallbackExecutor = CameraXExecutors.directExecutor();
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ InternalState mState;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ Range<Long> mStartStopTimeRangeUs = NO_RANGE;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ long mTotalPausedDurationUs = 0L;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ boolean mPendingCodecStop = false;
+ // The data timestamp that an encoding stops at. If this timestamp is null, it means the
+ // encoding hasn't received enough data to be stopped.
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ Long mLastDataStopTimestamp = null;
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ Future<?> mStopTimeoutFuture = null;
+ private MediaCodecCallback mMediaCodecCallback = null;
+
+ private boolean mIsFlushedAfterEndOfStream = false;
+ private boolean mSourceStoppedSignalled = false;
+ boolean mMediaCodecEosSignalled = false;
+
+ final EncoderFinder mEncoderFinder = new EncoderFinder();
+
+ /**
+ * Creates the encoder with a {@link EncoderConfig}
+ *
+ * @param executor the executor suitable for background task
+ * @param encoderConfig the encoder config
+ * @throws InvalidConfigException when the encoder cannot be configured.
+ */
+ public EncoderImpl(@NonNull Executor executor, @NonNull EncoderConfig encoderConfig)
+ throws InvalidConfigException {
+ Preconditions.checkNotNull(executor);
+ Preconditions.checkNotNull(encoderConfig);
+
+ mEncoderExecutor = CameraXExecutors.newSequentialExecutor(executor);
+
+ if (encoderConfig instanceof AudioEncoderConfig) {
+ mTag = "AudioEncoder";
+ mIsVideoEncoder = false;
+ mEncoderInput = new ByteBufferInput();
+ } else if (encoderConfig instanceof VideoEncoderConfig) {
+ mTag = "VideoEncoder";
+ mIsVideoEncoder = true;
+ mEncoderInput = new SurfaceInput();
+ } else {
+ throw new InvalidConfigException("Unknown encoder config type");
+ }
+
+ mInputTimebase = encoderConfig.getInputTimebase();
+ Logger.d(mTag, "mInputTimebase = " + mInputTimebase);
+ mMediaFormat = encoderConfig.toMediaFormat();
+ Logger.d(mTag, "mMediaFormat = " + mMediaFormat);
+ mMediaCodec = mEncoderFinder.findEncoder(mMediaFormat);
+ clampVideoBitrateIfNotSupported(mMediaCodec.getCodecInfo(), mMediaFormat);
+ Logger.i(mTag, "Selected encoder: " + mMediaCodec.getName());
+ mEncoderInfo = createEncoderInfo(mIsVideoEncoder, mMediaCodec.getCodecInfo(),
+ encoderConfig.getMimeType());
+ try {
+ reset();
+ } catch (MediaCodec.CodecException e) {
+ throw new InvalidConfigException(e);
+ }
+
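+ // Note: CallbackToFutureAdapter.getFuture() invokes the supplied resolver before returning,
+ // so the completer is already stored in releaseFutureRef when checkNotNull() runs below.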
+ AtomicReference<Completer<Void>> releaseFutureRef = new AtomicReference<>();
+ mReleasedFuture = Futures.nonCancellationPropagating(
+ CallbackToFutureAdapter.getFuture(completer -> {
+ releaseFutureRef.set(completer);
+ return "mReleasedFuture";
+ }));
+ mReleasedCompleter = Preconditions.checkNotNull(releaseFutureRef.get());
+
+ setState(CONFIGURED);
+ }
+
+ /**
+ * If the video bitrate in the MediaFormat is not supported by the supplied MediaCodecInfo,
+ * clamps the bitrate in the MediaFormat to the supported range.
+ *
+ * @param mediaCodecInfo MediaCodecInfo object
+ * @param mediaFormat MediaFormat object
+ */
+ private void clampVideoBitrateIfNotSupported(@NonNull MediaCodecInfo mediaCodecInfo,
+ @NonNull MediaFormat mediaFormat) {
+
+ if (!mediaCodecInfo.isEncoder() || !mIsVideoEncoder) {
+ return;
+ }
+
+ try {
+ String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
+ MediaCodecInfo.CodecCapabilities caps = mediaCodecInfo.getCapabilitiesForType(mime);
+ Preconditions.checkArgument(caps != null,
+ "MIME type is not supported");
+
+ if (mediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
+ // We only handle video bitrate issues at this moment.
+ MediaCodecInfo.VideoCapabilities videoCaps = caps.getVideoCapabilities();
+ Preconditions.checkArgument(videoCaps != null,
+ "Not video codec");
+
+ int origBitrate = mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
+ int newBitrate = videoCaps.getBitrateRange().clamp(origBitrate);
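+ // e.g. (hypothetical numbers) a requested 120 Mbps on a codec whose supported range is
+ // [1, 100] Mbps is clamped to 100 Mbps; a bitrate already inside the range is unchanged.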
+ if (origBitrate != newBitrate) {
+ mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, newBitrate);
+ Logger.d(mTag, "updated bitrate from " + origBitrate
+ + " to " + newBitrate);
+ }
+ }
+ } catch (IllegalArgumentException e) {
+ Logger.w(mTag, "Unexpected error while validating video bitrate", e);
+ }
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void reset() {
+ mStartStopTimeRangeUs = NO_RANGE;
+ mTotalPausedDurationUs = 0L;
+ mActivePauseResumeTimeRanges.clear();
+ mFreeInputBufferIndexQueue.clear();
+
+ // Cancel incomplete acquisitions if exists.
+ for (Completer<InputBuffer> completer : mAcquisitionQueue) {
+ completer.setCancelled();
+ }
+ mAcquisitionQueue.clear();
+
+ mMediaCodec.reset();
+ mIsFlushedAfterEndOfStream = false;
+ mSourceStoppedSignalled = false;
+ mMediaCodecEosSignalled = false;
+ mPendingCodecStop = false;
+ if (mStopTimeoutFuture != null) {
+ mStopTimeoutFuture.cancel(true);
+ mStopTimeoutFuture = null;
+ }
+ if (mMediaCodecCallback != null) {
+ mMediaCodecCallback.stop();
+ }
+ mMediaCodecCallback = new MediaCodecCallback();
+ mMediaCodec.setCallback(mMediaCodecCallback);
+
+ mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (mEncoderInput instanceof SurfaceInput) {
+ ((SurfaceInput) mEncoderInput).resetSurface();
+ }
+ }
+
+ /** Gets the {@link EncoderInput} of the encoder */
+ @Override
+ @NonNull
+ public EncoderInput getInput() {
+ return mEncoderInput;
+ }
+
+ @NonNull
+ @Override
+ public EncoderInfo getEncoderInfo() {
+ return mEncoderInfo;
+ }
+
+ @Override
+ public int getConfiguredBitrate() {
+ int configuredBitrate = 0;
+ if (mMediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
+ configuredBitrate = mMediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
+ }
+ return configuredBitrate;
+ }
+
+ /**
+ * Starts the encoder.
+ *
+ * If the encoder is not started yet, it will first trigger
+ * {@link EncoderCallback#onEncodeStart}. Then it continually invokes the
+ * {@link EncoderCallback#onEncodedData} callback until the encoder is paused, stopped or
+ * released. {@link #pause} can be called to pause the encoding after it has started. If the
+ * encoder is in the paused state, calling this method will resume the encoding.
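+ * <p>A minimal usage sketch (the executor and callback instances below are illustrative,
+ * not part of this class):
+ * <pre>{@code
+ * EncoderImpl encoder = new EncoderImpl(backgroundExecutor, videoEncoderConfig);
+ * encoder.setEncoderCallback(encoderCallback, callbackExecutor);
+ * encoder.start();  // onEncodeStart, then repeated onEncodedData callbacks
+ * encoder.pause();  // input is dropped until start() is called again
+ * encoder.start();  // resumes encoding
+ * encoder.stop();   // eventually onEncodeStop
+ * encoder.release();
+ * }</pre>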
+ */
+ @SuppressWarnings("StatementWithEmptyBody") // to better organize the logic and comments
+ @Override
+ public void start() {
+ final long startTriggerTimeUs = generatePresentationTimeUs();
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ mLastDataStopTimestamp = null;
+
+ Logger.d(mTag, "Start on " + DebugUtils.readableUs(startTriggerTimeUs));
+ try {
+ if (mIsFlushedAfterEndOfStream) {
+ // If the codec is flushed after an end-of-stream, it was never
+ // signalled that the source stopped, so we will reset the codec
+ // before starting it again.
+ reset();
+ }
+ mStartStopTimeRangeUs = Range.create(startTriggerTimeUs, NO_LIMIT_LONG);
+ mMediaCodec.start();
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(true);
+ }
+ setState(STARTED);
+ break;
+ case PAUSED:
+ // Resume
+
+ // The Encoder has been resumed, so reset the stop timestamp flags.
+ mLastDataStopTimestamp = null;
+
+ final Range<Long> pauseRange = mActivePauseResumeTimeRanges.removeLast();
+ Preconditions.checkState(
+ pauseRange != null && pauseRange.getUpper() == NO_LIMIT_LONG,
+ "There should be a \"pause\" before \"resume\"");
+ final long pauseTimeUs = pauseRange.getLower();
+ mActivePauseResumeTimeRanges.addLast(
+ Range.create(pauseTimeUs, startTriggerTimeUs));
+ // Do not update total paused duration here since current output buffer may
+ // still before the pause range.
+
+ Logger.d(mTag, "Resume on " + DebugUtils.readableUs(startTriggerTimeUs)
+ + "\nPaused duration = " + DebugUtils.readableUs(
+ (startTriggerTimeUs - pauseTimeUs))
+ );
+
+ if (!mIsVideoEncoder && DeviceQuirks.get(
+ AudioEncoderIgnoresInputTimestampQuirk.class) != null) {
+ // Do nothing. Since we keep handling audio data in the codec after
+ // paused, we don't have to resume the codec and the input source.
+ } else if (mIsVideoEncoder && DeviceQuirks.get(
+ VideoEncoderSuspendDoesNotIncludeSuspendTimeQuirk.class) != null) {
+ // Do nothing. Since we don't pause the codec when paused, we don't have
+ // to resume the codec.
+ } else {
+ setMediaCodecPaused(false);
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(true);
+ }
+ }
+ // If this is a video encoder, then request a key frame in order to complete
+ // the resume process as soon as possible in MediaCodec.Callback
+ // .onOutputBufferAvailable().
+ if (mIsVideoEncoder) {
+ requestKeyFrameToMediaCodec();
+ }
+ setState(STARTED);
+ break;
+ case STARTED:
+ case ERROR:
+ case PENDING_START:
+ // Do nothing
+ break;
+ case STOPPING:
+ case PENDING_START_PAUSED:
+ setState(PENDING_START);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ throw new IllegalStateException("Encoder is released");
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void stop() {
+ stop(NO_TIMESTAMP);
+ }
+
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void stop(long expectedStopTimeUs) {
+ final long stopTriggerTimeUs = generatePresentationTimeUs();
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ case STOPPING:
+ case ERROR:
+ // Do nothing
+ break;
+ case STARTED:
+ case PAUSED:
+ InternalState currentState = mState;
+ setState(STOPPING);
+ final long startTimeUs = mStartStopTimeRangeUs.getLower();
+ if (startTimeUs == NO_LIMIT_LONG) {
+ throw new AssertionError("There should be a \"start\" before \"stop\"");
+ }
+ long stopTimeUs;
+ if (expectedStopTimeUs == NO_TIMESTAMP) {
+ stopTimeUs = stopTriggerTimeUs;
+ } else if (expectedStopTimeUs < startTimeUs) {
+ // If the recording is stopped immediately after started, it's possible
+ // that the expected stop time is less than the start time because the
+ // encoder is run on different executor. Ignore the expected stop time in
+ // this case so that the recording can be stopped correctly.
+ Logger.w(mTag, "The expected stop time is less than the start time. Use "
+ + "current time as stop time.");
+ stopTimeUs = stopTriggerTimeUs;
+ } else {
+ stopTimeUs = expectedStopTimeUs;
+ }
+ if (stopTimeUs < startTimeUs) {
+ throw new AssertionError("The start time should be before the stop time.");
+ }
+ // Store the stop time. The codec will be stopped after receiving the data
+ // that has a timestamp equal or greater than the stop time.
+ mStartStopTimeRangeUs = Range.create(startTimeUs, stopTimeUs);
+ Logger.d(mTag, "Stop on " + DebugUtils.readableUs(stopTimeUs));
+ // If the Encoder is paused and has received enough data, directly signal
+ // the codec to stop.
+ if (currentState == PAUSED && mLastDataStopTimestamp != null) {
+ signalCodecStop();
+ } else {
+ mPendingCodecStop = true;
+ // If somehow the data doesn't reach the expected timestamp before it
+ // times out, stop the codec so that the Encoder can at least be stopped.
+ // Set mDataStopTimeStamp to be null in order to catch this issue in test.
+ mStopTimeoutFuture =
+ CameraXExecutors.mainThreadExecutor().schedule(
+ () -> mEncoderExecutor.execute(() -> {
+ if (mPendingCodecStop) {
+ Logger.w(mTag,
+ "The data didn't reach the expected "
+ + "timestamp before timeout, stop"
+ + " the codec.");
+ mLastDataStopTimestamp = null;
+ signalCodecStop();
+ mPendingCodecStop = false;
+ }
+ }), STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ }
+ break;
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ setState(CONFIGURED);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ throw new IllegalStateException("Encoder is released");
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void signalCodecStop() {
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(false);
+ // Wait for all issued input buffer done to avoid input loss.
+ List<ListenableFuture<Void>> futures = new ArrayList<>();
+ for (InputBuffer inputBuffer : mInputBufferSet) {
+ futures.add(inputBuffer.getTerminationFuture());
+ }
+ Futures.successfulAsList(futures).addListener(this::signalEndOfInputStream,
+ mEncoderExecutor);
+ } else if (mEncoderInput instanceof SurfaceInput) {
+ try {
+ mMediaCodec.signalEndOfInputStream();
+ // On some devices, MediaCodec#signalEndOfInputStream() doesn't work.
+ // See b/255209101.
+ mMediaCodecEosSignalled = true;
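+ // isEosSignalledAndStopTimeReached() uses this flag as a fallback end-of-stream
+ // check in case the BUFFER_FLAG_END_OF_STREAM buffer never arrives.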
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ }
+ }
+ }
+
+ /**
+ * Pauses the encoder.
+ *
+ * {@code pause} only works between {@link #start} and {@link #stop}. Once the encoder is
+ * paused, it will drop the input data until {@link #start} is invoked again.
+ */
+ @Override
+ public void pause() {
+ final long pauseTriggerTimeUs = generatePresentationTimeUs();
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ case PAUSED:
+ case ERROR:
+ case STOPPING:
+ case PENDING_START_PAUSED:
+ // Do nothing
+ break;
+ case PENDING_START:
+ setState(PENDING_START_PAUSED);
+ break;
+ case STARTED:
+ // Create and insert a pause/resume range.
+ Logger.d(mTag, "Pause on " + DebugUtils.readableUs(pauseTriggerTimeUs));
+ mActivePauseResumeTimeRanges.addLast(
+ Range.create(pauseTriggerTimeUs, NO_LIMIT_LONG));
+ setState(PAUSED);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ throw new IllegalStateException("Encoder is released");
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /**
+ * Releases the encoder.
+ *
+ *
+ * Once the encoder is released, it cannot be used anymore. Any other method call after
+ * the encoder is released will get {@link IllegalStateException}. If it is encoding, make
+ * sure to call {@link #stop} before {@code release} to end the stream normally, otherwise
+ * the result of calling {@code release} while encoding is uncertain.
+ */
+ @Override
+ public void release() {
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case CONFIGURED:
+ case STARTED:
+ case PAUSED:
+ case ERROR:
+ releaseInternal();
+ break;
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ setState(PENDING_RELEASE);
+ break;
+ case PENDING_RELEASE:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /** {@inheritDoc} */
+ @NonNull
+ @Override
+ public ListenableFuture<Void> getReleasedFuture() {
+ return mReleasedFuture;
+ }
+
+ /**
+ * Sends a hint to the encoder that the source has stopped producing data.
+ *
+ * This will allow the encoder to reset when it is stopped and no more input data is
+ * incoming. This can optimize the time needed to start the next session with
+ * {@link #start()} and can regenerate a {@link Surface} on devices that don't support
+ * persistent input surfaces.
+ */
+ public void signalSourceStopped() {
+ mEncoderExecutor.execute(() -> {
+ mSourceStoppedSignalled = true;
+ if (mIsFlushedAfterEndOfStream) {
+ mMediaCodec.stop();
+ reset();
+ }
+ });
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void releaseInternal() {
+ if (mIsFlushedAfterEndOfStream) {
+ mMediaCodec.stop();
+ mIsFlushedAfterEndOfStream = false;
+ }
+
+ mMediaCodec.release();
+
+ if (mEncoderInput instanceof SurfaceInput) {
+ ((SurfaceInput) mEncoderInput).releaseSurface();
+ }
+
+ setState(RELEASED);
+
+ mReleasedCompleter.set(null);
+ }
+
+ /**
+ * Sets callback to encoder.
+ *
+ * @param encoderCallback the encoder callback
+ * @param executor the callback executor
+ */
+ @Override
+ public void setEncoderCallback(
+ @NonNull EncoderCallback encoderCallback,
+ @NonNull Executor executor) {
+ synchronized (mLock) {
+ mEncoderCallback = encoderCallback;
+ mEncoderCallbackExecutor = executor;
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void requestKeyFrame() {
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case STARTED:
+ requestKeyFrameToMediaCodec();
+ break;
+ case CONFIGURED:
+ case PAUSED:
+ case ERROR:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ // No-op
+ break;
+ case RELEASED:
+ case PENDING_RELEASE:
+ throw new IllegalStateException("Encoder is released");
+ }
+ });
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void setState(InternalState state) {
+ if (mState == state) {
+ return;
+ }
+ Logger.d(mTag, "Transitioning encoder internal state: " + mState + " --> " + state);
+ mState = state;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void setMediaCodecPaused(boolean paused) {
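+ // Per the MediaCodec documentation, PARAMETER_KEY_SUSPEND with value 1 suspends encoding
+ // of incoming input data and value 0 resumes it.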
+ Bundle bundle = new Bundle();
+ bundle.putInt(MediaCodec.PARAMETER_KEY_SUSPEND, paused ? 1 : 0);
+ mMediaCodec.setParameters(bundle);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void requestKeyFrameToMediaCodec() {
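+ // Per the MediaCodec documentation, PARAMETER_KEY_REQUEST_SYNC_FRAME takes the value 0 and
+ // asks the encoder to produce a sync (key) frame soon.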
+ Bundle bundle = new Bundle();
+ bundle.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mMediaCodec.setParameters(bundle);
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void signalEndOfInputStream() {
+ Futures.addCallback(acquireInputBuffer(),
+ new FutureCallback<InputBuffer>() {
+ @Override
+ public void onSuccess(InputBuffer inputBuffer) {
+ inputBuffer.setPresentationTimeUs(generatePresentationTimeUs());
+ inputBuffer.setEndOfStream(true);
+ inputBuffer.submit();
+
+ Futures.addCallback(inputBuffer.getTerminationFuture(),
+ new FutureCallback<Void>() {
+ @ExecutedBy("mEncoderExecutor")
+ @Override
+ public void onSuccess(@Nullable Void result) {
+ // Do nothing.
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ if (t instanceof MediaCodec.CodecException) {
+ handleEncodeError(
+ (MediaCodec.CodecException) t);
+ } else {
+ handleEncodeError(EncodeException.ERROR_UNKNOWN,
+ t.getMessage(), t);
+ }
+ }
+ }, mEncoderExecutor);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ handleEncodeError(EncodeException.ERROR_UNKNOWN,
+ "Unable to acquire InputBuffer.", t);
+ }
+ }, mEncoderExecutor);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void handleEncodeError(@NonNull MediaCodec.CodecException e) {
+ handleEncodeError(EncodeException.ERROR_CODEC, e.getMessage(), e);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void handleEncodeError(@EncodeException.ErrorType int error, @Nullable String message,
+ @Nullable Throwable throwable) {
+ switch (mState) {
+ case CONFIGURED:
+ // Unable to start MediaCodec. This is a fatal error. Try to reset the encoder.
+ notifyError(error, message, throwable);
+ reset();
+ break;
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START_PAUSED:
+ case PENDING_START:
+ case PENDING_RELEASE:
+ setState(ERROR);
+ stopMediaCodec(() -> notifyError(error, message, throwable));
+ break;
+ case ERROR:
+ //noinspection ConstantConditions
+ Logger.w(mTag, "Get more than one error: " + message + "(" + error + ")",
+ throwable);
+ break;
+ case RELEASED:
+ // Do nothing
+ break;
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ void notifyError(@EncodeException.ErrorType int error, @Nullable String message,
+ @Nullable Throwable throwable) {
+ EncoderCallback callback;
+ Executor executor;
+ synchronized (mLock) {
+ callback = mEncoderCallback;
+ executor = mEncoderCallbackExecutor;
+ }
+ try {
+ executor.execute(
+ () -> callback.onEncodeError(new EncodeException(error, message, throwable)));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void stopMediaCodec(@Nullable Runnable afterStop) {
+ /*
+ * MediaCodec#stop will free all its input/output ByteBuffers. Therefore, before calling
+ * MediaCodec#stop, it must ensure all dispatched EncodedData(output ByteBuffers) and
+ * InputBuffer(input ByteBuffers) are complete. Otherwise, the ByteBuffer receiver will
+ * get buffer overflow when accessing the ByteBuffers.
+ */
+ List<ListenableFuture<Void>> futures = new ArrayList<>();
+ for (EncodedDataImpl dataToClose : mEncodedDataSet) {
+ futures.add(dataToClose.getClosedFuture());
+ }
+ for (InputBuffer inputBuffer : mInputBufferSet) {
+ futures.add(inputBuffer.getTerminationFuture());
+ }
+ if (!futures.isEmpty()) {
+ Logger.d(mTag, "Waiting for resources to return."
+ + " encoded data = " + mEncodedDataSet.size()
+ + ", input buffers = " + mInputBufferSet.size());
+ }
+ Futures.successfulAsList(futures).addListener(() -> {
+ // If the encoder is not in ERROR state, stop the codec first before resetting.
+ // Otherwise, reset directly.
+ if (mState != ERROR) {
+ if (!futures.isEmpty()) {
+ Logger.d(mTag, "encoded data and input buffers are returned");
+ }
+ if (mEncoderInput instanceof SurfaceInput && !mSourceStoppedSignalled) {
+ // For a SurfaceInput, the codec is in control of de-queuing buffers from the
+ // underlying BufferQueue. If we stop the codec, then it will stop de-queuing
+ // buffers and the BufferQueue may run out of input buffers, causing the camera
+ // pipeline to stall. Instead of stopping, we will flush the codec. Since the
+ // codec is operating in asynchronous mode, this will cause the codec to
+ // continue to discard buffers. We should have already received the
+ // end-of-stream signal on an output buffer at this point, so those buffers
+ // are not needed anyways. We will defer resetting the codec until just
+ // before starting the codec again.
+ mMediaCodec.flush();
+ mIsFlushedAfterEndOfStream = true;
+ } else {
+ // Non-SurfaceInputs give us more control over input buffers. We can directly
+ // stop the codec instead of flushing.
+ // Additionally, if we already received a signal that the source is stopped,
+ // then there shouldn't be new buffers being produced, and we don't need to
+ // flush.
+ mMediaCodec.stop();
+ }
+ }
+ if (afterStop != null) {
+ afterStop.run();
+ }
+ handleStopped();
+ }, mEncoderExecutor);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void handleStopped() {
+ if (mState == PENDING_RELEASE) {
+ releaseInternal();
+ } else {
+ InternalState oldState = mState;
+ if (!mIsFlushedAfterEndOfStream) {
+ // Only reset if the codec is stopped (not flushed). If the codec is flushed, we
+ // want it to continue to discard buffers. We will reset before starting the
+ // codec again.
+ reset();
+ }
+ setState(CONFIGURED);
+ if (oldState == PENDING_START || oldState == PENDING_START_PAUSED) {
+ start();
+ if (oldState == PENDING_START_PAUSED) {
+ pause();
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void updateTotalPausedDuration(long bufferPresentationTimeUs) {
+ while (!mActivePauseResumeTimeRanges.isEmpty()) {
+ Range<Long> pauseRange = mActivePauseResumeTimeRanges.getFirst();
+ if (bufferPresentationTimeUs > pauseRange.getUpper()) {
+ // Later than current pause, remove this pause and update total paused duration.
+ mActivePauseResumeTimeRanges.removeFirst();
+ mTotalPausedDurationUs += (pauseRange.getUpper() - pauseRange.getLower());
+ Logger.d(mTag,
+ "Total paused duration = " + DebugUtils.readableUs(mTotalPausedDurationUs));
+ } else {
+ break;
+ }
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ long getAdjustedTimeUs(@NonNull BufferInfo bufferInfo) {
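+ // e.g. (hypothetical numbers) a buffer at presentationTimeUs = 9_000_000 with a total paused
+ // duration of 3_000_000 is reported downstream at 6_000_000, keeping the output timeline
+ // free of gaps across pause/resume cycles.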
+ long adjustedTimeUs;
+ if (mTotalPausedDurationUs > 0L) {
+ adjustedTimeUs = bufferInfo.presentationTimeUs - mTotalPausedDurationUs;
+ } else {
+ adjustedTimeUs = bufferInfo.presentationTimeUs;
+ }
+ return adjustedTimeUs;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ boolean isInPauseRange(long timeUs) {
+ for (Range<Long> range : mActivePauseResumeTimeRanges) {
+ if (range.contains(timeUs)) {
+ return true;
+ } else if (timeUs < range.getLower()) {
+ // Earlier than pause range.
+ return false;
+ }
+ // Later than current pause, keep searching.
+ }
+ return false;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ @NonNull
+ ListenableFuture<InputBuffer> acquireInputBuffer() {
+ switch (mState) {
+ case CONFIGURED:
+ return Futures.immediateFailedFuture(new IllegalStateException(
+ "Encoder is not started yet."));
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ AtomicReference<Completer<InputBuffer>> ref = new AtomicReference<>();
+ ListenableFuture<InputBuffer> future = CallbackToFutureAdapter.getFuture(
+ completer -> {
+ ref.set(completer);
+ return "acquireInputBuffer";
+ });
+ Completer<InputBuffer> completer = Preconditions.checkNotNull(ref.get());
+ mAcquisitionQueue.offer(completer);
+ completer.addCancellationListener(() -> mAcquisitionQueue.remove(completer),
+ mEncoderExecutor);
+ matchAcquisitionsAndFreeBufferIndexes();
+ return future;
+ case ERROR:
+ return Futures.immediateFailedFuture(new IllegalStateException(
+ "Encoder is in error state."));
+ case RELEASED:
+ return Futures.immediateFailedFuture(new IllegalStateException(
+ "Encoder is released."));
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @ExecutedBy("mEncoderExecutor")
+ void matchAcquisitionsAndFreeBufferIndexes() {
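+ // Pair pending acquireInputBuffer() completers (FIFO) with input-buffer indexes delivered by
+ // MediaCodec.Callback#onInputBufferAvailable, wrapping each match in an InputBufferImpl.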
+ while (!mAcquisitionQueue.isEmpty() && !mFreeInputBufferIndexQueue.isEmpty()) {
+ Completer<InputBuffer> completer = requireNonNull(mAcquisitionQueue.poll());
+ int bufferIndex = requireNonNull(mFreeInputBufferIndexQueue.poll());
+
+ InputBufferImpl inputBuffer;
+ try {
+ inputBuffer = new InputBufferImpl(mMediaCodec, bufferIndex);
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ if (completer.set(inputBuffer)) {
+ mInputBufferSet.add(inputBuffer);
+ inputBuffer.getTerminationFuture().addListener(
+ () -> mInputBufferSet.remove(inputBuffer), mEncoderExecutor);
+ } else {
+ inputBuffer.cancel();
+ }
+ }
+ }
+
+ @NonNull
+ private static EncoderInfo createEncoderInfo(boolean isVideoEncoder,
+ @NonNull MediaCodecInfo codecInfo, @NonNull String mime) throws InvalidConfigException {
+ return isVideoEncoder ? new VideoEncoderInfoImpl(codecInfo, mime)
+ : new AudioEncoderInfoImpl(codecInfo, mime);
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ long generatePresentationTimeUs() {
+ return mTimeProvider.uptimeUs();
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ static boolean isKeyFrame(@NonNull BufferInfo bufferInfo) {
+ return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ static boolean hasEndOfStreamFlag(@NonNull BufferInfo bufferInfo) {
+ return (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ class MediaCodecCallback extends MediaCodec.Callback {
+ @Nullable
+ private final VideoTimebaseConverter mVideoTimestampConverter;
+
+ private boolean mHasSendStartCallback = false;
+ private boolean mHasFirstData = false;
+ private boolean mHasEndData = false;
+ /** The last presentation time of the BufferInfo, without modification. */
+ private long mLastPresentationTimeUs = 0L;
+ /**
+ * The last sent presentation time of BufferInfo. The value could be adjusted by total
+ * pause duration.
+ */
+ private long mLastSentAdjustedTimeUs = 0L;
+ private boolean mIsOutputBufferInPauseState = false;
+ private boolean mIsKeyFrameRequired = false;
+ private boolean mStopped = false;
+
+ MediaCodecCallback() {
+ if (mIsVideoEncoder) {
+ Timebase inputTimebase;
+ if (DeviceQuirks.get(CameraUseInconsistentTimebaseQuirk.class) != null) {
+ inputTimebase = null;
+ } else {
+ inputTimebase = mInputTimebase;
+ }
+ mVideoTimestampConverter = new VideoTimebaseConverter(mTimeProvider, inputTimebase);
+ } else {
+ mVideoTimestampConverter = null;
+ }
+ }
+
+ @Override
+ public void onInputBufferAvailable(MediaCodec mediaCodec, int index) {
+ mEncoderExecutor.execute(() -> {
+ if (mStopped) {
+ Logger.w(mTag, "Receives input frame after codec is reset.");
+ return;
+ }
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ mFreeInputBufferIndexQueue.offer(index);
+ matchAcquisitionsAndFreeBufferIndexes();
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @Override
+ public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int index,
+ @NonNull BufferInfo bufferInfo) {
+ mEncoderExecutor.execute(() -> {
+ if (mStopped) {
+ Logger.w(mTag, "Receives frame after codec is reset.");
+ return;
+ }
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ final EncoderCallback encoderCallback;
+ final Executor executor;
+ synchronized (mLock) {
+ encoderCallback = mEncoderCallback;
+ executor = mEncoderCallbackExecutor;
+ }
+
+ if (DEBUG) {
+ Logger.d(mTag, DebugUtils.readableBufferInfo(bufferInfo));
+ }
+
+ // Handle start of stream
+ if (!mHasSendStartCallback) {
+ mHasSendStartCallback = true;
+ try {
+ executor.execute(encoderCallback::onEncodeStart);
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+
+ if (checkBufferInfo(bufferInfo)) {
+ if (!mHasFirstData) {
+ mHasFirstData = true;
+ }
+ BufferInfo outBufferInfo = resolveOutputBufferInfo(bufferInfo);
+ mLastSentAdjustedTimeUs = outBufferInfo.presentationTimeUs;
+ try {
+ EncodedDataImpl encodedData = new EncodedDataImpl(mediaCodec, index,
+ outBufferInfo);
+ sendEncodedData(encodedData, encoderCallback, executor);
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ } else {
+ // Not necessary to return fake buffer
+ if (index != FAKE_BUFFER_INDEX) {
+ try {
+ mMediaCodec.releaseOutputBuffer(index, false);
+ } catch (MediaCodec.CodecException e) {
+ handleEncodeError(e);
+ return;
+ }
+ }
+ }
+
+ // Handle end of stream
+ if (!mHasEndData && isEndOfStream(bufferInfo)) {
+ mHasEndData = true;
+ stopMediaCodec(() -> {
+ if (mState == ERROR) {
+ // Error occurred during stopping.
+ return;
+ }
+ try {
+ executor.execute(encoderCallback::onEncodeStop);
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ });
+ }
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ @NonNull
+ private BufferInfo resolveOutputBufferInfo(@NonNull BufferInfo bufferInfo) {
+ long adjustedTimeUs = getAdjustedTimeUs(bufferInfo);
+ if (bufferInfo.presentationTimeUs == adjustedTimeUs) {
+ return bufferInfo;
+ }
+
+ // If adjusted time <= last sent time, the buffer should have been detected and
+ // dropped in checkBufferInfo().
+ Preconditions.checkState(adjustedTimeUs > mLastSentAdjustedTimeUs);
+ if (DEBUG) {
+ Logger.d(mTag, "Adjust bufferInfo.presentationTimeUs to "
+ + DebugUtils.readableUs(adjustedTimeUs));
+ }
+ BufferInfo newBufferInfo = new BufferInfo();
+ newBufferInfo.set(bufferInfo.offset, bufferInfo.size, adjustedTimeUs, bufferInfo.flags);
+ return newBufferInfo;
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private void sendEncodedData(@NonNull EncodedDataImpl encodedData,
+ @NonNull EncoderCallback callback, @NonNull Executor executor) {
+ mEncodedDataSet.add(encodedData);
+ Futures.addCallback(encodedData.getClosedFuture(),
+ new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(@Nullable Void result) {
+ mEncodedDataSet.remove(encodedData);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ mEncodedDataSet.remove(encodedData);
+ if (t instanceof MediaCodec.CodecException) {
+ handleEncodeError(
+ (MediaCodec.CodecException) t);
+ } else {
+ handleEncodeError(EncodeException.ERROR_UNKNOWN,
+ t.getMessage(), t);
+ }
+ }
+ }, mEncoderExecutor);
+ try {
+ executor.execute(() -> callback.onEncodedData(encodedData));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ encodedData.close();
+ }
+ }
+
+ /**
+ * Checks the {@link BufferInfo} and updates related states.
+ *
+ * @return {@code true} if the buffer is valid, otherwise {@code false}.
+ */
+ @ExecutedBy("mEncoderExecutor")
+ private boolean checkBufferInfo(@NonNull BufferInfo bufferInfo) {
+ if (mHasEndData) {
+ Logger.d(mTag, "Drop buffer by already reach end of stream.");
+ return false;
+ }
+
+ if (bufferInfo.size <= 0) {
+ Logger.d(mTag, "Drop buffer by invalid buffer size.");
+ return false;
+ }
+
+ // Sometimes codec config data is delivered through the output callback; it should have
+ // already been sent out via onOutputFormatChanged(), so ignore it.
+ if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ Logger.d(mTag, "Drop buffer by codec config.");
+ return false;
+ }
+
+ if (mVideoTimestampConverter != null) {
+ bufferInfo.presentationTimeUs =
+ mVideoTimestampConverter.convertToUptimeUs(bufferInfo.presentationTimeUs);
+ }
+
+ // MediaCodec may send out of order buffer
+ if (bufferInfo.presentationTimeUs <= mLastPresentationTimeUs) {
+ Logger.d(mTag, "Drop buffer by out of order buffer from MediaCodec.");
+ return false;
+ }
+ mLastPresentationTimeUs = bufferInfo.presentationTimeUs;
+
+ // Ignore buffers that are not in the start/stop range. One situation is to ignore outdated
+ // frames when using the Surface of MediaCodec#createPersistentInputSurface. After
+ // the persistent Surface stops, it will keep a small number of old frames in its
+ // buffer, and send those old frames in the next startup.
+ if (!mStartStopTimeRangeUs.contains(bufferInfo.presentationTimeUs)) {
+ Logger.d(mTag, "Drop buffer by not in start-stop range.");
+ // If data hasn't reached the expected stop timestamp, set the stop timestamp.
+ if (mPendingCodecStop
+ && bufferInfo.presentationTimeUs >= mStartStopTimeRangeUs.getUpper()) {
+ if (mStopTimeoutFuture != null) {
+ mStopTimeoutFuture.cancel(true);
+ }
+ mLastDataStopTimestamp = bufferInfo.presentationTimeUs;
+ signalCodecStop();
+ mPendingCodecStop = false;
+ }
+ return false;
+ }
+
+ if (updatePauseRangeStateAndCheckIfBufferPaused(bufferInfo)) {
+ Logger.d(mTag, "Drop buffer by pause.");
+ return false;
+ }
+
+ // We should check if the adjusted time is valid. see b/189114207.
+ if (getAdjustedTimeUs(bufferInfo) <= mLastSentAdjustedTimeUs) {
+ Logger.d(mTag, "Drop buffer by adjusted time is less than the last sent time.");
+ if (mIsVideoEncoder && isKeyFrame(bufferInfo)) {
+ mIsKeyFrameRequired = true;
+ }
+ return false;
+ }
+
+ if (!mHasFirstData && !mIsKeyFrameRequired && mIsVideoEncoder) {
+ mIsKeyFrameRequired = true;
+ }
+
+ if (mIsKeyFrameRequired) {
+ if (!isKeyFrame(bufferInfo)) {
+ Logger.d(mTag, "Drop buffer by not a key frame.");
+ requestKeyFrameToMediaCodec();
+ return false;
+ }
+ mIsKeyFrameRequired = false;
+ }
+
+ return true;
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private boolean isEndOfStream(@NonNull BufferInfo bufferInfo) {
+ return hasEndOfStreamFlag(bufferInfo) || isEosSignalledAndStopTimeReached(bufferInfo);
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ private boolean isEosSignalledAndStopTimeReached(@NonNull BufferInfo bufferInfo) {
+ return mMediaCodecEosSignalled
+ && bufferInfo.presentationTimeUs > mStartStopTimeRangeUs.getUpper();
+ }
+
+ @SuppressWarnings("StatementWithEmptyBody") // to better organize the logic and comments
+ @ExecutedBy("mEncoderExecutor")
+ private boolean updatePauseRangeStateAndCheckIfBufferPaused(
+ @NonNull BufferInfo bufferInfo) {
+ updateTotalPausedDuration(bufferInfo.presentationTimeUs);
+ boolean isInPauseRange = isInPauseRange(bufferInfo.presentationTimeUs);
+ if (!mIsOutputBufferInPauseState && isInPauseRange) {
+ Logger.d(mTag, "Switch to pause state");
+ // From resume to pause
+ mIsOutputBufferInPauseState = true;
+
+ // Invoke paused callback
+ Executor executor;
+ EncoderCallback encoderCallback;
+ synchronized (mLock) {
+ executor = mEncoderCallbackExecutor;
+ encoderCallback = mEncoderCallback;
+ }
+ executor.execute(encoderCallback::onEncodePaused);
+
+ // We must ensure that the current state is PAUSED before we stop the input
+ // source and pause the codec. This is because start() may be called before the
+ // output buffer reaches the pause range.
+ if (mState == PAUSED) {
+ if (!mIsVideoEncoder && DeviceQuirks.get(
+ AudioEncoderIgnoresInputTimestampQuirk.class) != null) {
+ // Do nothing, which means keep handling audio data in the codec.
+ } else if (mIsVideoEncoder && DeviceQuirks.get(
+ VideoEncoderSuspendDoesNotIncludeSuspendTimeQuirk.class) != null) {
+ // Do nothing, which means don't pause the codec.
+ } else {
+ if (mEncoderInput instanceof ByteBufferInput) {
+ ((ByteBufferInput) mEncoderInput).setActive(false);
+ }
+ setMediaCodecPaused(true);
+ }
+ }
+
+ // An encoding session can be paused and resumed multiple times, so a later pause
+ // should overwrite the previous data stop time.
+ mLastDataStopTimestamp = bufferInfo.presentationTimeUs;
+ // If the encoder was stopped before the data entered the pause period, stop the
+ // codec directly.
+ if (mPendingCodecStop) {
+ if (mStopTimeoutFuture != null) {
+ mStopTimeoutFuture.cancel(true);
+ }
+ signalCodecStop();
+ mPendingCodecStop = false;
+ }
+ } else if (mIsOutputBufferInPauseState && !isInPauseRange) {
+ // From pause to resume
+ Logger.d(mTag, "Switch to resume state");
+ mIsOutputBufferInPauseState = false;
+ if (mIsVideoEncoder && !isKeyFrame(bufferInfo)) {
+ mIsKeyFrameRequired = true;
+ }
+ }
+
+ return mIsOutputBufferInPauseState;
+ }
+
+ @Override
+ public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+ mEncoderExecutor.execute(() -> {
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ handleEncodeError(e);
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ @Override
+ public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec,
+ @NonNull MediaFormat mediaFormat) {
+ mEncoderExecutor.execute(() -> {
+ if (mStopped) {
+ Logger.w(mTag, "Receives onOutputFormatChanged after codec is reset.");
+ return;
+ }
+ switch (mState) {
+ case STARTED:
+ case PAUSED:
+ case STOPPING:
+ case PENDING_START:
+ case PENDING_START_PAUSED:
+ case PENDING_RELEASE:
+ EncoderCallback encoderCallback;
+ Executor executor;
+ synchronized (mLock) {
+ encoderCallback = mEncoderCallback;
+ executor = mEncoderCallbackExecutor;
+ }
+ try {
+ executor.execute(
+ () -> encoderCallback.onOutputConfigUpdate(() -> mediaFormat));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ break;
+ case CONFIGURED:
+ case ERROR:
+ case RELEASED:
+ // Do nothing
+ break;
+ default:
+ throw new IllegalStateException("Unknown state: " + mState);
+ }
+ });
+ }
+
+ /** Stop process further frame output. */
+ @ExecutedBy("mEncoderExecutor")
+ void stop() {
+ mStopped = true;
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ class SurfaceInput implements Encoder.SurfaceInput {
+
+ private final Object mLock = new Object();
+
+ @GuardedBy("mLock")
+ private Surface mSurface;
+
+ @GuardedBy("mLock")
+ private final Set<Surface> mObsoleteSurfaces = new HashSet<>();
+
+ @GuardedBy("mLock")
+ private OnSurfaceUpdateListener mSurfaceUpdateListener;
+
+ @GuardedBy("mLock")
+ private Executor mSurfaceUpdateExecutor;
+
+ /**
+ * Sets the surface update listener.
+ *
+ * @param executor the executor to invoke the listener
+ * @param listener the surface update listener
+ */
+ @Override
+ public void setOnSurfaceUpdateListener(@NonNull Executor executor,
+ @NonNull OnSurfaceUpdateListener listener) {
+ Surface surface;
+ synchronized (mLock) {
+ mSurfaceUpdateListener = Preconditions.checkNotNull(listener);
+ mSurfaceUpdateExecutor = Preconditions.checkNotNull(executor);
+ surface = mSurface;
+ }
+ if (surface != null) {
+ notifySurfaceUpdate(executor, listener, surface);
+ }
+ }
+
+ @SuppressLint("NewApi")
+ void resetSurface() {
+ Surface surface;
+ Executor executor;
+ OnSurfaceUpdateListener listener;
+ EncoderNotUsePersistentInputSurfaceQuirk quirk = DeviceQuirks.get(
+ EncoderNotUsePersistentInputSurfaceQuirk.class);
+ synchronized (mLock) {
+ if (quirk == null) {
+ if (mSurface == null) {
+ mSurface = Api23Impl.createPersistentInputSurface();
+ surface = mSurface;
+ } else {
+ surface = null;
+ }
+ Api23Impl.setInputSurface(mMediaCodec, mSurface);
+ } else {
+ if (mSurface != null) {
+ mObsoleteSurfaces.add(mSurface);
+ }
+ mSurface = mMediaCodec.createInputSurface();
+ surface = mSurface;
+ }
+ listener = mSurfaceUpdateListener;
+ executor = mSurfaceUpdateExecutor;
+ }
+ if (surface != null && listener != null && executor != null) {
+ notifySurfaceUpdate(executor, listener, surface);
+ }
+ }
+
+ void releaseSurface() {
+ Surface surface;
+ Set<Surface> obsoleteSurfaces;
+ synchronized (mLock) {
+ surface = mSurface;
+ mSurface = null;
+ obsoleteSurfaces = new HashSet<>(mObsoleteSurfaces);
+ mObsoleteSurfaces.clear();
+ }
+ if (surface != null) {
+ surface.release();
+ }
+ for (Surface obsoleteSurface : obsoleteSurfaces) {
+ obsoleteSurface.release();
+ }
+ }
+
+ private void notifySurfaceUpdate(@NonNull Executor executor,
+ @NonNull OnSurfaceUpdateListener listener, @NonNull Surface surface) {
+ try {
+ executor.execute(() -> listener.onSurfaceUpdate(surface));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") // synthetic accessor
+ class ByteBufferInput implements Encoder.ByteBufferInput {
+
+ private final Map<Observer<? super State>, Executor> mStateObservers =
+ new LinkedHashMap<>();
+
+ private State mBufferProviderState = State.INACTIVE;
+
+ private final List<ListenableFuture<InputBuffer>> mAcquisitionList = new ArrayList<>();
+
+ /** {@inheritDoc} */
+ @NonNull
+ @Override
+ public ListenableFuture<State> fetchData() {
+ return CallbackToFutureAdapter.getFuture(completer -> {
+ mEncoderExecutor.execute(() -> completer.set(mBufferProviderState));
+ return "fetchData";
+ });
+ }
+
+ /** {@inheritDoc} */
+ @NonNull
+ @Override
+ public ListenableFuture<InputBuffer> acquireBuffer() {
+ return CallbackToFutureAdapter.getFuture(completer -> {
+ mEncoderExecutor.execute(() -> {
+ if (mBufferProviderState == State.ACTIVE) {
+ ListenableFuture<InputBuffer> future = acquireInputBuffer();
+ Futures.propagate(future, completer);
+ // If the outer future is cancelled, cancel the internal future as well.
+ completer.addCancellationListener(() -> cancelInputBuffer(future),
+ CameraXExecutors.directExecutor());
+
+ // Keep tracking the acquisition via the internal future. Once the provider state
+ // transitions to inactive, cancelling the internal future also signals the outer
+ // future, since the internal result is propagated to the completer.
+ mAcquisitionList.add(future);
+ future.addListener(() -> mAcquisitionList.remove(future), mEncoderExecutor);
+ } else if (mBufferProviderState == State.INACTIVE) {
+ completer.setException(
+ new IllegalStateException("BufferProvider is not active."));
+ } else {
+ completer.setException(
+ new IllegalStateException(
+ "Unknown state: " + mBufferProviderState));
+ }
+ });
+ return "acquireBuffer";
+ });
+ }
+
+ private void cancelInputBuffer(@NonNull ListenableFuture<InputBuffer> inputBufferFuture) {
+ if (!inputBufferFuture.cancel(true)) {
+ // The future could not be cancelled, so cancel the acquired input buffer if possible.
+ Preconditions.checkState(inputBufferFuture.isDone());
+ try {
+ inputBufferFuture.get().cancel();
+ } catch (ExecutionException | InterruptedException | CancellationException e) {
+ Logger.w(mTag, "Unable to cancel the input buffer: " + e);
+ }
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void addObserver(@NonNull Executor executor,
+ @NonNull Observer<? super State> observer) {
+ mEncoderExecutor.execute(() -> {
+ mStateObservers.put(Preconditions.checkNotNull(observer),
+ Preconditions.checkNotNull(executor));
+ final State state = mBufferProviderState;
+ executor.execute(() -> observer.onNewData(state));
+ });
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void removeObserver(@NonNull Observer<? super State> observer) {
+ mEncoderExecutor.execute(
+ () -> mStateObservers.remove(Preconditions.checkNotNull(observer)));
+ }
+
+ @ExecutedBy("mEncoderExecutor")
+ void setActive(boolean isActive) {
+ final State newState = isActive ? State.ACTIVE : State.INACTIVE;
+ if (mBufferProviderState == newState) {
+ return;
+ }
+ mBufferProviderState = newState;
+
+ if (newState == State.INACTIVE) {
+ for (ListenableFuture<InputBuffer> future : mAcquisitionList) {
+ future.cancel(true);
+ }
+ mAcquisitionList.clear();
+ }
+
+ for (Map.Entry<Observer<? super State>, Executor> entry : mStateObservers.entrySet()) {
+ try {
+ entry.getValue().execute(() -> entry.getKey().onNewData(newState));
+ } catch (RejectedExecutionException e) {
+ Logger.e(mTag, "Unable to post to the supplied executor.", e);
+ }
+ }
+ }
+ }
+
+ /**
+ * Nested class to avoid verification errors for methods introduced in Android 6.0 (API 23).
+ */
+ @RequiresApi(23)
+ private static class Api23Impl {
+
+ private Api23Impl() {
+ }
+
+ @DoNotInline
+ @NonNull
+ static Surface createPersistentInputSurface() {
+ return MediaCodec.createPersistentInputSurface();
+ }
+
+ @DoNotInline
+ static void setInputSurface(@NonNull MediaCodec mediaCodec, @NonNull Surface surface) {
+ mediaCodec.setInputSurface(surface);
+ }
+ }
+}
+
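The `checkBufferInfo` and pause-range logic above determines which encoded buffers actually reach the muxer: buffers with non-increasing timestamps, buffers outside the start/stop range, buffers inside a pause range, and non-key frames while a key frame is required are all dropped. Below is a minimal standalone sketch of the same filtering idea, using only the public `MediaCodec.BufferInfo` and `Range` APIs; the class and member names are illustrative and not part of this patch.

```java
import android.media.MediaCodec;
import android.util.Range;

/** Illustrative sketch of the buffer-dropping rules used by the encoder callback above. */
final class BufferFilterSketch {

    private final Range<Long> mStartStopRangeUs;
    private long mLastPresentationTimeUs = Long.MIN_VALUE;
    private boolean mKeyFrameRequired = true; // a video stream must start with a key frame

    BufferFilterSketch(long startUs, long stopUs) {
        mStartStopRangeUs = Range.create(startUs, stopUs);
    }

    /** Returns true if the buffer should be forwarded, false if it should be dropped. */
    boolean shouldKeep(MediaCodec.BufferInfo info) {
        // MediaCodec may emit buffers out of order; enforce strictly increasing timestamps.
        if (info.presentationTimeUs <= mLastPresentationTimeUs) {
            return false;
        }
        mLastPresentationTimeUs = info.presentationTimeUs;

        // Ignore frames produced outside the start/stop window, e.g. stale frames replayed
        // by a persistent input surface on the next startup.
        if (!mStartStopRangeUs.contains(info.presentationTimeUs)) {
            return false;
        }

        // After the start (or a resume), wait for a key frame before forwarding video data.
        if (mKeyFrameRequired) {
            if ((info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) == 0) {
                return false; // the real implementation also requests a sync frame here
            }
            mKeyFrameRequired = false;
        }
        return true;
    }
}
```

The real callback additionally converts timebases, requests sync frames from the codec, and coordinates deferred codec stops, which this sketch intentionally leaves out.
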
diff --git a/app/src/main/java/androidx/camera/video/originals/PendingRecording.java b/app/src/main/java/androidx/camera/video/originals/PendingRecording.java
new file mode 100644
index 0000000..ee062e2
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/originals/PendingRecording.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video;
+
+import android.Manifest;
+import android.content.Context;
+
+import androidx.annotation.CheckResult;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.RequiresPermission;
+import androidx.camera.core.impl.utils.ContextUtil;
+import androidx.core.content.PermissionChecker;
+import androidx.core.util.Consumer;
+import androidx.core.util.Preconditions;
+
+import java.util.concurrent.Executor;
+
+/**
+ * A recording that can be started at a future time.
+ *
+ * A pending recording allows for configuration of a recording before it is started. Once a
+ * pending recording is started with {@link #start(Executor, Consumer)}, any changes to the pending
+ * recording will not affect the actual recording; any modifications to the recording will need
+ * to occur through the controls of the {@link SucklessRecording} class returned by
+ * {@link #start(Executor, Consumer)}.
+ *
+ * <p>A pending recording can be created using one of the {@link Recorder} methods for starting a
+ * recording such as {@link Recorder#prepareRecording(Context, MediaStoreOutputOptions)}.
+ *
+ * <p>There may be more settings that can only be changed per-recorder instead of per-recording,
+ * because it requires expensive operations like reconfiguring the camera. For those settings, use
+ * the {@link Recorder.Builder} methods to configure before creating the {@link Recorder}
+ * instance, then create the pending recording with it.
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+public final class PendingRecording {
+
+ private final Context mContext;
+ private final Recorder mRecorder;
+ private final OutputOptions mOutputOptions;
+ private Consumer<VideoRecordEvent> mEventListener;
+ private Executor mListenerExecutor;
+
+ private boolean mAudioEnabled = false;
+
+ PendingRecording(@NonNull Context context, @NonNull Recorder recorder,
+ @NonNull OutputOptions options) {
+ // Application context is sufficient for all our needs, so store that to avoid leaking
+ // unused resources. For attribution, ContextUtil.getApplicationContext() will retain the
+ // attribution tag from the original context.
+ mContext = ContextUtil.getApplicationContext(context);
+ mRecorder = recorder;
+ mOutputOptions = options;
+ }
+
+ /**
+ * Returns an application context which was retrieved from the {@link Context} used to
+ * create this object.
+ */
+ @NonNull
+ Context getApplicationContext() {
+ return mContext;
+ }
+
+ @NonNull
+ Recorder getRecorder() {
+ return mRecorder;
+ }
+
+ @NonNull
+ OutputOptions getOutputOptions() {
+ return mOutputOptions;
+ }
+
+ @Nullable
+ Executor getListenerExecutor() {
+ return mListenerExecutor;
+ }
+
+ @Nullable
+ Consumer<VideoRecordEvent> getEventListener() {
+ return mEventListener;
+ }
+
+ boolean isAudioEnabled() {
+ return mAudioEnabled;
+ }
+
+ /**
+ * Enables audio to be recorded for this recording.
+ *
+ * This method must be called prior to {@link #start(Executor, Consumer)} to enable audio
+ * in the recording. If this method is not called, the {@link SucklessRecording} generated by
+ * {@link #start(Executor, Consumer)} will not contain audio, and
+ * {@link AudioStats#getAudioState()} will always return
+ * {@link AudioStats#AUDIO_STATE_DISABLED} for all {@link RecordingStats} sent to the listener
+ * set passed to {@link #start(Executor, Consumer)}.
+ *
+ * <p>Recording with audio requires the {@link android.Manifest.permission#RECORD_AUDIO}
+ * permission; without it, recording will fail at {@link #start(Executor, Consumer)} with an
+ * {@link IllegalStateException}.
+ *
+ * @return this pending recording
+ * @throws IllegalStateException if the {@link Recorder} this recording is associated to
+ * doesn't support audio.
+ * @throws SecurityException if the {@link Manifest.permission#RECORD_AUDIO} permission
+ * is denied for the current application.
+ */
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @NonNull
+ public SucklessPendingRecording withAudioEnabled() {
+ // Check permissions and throw a security exception if RECORD_AUDIO is not granted.
+ if (PermissionChecker.checkSelfPermission(mContext, Manifest.permission.RECORD_AUDIO)
+ == PermissionChecker.PERMISSION_DENIED) {
+ throw new SecurityException("Attempted to enable audio for recording but application "
+ + "does not have RECORD_AUDIO permission granted.");
+ }
+ Preconditions.checkState(mRecorder.isAudioSupported(), "The Recorder this recording is "
+ + "associated to doesn't support audio.");
+ mAudioEnabled = true;
+ return this;
+ }
+
+ /**
+ * Starts the recording, making it an active recording.
+ *
+ * <p>Only a single recording can be active at a time, so if another recording is active,
+ * this will throw an {@link IllegalStateException}.
+ *
+ * <p>If there are no errors starting the recording, the returned {@link SucklessRecording}
+ * can be used to {@link SucklessRecording#pause() pause}, {@link SucklessRecording#resume() resume},
+ * or {@link SucklessRecording#stop() stop} the recording.
+ *
+ * <p>Upon successfully starting the recording, a {@link VideoRecordEvent.Start} event will
+ * be the first event sent to the provided event listener.
+ *
+ * <p>If errors occur while starting the recording, a {@link VideoRecordEvent.Finalize} event
+ * will be the first event sent to the provided listener, and information about the error can
+ * be found in that event's {@link VideoRecordEvent.Finalize#getError()} method. The returned
+ * {@link SucklessRecording} will be in a finalized state, and all controls will be no-ops.
+ *
+ * <p>If the returned {@link SucklessRecording} is garbage collected, the recording will be
+ * automatically stopped. A reference to the active recording must be maintained as long as
+ * the recording needs to be active.
+ *
+ * @param listenerExecutor the executor that the event listener will be run on.
+ * @param listener the event listener to handle video record events.
+ * @throws IllegalStateException if the associated Recorder currently has an unfinished
+ * active recording.
+ */
+ @NonNull
+ @CheckResult
+ public SucklessRecording start(
+ @NonNull Executor listenerExecutor,
+ @NonNull Consumer<VideoRecordEvent> listener) {
+ Preconditions.checkNotNull(listenerExecutor, "Listener Executor can't be null.");
+ Preconditions.checkNotNull(listener, "Event listener can't be null");
+ mListenerExecutor = listenerExecutor;
+ mEventListener = listener;
+ return mRecorder.start(this);
+ }
+}
+
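The javadoc above documents the intended call flow: obtain a pending recording from a `Recorder`, optionally enable audio before starting, then start it with an executor and an event listener while holding on to the returned recording. Below is a hedged usage sketch of that flow, written against the upstream CameraX video API names; in this tree the patched classes return the `Suckless*` variants instead, so the exact types are illustrative only.

```java
import android.Manifest;
import android.content.ContentValues;
import android.content.Context;
import android.content.pm.PackageManager;
import android.provider.MediaStore;
import android.util.Log;

import androidx.camera.video.MediaStoreOutputOptions;
import androidx.camera.video.PendingRecording;
import androidx.camera.video.Recorder;
import androidx.camera.video.Recording;
import androidx.camera.video.VideoRecordEvent;
import androidx.core.content.ContextCompat;

final class RecordingStartSketch {

    /** Prepares and starts a recording, enabling audio only when the permission is granted. */
    static Recording startRecording(Context context, Recorder recorder) {
        MediaStoreOutputOptions outputOptions = new MediaStoreOutputOptions.Builder(
                context.getContentResolver(), MediaStore.Video.Media.EXTERNAL_CONTENT_URI)
                .setContentValues(new ContentValues())
                .build();

        PendingRecording pending = recorder.prepareRecording(context, outputOptions);
        if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO)
                == PackageManager.PERMISSION_GRANTED) {
            pending = pending.withAudioEnabled(); // must happen before start()
        }

        // The first event is either Start (success) or Finalize (failure). Keep the returned
        // Recording reference alive: if it is garbage collected, the recording is stopped.
        return pending.start(ContextCompat.getMainExecutor(context), event -> {
            if (event instanceof VideoRecordEvent.Finalize) {
                Log.d("RecordingSketch", "Finalized, error code: "
                        + ((VideoRecordEvent.Finalize) event).getError());
            }
        });
    }
}
```

Checking `RECORD_AUDIO` before `withAudioEnabled()` mirrors the `SecurityException` documented above; calling it without the permission would throw.
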
diff --git a/app/src/main/java/androidx/camera/video/originals/Recorder.java b/app/src/main/java/androidx/camera/video/originals/Recorder.java
new file mode 100644
index 0000000..52346a2
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/originals/Recorder.java
@@ -0,0 +1,3167 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video;
+
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_DURATION_LIMIT_REACHED;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_ENCODING_FAILED;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_FILE_SIZE_LIMIT_REACHED;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_INVALID_OUTPUT_OPTIONS;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_NONE;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_NO_VALID_DATA;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_RECORDER_ERROR;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_SOURCE_INACTIVE;
+import static androidx.camera.video.VideoRecordEvent.Finalize.ERROR_UNKNOWN;
+import static androidx.camera.video.VideoRecordEvent.Finalize.VideoRecordError;
+import static androidx.camera.video.internal.DebugUtils.readableUs;
+import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioEncoderConfig;
+import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioMimeInfo;
+import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioSourceSettings;
+
+import android.Manifest;
+import android.annotation.SuppressLint;
+import android.content.ContentValues;
+import android.content.Context;
+import android.location.Location;
+import android.media.MediaMuxer;
+import android.media.MediaRecorder;
+import android.media.MediaScannerConnection;
+import android.net.Uri;
+import android.os.Build;
+import android.os.ParcelFileDescriptor;
+import android.provider.MediaStore;
+import android.util.Pair;
+import android.util.Range;
+import android.util.Size;
+import android.view.Surface;
+
+import androidx.annotation.GuardedBy;
+import androidx.annotation.IntRange;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.RequiresPermission;
+import androidx.annotation.RestrictTo;
+import androidx.annotation.VisibleForTesting;
+import androidx.camera.core.AspectRatio;
+import androidx.camera.core.Logger;
+import androidx.camera.core.SurfaceRequest;
+import androidx.camera.core.impl.CamcorderProfileProxy;
+import androidx.camera.core.impl.MutableStateObservable;
+import androidx.camera.core.impl.Observable;
+import androidx.camera.core.impl.StateObservable;
+import androidx.camera.core.impl.Timebase;
+import androidx.camera.core.impl.annotation.ExecutedBy;
+import androidx.camera.core.impl.utils.CloseGuardHelper;
+import androidx.camera.core.impl.utils.executor.CameraXExecutors;
+import androidx.camera.core.impl.utils.futures.FutureCallback;
+import androidx.camera.core.impl.utils.futures.Futures;
+import androidx.camera.core.internal.utils.ArrayRingBuffer;
+import androidx.camera.core.internal.utils.RingBuffer;
+import androidx.camera.video.StreamInfo.StreamState;
+import androidx.camera.video.internal.AudioSource;
+import androidx.camera.video.internal.AudioSourceAccessException;
+import androidx.camera.video.internal.compat.Api26Impl;
+import androidx.camera.video.internal.compat.quirk.DeactivateEncoderSurfaceBeforeStopEncoderQuirk;
+import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
+import androidx.camera.video.internal.compat.quirk.EncoderNotUsePersistentInputSurfaceQuirk;
+import androidx.camera.video.internal.config.MimeInfo;
+import androidx.camera.video.internal.encoder.AudioEncoderConfig;
+import androidx.camera.video.internal.encoder.BufferCopiedEncodedData;
+import androidx.camera.video.internal.encoder.EncodeException;
+import androidx.camera.video.internal.encoder.EncodedData;
+import androidx.camera.video.internal.encoder.Encoder;
+import androidx.camera.video.internal.encoder.EncoderCallback;
+import androidx.camera.video.internal.encoder.EncoderFactory;
+import androidx.camera.video.internal.encoder.EncoderImpl;
+import androidx.camera.video.internal.encoder.InvalidConfigException;
+import androidx.camera.video.internal.encoder.OutputConfig;
+import androidx.camera.video.internal.encoder.VideoEncoderInfo;
+import androidx.camera.video.internal.utils.OutputUtil;
+import androidx.camera.video.internal.workaround.CorrectNegativeLatLongForMediaMuxer;
+import androidx.concurrent.futures.CallbackToFutureAdapter;
+import androidx.core.util.Consumer;
+import androidx.core.util.Preconditions;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.util.concurrent.ListenableFuture;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * An implementation of {@link VideoOutput} for starting video recordings that are saved
+ * to a {@link File}, {@link ParcelFileDescriptor}, or {@link MediaStore}.
+ *
+ * A recorder can be used to save the video frames sent from the {@link VideoCapture} use case
+ * in common recording formats such as MPEG4.
+ *
+ *
+ * <p>Usage example of setting up {@link VideoCapture} with a recorder as output:
+ * <pre>{@code
+ * ProcessCameraProvider cameraProvider = ...;
+ * CameraSelector cameraSelector = ...;
+ * ...
+ * // Create our preview to show on screen
+ * Preview preview = new Preview.Builder().build();
+ * // Create the video capture use case with a Recorder as the output
+ * VideoCapture<Recorder> videoCapture = VideoCapture.withOutput(new Recorder.Builder().build());
+ *
+ * // Bind use cases to Fragment/Activity lifecycle
+ * cameraProvider.bindToLifecycle(this, cameraSelector, preview, videoCapture);
+ * }</pre>
+ *
+ * Once the recorder is attached to a video source as a {@link VideoOutput}, e.g. using it to
+ * create a {@link VideoCapture} by calling {@link VideoCapture#withOutput(VideoOutput)}, a new
+ * recording can be generated with one of the prepareRecording methods, such as
+ * {@link #prepareRecording(Context, MediaStoreOutputOptions)}. The {@link PendingRecording} class
+ * then can be used to adjust per-recording settings and to start the recording. It also requires
+ * passing a listener to {@link PendingRecording#start(Executor, Consumer)} to
+ * listen for {@link VideoRecordEvent}s such as {@link VideoRecordEvent.Start},
+ * {@link VideoRecordEvent.Pause}, {@link VideoRecordEvent.Resume}, and
+ * {@link VideoRecordEvent.Finalize}. This listener will also receive regular recording status
+ * updates via the {@link VideoRecordEvent.Status} event.
+ *
+ * <p>Attaching a single Recorder instance to multiple video sources at the same time may cause
+ * unexpected behaviors and is not recommended.
+ *
+ * <p>A recorder can also capture and save audio alongside video. The audio must be explicitly
+ * enabled with {@link PendingRecording#withAudioEnabled()} before starting the recording.
+ *
+ * @see VideoCapture#withOutput(VideoOutput)
+ * @see PendingRecording
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+public final class Recorder implements VideoOutput {
+
+ private static final String TAG = "Recorder";
+
+ enum State {
+ /**
+ * The Recorder is being configured.
+ *
+ * <p>The Recorder will reach this state whenever it is waiting for a surface request.
+ */
+ CONFIGURING,
+ /**
+ * There's a recording waiting to be started.
+ *
+ * <p>The Recorder will reach this state whenever a recording cannot be serviced
+ * immediately.
+ */
+ PENDING_RECORDING,
+ /**
+ * There's a recording waiting to be paused.
+ *
+ * <p>The Recorder will reach this state whenever a recording cannot be serviced
+ * immediately.
+ */
+ PENDING_PAUSED,
+ /**
+ * The Recorder is idling and ready to start a new recording.
+ */
+ IDLING,
+ /**
+ * There's a running recording and the Recorder is producing output.
+ */
+ RECORDING,
+ /**
+ * There's a running recording and it's paused.
+ */
+ PAUSED,
+ /**
+ * There's a recording being stopped.
+ */
+ STOPPING,
+ /**
+ * There's a running recording and the Recorder is being reset.
+ */
+ RESETTING,
+ /**
+ * The Recorder encountered errors and any attempted operation will throw an
+ * {@link IllegalStateException}. Users can handle the error by monitoring
+ * {@link VideoRecordEvent}.
+ */
+ ERROR
+ }
+
+ enum AudioState {
+ /**
+ * The audio is being initialized.
+ */
+ INITIALIZING,
+ /**
+ * The audio has been initialized and is waiting for a new recording to be started.
+ */
+ IDLING,
+ /**
+ * Audio recording is disabled for the running recording.
+ */
+ DISABLED,
+ /**
+ * The recording is being recorded with audio.
+ */
+ ACTIVE,
+ /**
+ * The audio encoder encountered errors.
+ */
+ ERROR_ENCODER,
+ /**
+ * The audio source encountered errors.
+ */
+ ERROR_SOURCE,
+ }
+
+ /**
+ * The subset of states considered pending states.
+ */
+ private static final Set<State> PENDING_STATES =
+ Collections.unmodifiableSet(EnumSet.of(State.PENDING_RECORDING, State.PENDING_PAUSED));
+
+ /**
+ * The subset of states which are valid non-pending states while in a pending state.
+ *
+ * All other states should not be possible if in a PENDING_* state. Pending states are
+ * meant to be transient states that occur while waiting for another operation to finish.
+ */
+ private static final Set<State> VALID_NON_PENDING_STATES_WHILE_PENDING =
+ Collections.unmodifiableSet(EnumSet.of(
+ State.CONFIGURING, // Waiting for camera before starting recording.
+ State.IDLING, // Waiting for sequential executor to start pending recording.
+ State.RESETTING, // Waiting for camera/encoders to reset before starting.
+ State.STOPPING, // Waiting for previous recording to finalize before starting.
+ State.ERROR // Waiting for re-initialization before starting.
+ ));
+
+ /**
+ * Default quality selector for recordings.
+ *
+ * The default quality selector chooses a video quality suitable for recordings based on
+ * device and compatibility constraints. It is equivalent to:
+ * <pre>{@code
+ * QualitySelector.fromOrderedList(Arrays.asList(Quality.FHD, Quality.HD, Quality.SD),
+ * FallbackStrategy.higherQualityOrLowerThan(Quality.FHD));
+ * }</pre>
+ *
+ * @see QualitySelector
+ */
+ public static final QualitySelector DEFAULT_QUALITY_SELECTOR =
+ QualitySelector.fromOrderedList(Arrays.asList(Quality.FHD, Quality.HD, Quality.SD),
+ FallbackStrategy.higherQualityOrLowerThan(Quality.FHD));
+
+ private static final VideoSpec VIDEO_SPEC_DEFAULT =
+ VideoSpec.builder()
+ .setQualitySelector(DEFAULT_QUALITY_SELECTOR)
+ .setAspectRatio(AspectRatio.RATIO_DEFAULT)
+ .build();
+ private static final MediaSpec MEDIA_SPEC_DEFAULT =
+ MediaSpec.builder()
+ .setOutputFormat(MediaSpec.OUTPUT_FORMAT_AUTO)
+ .setVideoSpec(VIDEO_SPEC_DEFAULT)
+ .build();
+ @SuppressWarnings("deprecation")
+ private static final String MEDIA_COLUMN = MediaStore.Video.Media.DATA;
+ private static final Exception PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE =
+ new RuntimeException("The video frame producer became inactive before any "
+ + "data was received.");
+ private static final int PENDING = 1;
+ private static final int NOT_PENDING = 0;
+ private static final long SOURCE_NON_STREAMING_TIMEOUT_MS = 1000L;
+ // The audio data is expected to be less than 1 kB; this cache size limits the memory
+ // used to an acceptable range.
+ private static final int AUDIO_CACHE_SIZE = 60;
+ @VisibleForTesting
+ static final EncoderFactory DEFAULT_ENCODER_FACTORY = EncoderImpl::new;
+ private static final Executor AUDIO_EXECUTOR =
+ CameraXExecutors.newSequentialExecutor(CameraXExecutors.ioExecutor());
+
+ private final MutableStateObservable<StreamInfo> mStreamInfo;
+ // Used only by getExecutor()
+ private final Executor mUserProvidedExecutor;
+ // May be equivalent to mUserProvidedExecutor or an internal executor if the user did not
+ // provide an executor.
+ private final Executor mExecutor;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ final Executor mSequentialExecutor;
+ private final EncoderFactory mVideoEncoderFactory;
+ private final EncoderFactory mAudioEncoderFactory;
+ private final Object mLock = new Object();
+ private final boolean mEncoderNotUsePersistentInputSurface = DeviceQuirks.get(
+ EncoderNotUsePersistentInputSurfaceQuirk.class) != null;
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ // Members only accessed when holding mLock //
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ @GuardedBy("mLock")
+ private State mState = State.CONFIGURING;
+ // Tracks the underlying state when in a PENDING_* state. When not in a PENDING_* state, this
+ // should be null.
+ @GuardedBy("mLock")
+ private State mNonPendingState = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @GuardedBy("mLock")
+ int mStreamId = StreamInfo.STREAM_ID_ANY;
+ @GuardedBy("mLock")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ RecordingRecord mActiveRecordingRecord = null;
+ // A recording that will be started once the previous recording has finalized or the
+ // recorder has finished initializing.
+ @GuardedBy("mLock")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ RecordingRecord mPendingRecordingRecord = null;
+ @GuardedBy("mLock")
+ private long mLastGeneratedRecordingId = 0L;
+ //--------------------------------------------------------------------------------------------//
+
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ // Members only accessed on mSequentialExecutor //
+ ////////////////////////////////////////////////////////////////////////////////////////////////
+ private RecordingRecord mInProgressRecording = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ boolean mInProgressRecordingStopping = false;
+ private SurfaceRequest.TransformationInfo mSurfaceTransformationInfo = null;
+ private CamcorderProfileProxy mResolvedCamcorderProfile = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ final List<ListenableFuture<Void>> mEncodingFutures = new ArrayList<>();
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Integer mAudioTrackIndex = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Integer mVideoTrackIndex = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ SurfaceRequest mLatestSurfaceRequest;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Timebase mVideoSourceTimebase;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Surface mLatestSurface = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Surface mActiveSurface = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ MediaMuxer mMediaMuxer = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ final MutableStateObservable<MediaSpec> mMediaSpec;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ AudioSource mAudioSource = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Encoder mVideoEncoder = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ OutputConfig mVideoOutputConfig = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Encoder mAudioEncoder = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ OutputConfig mAudioOutputConfig = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ AudioState mAudioState = AudioState.INITIALIZING;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @NonNull
+ Uri mOutputUri = Uri.EMPTY;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mRecordingBytes = 0L;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mRecordingDurationNs = 0L;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mFirstRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ int mFirstRecordingVideoBitrate = 0;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Range<Integer> mVideoEncoderBitrateRange = null;
+ @VisibleForTesting
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mFirstRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mPreviousRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mPreviousRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mFileSizeLimitInBytes = OutputOptions.FILE_SIZE_UNLIMITED;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ long mDurationLimitNs = OutputOptions.DURATION_UNLIMITED;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @VideoRecordError
+ int mRecordingStopError = ERROR_UNKNOWN;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Throwable mRecordingStopErrorCause = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ EncodedData mPendingFirstVideoData = null;
+ // A cache that holds audio data created before the muxer starts, to prevent A/V from
+ // getting out of sync at the beginning of the recording.
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @NonNull
+ final RingBuffer<EncodedData> mPendingAudioRingBuffer = new ArrayRingBuffer<>(
+ AUDIO_CACHE_SIZE);
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ Throwable mAudioErrorCause = null;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ boolean mIsAudioSourceSilenced = false;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ SourceState mSourceState = SourceState.INACTIVE;
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ ScheduledFuture<?> mSourceNonStreamingTimeout = null;
+ // The Recorder has to be reset first before being configured again.
+ private boolean mNeedsReset = false;
+ @NonNull
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ VideoEncoderSession mVideoEncoderSession;
+ @Nullable
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ VideoEncoderSession mVideoEncoderSessionToRelease = null;
+ //--------------------------------------------------------------------------------------------//
+
+ Recorder(@Nullable Executor executor, @NonNull MediaSpec mediaSpec,
+ @NonNull EncoderFactory videoEncoderFactory,
+ @NonNull EncoderFactory audioEncoderFactory) {
+ mUserProvidedExecutor = executor;
+ mExecutor = executor != null ? executor : CameraXExecutors.ioExecutor();
+ mSequentialExecutor = CameraXExecutors.newSequentialExecutor(mExecutor);
+
+ mMediaSpec = MutableStateObservable.withInitialState(composeRecorderMediaSpec(mediaSpec));
+ mStreamInfo = MutableStateObservable.withInitialState(
+ StreamInfo.of(mStreamId, internalStateToStreamState(mState)));
+ mVideoEncoderFactory = videoEncoderFactory;
+ mAudioEncoderFactory = audioEncoderFactory;
+ mVideoEncoderSession =
+ new VideoEncoderSession(mVideoEncoderFactory, mSequentialExecutor, mExecutor);
+ }
+
+ @Override
+ public void onSurfaceRequested(@NonNull SurfaceRequest request) {
+ onSurfaceRequested(request, Timebase.UPTIME);
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ public void onSurfaceRequested(@NonNull SurfaceRequest request, @NonNull Timebase timebase) {
+ synchronized (mLock) {
+ Logger.d(TAG, "Surface is requested in state: " + mState + ", Current surface: "
+ + mStreamId);
+ if (mState == State.ERROR) {
+ setState(State.CONFIGURING);
+ }
+ }
+ mSequentialExecutor.execute(() -> onSurfaceRequestedInternal(request, timebase));
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ @NonNull
+ public Observable<MediaSpec> getMediaSpec() {
+ return mMediaSpec;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ @NonNull
+ public Observable<StreamInfo> getStreamInfo() {
+ return mStreamInfo;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @Override
+ public void onSourceStateChanged(@NonNull SourceState newState) {
+ mSequentialExecutor.execute(() -> onSourceStateChangedInternal(newState));
+ }
+
+ /**
+ * Prepares a recording that will be saved to a {@link File}.
+ *
+ * The provided {@link FileOutputOptions} specifies the file to use.
+ *
+ * <p>Calling this method multiple times will generate multiple {@link PendingRecording}s,
+ * each of the recordings can be used to adjust per-recording settings individually. The
+ * recording will not begin until {@link PendingRecording#start(Executor, Consumer)} is called.
+ * Only a single pending recording can be started per {@link Recorder} instance.
+ *
+ * @param context the context used to enforce runtime permissions, interface with the media
+ * scanner service, and attribute access to permission protected data, such as
+ * audio. If using this context to audit audio
+ * access on API level 31+, a context created with
+ * {@link Context#createAttributionContext(String)} should be used.
+ * @param fileOutputOptions the options that configure how the output will be handled.
+ * @return a {@link PendingRecording} that is associated with this Recorder.
+ * @see FileOutputOptions
+ */
+ @NonNull
+ public PendingRecording prepareRecording(@NonNull Context context,
+ @NonNull FileOutputOptions fileOutputOptions) {
+ return prepareRecordingInternal(context, fileOutputOptions);
+ }
+
+ /**
+ * Prepares a recording that will be saved to a {@link ParcelFileDescriptor}.
+ *
+ * <p>The provided {@link FileDescriptorOutputOptions} specifies the
+ * {@link ParcelFileDescriptor} to use.
+ *
+ * <p>Currently, file descriptors as output destinations are not supported on pre-Android O
+ * (API 26) devices.
+ *
+ * <p>Calling this method multiple times will generate multiple {@link PendingRecording}s,
+ * each of the recordings can be used to adjust per-recording settings individually. The
+ * recording will not begin until {@link PendingRecording#start(Executor, Consumer)} is called.
+ * Only a single pending recording can be started per {@link Recorder} instance.
+ *
+ * @param context the context used to enforce runtime permissions, interface with the media
+ * scanner service, and attribute access to permission protected data, such as
+ * audio. If using this context to audit audio
+ * access on API level 31+, a context created with
+ * {@link Context#createAttributionContext(String)} should be used.
+ * @param fileDescriptorOutputOptions the options that configure how the output will be
+ * handled.
+ * @return a {@link PendingRecording} that is associated with this Recorder.
+ * @throws UnsupportedOperationException if this method is called on pre-Android O (API 26)
+ * devices.
+ * @see FileDescriptorOutputOptions
+ */
+ @RequiresApi(26)
+ @NonNull
+ public PendingRecording prepareRecording(@NonNull Context context,
+ @NonNull FileDescriptorOutputOptions fileDescriptorOutputOptions) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
+ throw new UnsupportedOperationException(
+ "File descriptors as output destinations are not supported on pre-Android O "
+ + "(API 26) devices.");
+ }
+ return prepareRecordingInternal(context, fileDescriptorOutputOptions);
+ }
+
+ /**
+ * Prepares a recording that will be saved to a {@link MediaStore}.
+ *
+ * <p>The provided {@link MediaStoreOutputOptions} specifies the options which will be used
+ * to save the recording to a {@link MediaStore}.
+ *
+ * <p>Calling this method multiple times will generate multiple {@link PendingRecording}s,
+ * each of the recordings can be used to adjust per-recording settings individually. The
+ * recording will not begin until {@link PendingRecording#start(Executor, Consumer)} is called.
+ * Only a single pending recording can be started per {@link Recorder} instance.
+ *
+ * @param context the context used to enforce runtime permissions, interface with the media
+ * scanner service, and attribute access to permission protected data, such as
+ * audio. If using this context to audit audio
+ * access on API level 31+, a context created with
+ * {@link Context#createAttributionContext(String)} should be used.
+ * @param mediaStoreOutputOptions the options that configure how the output will be handled.
+ * @return a {@link PendingRecording} that is associated with this Recorder.
+ * @see MediaStoreOutputOptions
+ */
+ @NonNull
+ public PendingRecording prepareRecording(@NonNull Context context,
+ @NonNull MediaStoreOutputOptions mediaStoreOutputOptions) {
+ return prepareRecordingInternal(context, mediaStoreOutputOptions);
+ }
+
+ @NonNull
+ private PendingRecording prepareRecordingInternal(@NonNull Context context,
+ @NonNull OutputOptions options) {
+ Preconditions.checkNotNull(options, "The OutputOptions cannot be null.");
+ return new PendingRecording(context, this, options);
+ }
+
+ /**
+ * Gets the quality selector of this Recorder.
+ *
+ * @return the {@link QualitySelector} provided to
+ * {@link Builder#setQualitySelector(QualitySelector)} on the builder used to create this
+ * recorder, or the default value of {@link Recorder#DEFAULT_QUALITY_SELECTOR} if no quality
+ * selector was provided.
+ */
+ @NonNull
+ public QualitySelector getQualitySelector() {
+ return getObservableData(mMediaSpec).getVideoSpec().getQualitySelector();
+ }
+
+ /**
+ * Gets the audio source of this Recorder.
+ *
+ * @return the value provided to {@link Builder#setAudioSource(int)} on the builder used to
+ * create this recorder, or the default value of {@link AudioSpec#SOURCE_AUTO} if no source was
+ * set.
+ */
+ @AudioSpec.Source
+ int getAudioSource() {
+ return getObservableData(mMediaSpec).getAudioSpec().getSource();
+ }
+
+ /**
+ * Returns the executor provided to the builder for this recorder.
+ *
+ * @return the {@link Executor} provided to {@link Builder#setExecutor(Executor)} on the
+ * builder used to create this recorder. If no executor was provided, returns {@code null}.
+ */
+ @Nullable
+ public Executor getExecutor() {
+ return mUserProvidedExecutor;
+ }
+
+ /**
+ * Gets the target video encoding bitrate of this Recorder.
+ *
+ * @return the value provided to {@link Builder#setTargetVideoEncodingBitRate(int)} on the
+ * builder used to create this recorder. Returns 0, if
+ * {@link Builder#setTargetVideoEncodingBitRate(int)} is not called.
+ */
+ public int getTargetVideoEncodingBitRate() {
+ return getObservableData(mMediaSpec).getVideoSpec().getBitrate().getLower();
+ }
+
+ /**
+ * Gets the aspect ratio of this Recorder.
+ *
+ * @return the value from {@link Builder#setAspectRatio(int)} or
+ * {@link AspectRatio#RATIO_DEFAULT} if not set.
+ */
+ @AspectRatio.Ratio
+ public int getAspectRatio() {
+ return getObservableData(mMediaSpec).getVideoSpec().getAspectRatio();
+ }
+
+ /**
+ * Starts a pending recording and returns an active recording instance.
+ *
+ * <p>If the Recorder is already running a recording, an {@link IllegalStateException} will
+ * be thrown when calling this method.
+ *
+ * <p>If the video encoder hasn't been set up with {@link #onSurfaceRequested(SurfaceRequest)}
+ * , the {@link PendingRecording} specified will be started once the video encoder setup
+ * completes. The recording will be considered active, so before it's finalized, an
+ * {@link IllegalStateException} will be thrown if this method is called for a second time.
+ *
+ * <p>If the video producer stops sending frames to the provided surface, the recording will
+ * be automatically finalized with {@link VideoRecordEvent.Finalize#ERROR_SOURCE_INACTIVE}.
+ * This can happen, for example, when the {@link VideoCapture} this Recorder is associated
+ * with is detached from the camera.
+ *
+ * @throws IllegalStateException if there's an active recording, or the audio is
+ * {@link PendingRecording#withAudioEnabled() enabled} for the
+ * recording but
+ * {@link android.Manifest.permission#RECORD_AUDIO} is not
+ * granted.
+ */
+ @NonNull
+ Recording start(@NonNull PendingRecording pendingRecording) {
+ Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
+ RecordingRecord alreadyInProgressRecording = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ long recordingId;
+ synchronized (mLock) {
+ recordingId = ++mLastGeneratedRecordingId;
+ switch (mState) {
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ alreadyInProgressRecording = mActiveRecordingRecord;
+ break;
+ case PENDING_PAUSED:
+ // Fall-through
+ case PENDING_RECORDING:
+ // There is already a recording pending that hasn't been stopped.
+ alreadyInProgressRecording =
+ Preconditions.checkNotNull(mPendingRecordingRecord);
+ break;
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ // Fall-through
+ case CONFIGURING:
+ // Fall-through
+ case ERROR:
+ // Fall-through
+ case IDLING:
+ if (mState == State.IDLING) {
+ Preconditions.checkState(
+ mActiveRecordingRecord == null
+ && mPendingRecordingRecord == null,
+ "Expected recorder to be idle but a recording is either "
+ + "pending or in progress.");
+ }
+ try {
+ RecordingRecord recordingRecord = RecordingRecord.from(pendingRecording,
+ recordingId);
+ recordingRecord.initializeRecording(
+ pendingRecording.getApplicationContext());
+ mPendingRecordingRecord = recordingRecord;
+ if (mState == State.IDLING) {
+ setState(State.PENDING_RECORDING);
+ mSequentialExecutor.execute(this::tryServicePendingRecording);
+ } else if (mState == State.ERROR) {
+ setState(State.PENDING_RECORDING);
+ // Retry initialization.
+ mSequentialExecutor.execute(() -> {
+ if (mLatestSurfaceRequest == null) {
+ throw new AssertionError(
+ "surface request is required to retry "
+ + "initialization.");
+ }
+ configureInternal(mLatestSurfaceRequest, mVideoSourceTimebase);
+ });
+ } else {
+ setState(State.PENDING_RECORDING);
+ // The recording will automatically start once the initialization
+ // completes.
+ }
+ } catch (IOException e) {
+ error = ERROR_INVALID_OUTPUT_OPTIONS;
+ errorCause = e;
+ }
+ break;
+ }
+ }
+
+ if (alreadyInProgressRecording != null) {
+ throw new IllegalStateException("A recording is already in progress. Previous "
+ + "recordings must be stopped before a new recording can be started.");
+ } else if (error != ERROR_NONE) {
+ Logger.e(TAG,
+ "Recording was started when the Recorder had encountered error " + errorCause);
+ // Immediately update the listener if the Recorder encountered an error.
+ finalizePendingRecording(RecordingRecord.from(pendingRecording, recordingId),
+ error, errorCause);
+ return Recording.createFinalizedFrom(pendingRecording, recordingId);
+ }
+
+ return Recording.from(pendingRecording, recordingId);
+ }
+
+ void pause(@NonNull Recording activeRecording) {
+ synchronized (mLock) {
+ if (!isSameRecording(activeRecording, mPendingRecordingRecord) && !isSameRecording(
+ activeRecording, mActiveRecordingRecord)) {
+ // If this Recording is no longer active, log and treat as a no-op.
+ // This is not technically an error since the recording can be finalized
+ // asynchronously.
+ Logger.d(TAG,
+ "pause() called on a recording that is no longer active: "
+ + activeRecording.getOutputOptions());
+ return;
+ }
+
+ switch (mState) {
+ case PENDING_RECORDING:
+ // The recording will automatically pause once the initialization completes.
+ setState(State.PENDING_PAUSED);
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ throw new IllegalStateException("Called pause() from invalid state: " + mState);
+ case RECORDING:
+ setState(State.PAUSED);
+ RecordingRecord finalActiveRecordingRecord = mActiveRecordingRecord;
+ mSequentialExecutor.execute(() -> pauseInternal(finalActiveRecordingRecord));
+ break;
+ case PENDING_PAUSED:
+ // Fall-through
+ case PAUSED:
+ // No-op when the recording is already paused.
+ break;
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ // If recorder is resetting or stopping, then pause is a no-op.
+ break;
+ case ERROR:
+ // In an error state, the recording will already be finalized. Treat as a
+ // no-op in pause()
+ break;
+ }
+ }
+ }
+
+ void resume(@NonNull Recording activeRecording) {
+ synchronized (mLock) {
+ if (!isSameRecording(activeRecording, mPendingRecordingRecord) && !isSameRecording(
+ activeRecording, mActiveRecordingRecord)) {
+ // If this Recording is no longer active, log and treat as a no-op.
+ // This is not technically an error since the recording can be finalized
+ // asynchronously.
+ Logger.d(TAG,
+ "resume() called on a recording that is no longer active: "
+ + activeRecording.getOutputOptions());
+ return;
+ }
+ switch (mState) {
+ case PENDING_PAUSED:
+ // The recording will automatically start once the initialization completes.
+ setState(State.PENDING_RECORDING);
+ break;
+ case CONFIGURING:
+ // Should not be able to resume when initializing. Should be in a PENDING state.
+ // Fall-through
+ case IDLING:
+ throw new IllegalStateException("Called resume() from invalid state: "
+ + mState);
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ // If recorder is stopping or resetting, then resume is a no-op.
+ // Fall-through
+ case PENDING_RECORDING:
+ // Fall-through
+ case RECORDING:
+ // No-op when the recording is running.
+ break;
+ case PAUSED:
+ setState(State.RECORDING);
+ RecordingRecord finalActiveRecordingRecord = mActiveRecordingRecord;
+ mSequentialExecutor.execute(() -> resumeInternal(finalActiveRecordingRecord));
+ break;
+ case ERROR:
+ // In an error state, the recording will already be finalized. Treat as a
+ // no-op in resume()
+ break;
+ }
+ }
+ }
+
+ void stop(@NonNull Recording activeRecording) {
+ RecordingRecord pendingRecordingToFinalize = null;
+ synchronized (mLock) {
+ if (!isSameRecording(activeRecording, mPendingRecordingRecord) && !isSameRecording(
+ activeRecording, mActiveRecordingRecord)) {
+ // If this Recording is no longer active, log and treat as a no-op.
+ // This is not technically an error since the recording can be finalized
+ // asynchronously.
+ Logger.d(TAG,
+ "stop() called on a recording that is no longer active: "
+ + activeRecording.getOutputOptions());
+ return;
+ }
+ switch (mState) {
+ case PENDING_RECORDING:
+ // Fall-through
+ case PENDING_PAUSED:
+ // Immediately finalize pending recording since it never started.
+ Preconditions.checkState(isSameRecording(activeRecording,
+ mPendingRecordingRecord));
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ restoreNonPendingState(); // Equivalent to setState(mNonPendingState)
+ break;
+ case STOPPING:
+ // Fall-through
+ case RESETTING:
+ // We are already resetting, likely due to an error that stopped the recording.
+ // Ensure this is the current active recording and treat as a no-op. The
+ // active recording will be cleared once stop/reset is complete.
+ Preconditions.checkState(isSameRecording(activeRecording,
+ mActiveRecordingRecord));
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ throw new IllegalStateException("Calling stop() while idling or initializing "
+ + "is invalid.");
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ setState(State.STOPPING);
+ long explicitlyStopTimeUs = TimeUnit.NANOSECONDS.toMicros(System.nanoTime());
+ RecordingRecord finalActiveRecordingRecord = mActiveRecordingRecord;
+ mSequentialExecutor.execute(() -> stopInternal(finalActiveRecordingRecord,
+ explicitlyStopTimeUs, ERROR_NONE, null));
+ break;
+ case ERROR:
+ // In an error state, the recording will already be finalized. Treat as a
+ // no-op in stop()
+ break;
+ }
+ }
+
+ if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, ERROR_NO_VALID_DATA,
+ new RuntimeException("Recording was stopped before any data could be "
+ + "produced."));
+ }
+ }
+
+ private void finalizePendingRecording(@NonNull RecordingRecord recordingToFinalize,
+ @VideoRecordError int error, @Nullable Throwable cause) {
+ recordingToFinalize.finalizeRecording(Uri.EMPTY);
+ recordingToFinalize.updateVideoRecordEvent(
+ VideoRecordEvent.finalizeWithError(
+ recordingToFinalize.getOutputOptions(),
+ RecordingStats.of(/*duration=*/0L,
+ /*bytes=*/0L,
+ AudioStats.of(AudioStats.AUDIO_STATE_DISABLED, mAudioErrorCause)),
+ OutputResults.of(Uri.EMPTY),
+ error,
+ cause));
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void onSurfaceRequestedInternal(@NonNull SurfaceRequest request,
+ @NonNull Timebase timebase) {
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ mLatestSurfaceRequest.willNotProvideSurface();
+ }
+ configureInternal(mLatestSurfaceRequest = request, mVideoSourceTimebase = timebase);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ void onSourceStateChangedInternal(@NonNull SourceState newState) {
+ SourceState oldState = mSourceState;
+ mSourceState = newState;
+ if (oldState != newState) {
+ Logger.d(TAG, "Video source has transitioned to state: " + newState);
+ } else {
+ Logger.d(TAG, "Video source transitions to the same state: " + newState);
+ return;
+ }
+
+ if (newState == SourceState.INACTIVE) {
+ if (mActiveSurface == null) {
+ // If we're inactive and have no active surface, we'll reset the encoder directly.
+ // Otherwise, we'll wait for the active surface's surface request listener to
+ // reset the encoder.
+ requestReset(ERROR_SOURCE_INACTIVE, null);
+ } else {
+ // The source has become inactive; the incoming new surface request has to be cached
+ // and serviced after the Recorder is reset, once the previous surface request's
+ // completion callback has been received.
+ mNeedsReset = true;
+ if (mInProgressRecording != null) {
+ // Stop any in progress recording with "source inactive" error
+ onInProgressRecordingInternalError(mInProgressRecording, ERROR_SOURCE_INACTIVE,
+ null);
+ }
+ }
+ } else if (newState == SourceState.ACTIVE_NON_STREAMING) {
+ // We are expecting the source to transition to NON_STREAMING state.
+ if (mSourceNonStreamingTimeout != null && mSourceNonStreamingTimeout.cancel(false)
+ && mVideoEncoder != null) {
+ notifyEncoderSourceStopped(mVideoEncoder);
+ }
+ }
+ }
+
+ /**
+ * Requests the Recorder to be reset.
+ *
+ * <p>If a recording is in progress, it will be stopped asynchronously and reset once it has
+ * been finalized.
+ *
+ * <p>The Recorder is expected to be reset when there's no active surface. Otherwise, wait for
+ * the surface request complete callback first.
+ */
+ @ExecutedBy("mSequentialExecutor")
+ void requestReset(@VideoRecordError int errorCode, @Nullable Throwable errorCause) {
+ boolean shouldReset = false;
+ boolean shouldStop = false;
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_RECORDING:
+ // Fall-through
+ case PENDING_PAUSED:
+ // Fall-through
+ shouldReset = true;
+ updateNonPendingState(State.RESETTING);
+ break;
+ case ERROR:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case CONFIGURING:
+ shouldReset = true;
+ break;
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ if (mActiveRecordingRecord != mInProgressRecording) {
+ throw new AssertionError("In-progress recording does not match the active"
+ + " recording. Unable to reset encoder.");
+ }
+ // If there's an active recording, stop it first then release the resources
+ // at onRecordingFinalized().
+ shouldStop = true;
+ // Fall-through
+ case STOPPING:
+ // Already stopping. Set state to RESETTING so resources will be released once
+ // onRecordingFinalized() runs.
+ setState(State.RESETTING);
+ break;
+ case RESETTING:
+ // No-Op, the Recorder is already being reset.
+ break;
+ }
+ }
+
+ // These calls must not be posted to the executor to ensure they are executed inline on
+ // the sequential executor and the state changes above are correctly handled.
+ if (shouldReset) {
+ reset();
+ } else if (shouldStop) {
+ stopInternal(mInProgressRecording, Encoder.NO_TIMESTAMP, errorCode, errorCause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void configureInternal(@NonNull SurfaceRequest surfaceRequest,
+ @NonNull Timebase videoSourceTimebase) {
+ if (surfaceRequest.isServiced()) {
+ Logger.w(TAG, "Ignore the SurfaceRequest since it is already served.");
+ return;
+ }
+ surfaceRequest.setTransformationInfoListener(mSequentialExecutor,
+ (transformationInfo) -> mSurfaceTransformationInfo = transformationInfo);
+ Size surfaceSize = surfaceRequest.getResolution();
+ // Fetch and cache nearest camcorder profile, if one exists.
+ VideoCapabilities capabilities =
+ VideoCapabilities.from(surfaceRequest.getCamera().getCameraInfo());
+ Quality highestSupportedQuality = capabilities.findHighestSupportedQualityFor(surfaceSize);
+ Logger.d(TAG, "Using supported quality of " + highestSupportedQuality
+ + " for surface size " + surfaceSize);
+ if (highestSupportedQuality != Quality.NONE) {
+ mResolvedCamcorderProfile = capabilities.getProfile(highestSupportedQuality);
+ if (mResolvedCamcorderProfile == null) {
+ throw new AssertionError("Camera advertised available quality but did not "
+ + "produce CamcorderProfile for advertised quality.");
+ }
+ }
+ setupVideo(surfaceRequest, videoSourceTimebase);
+ }
+
+ @SuppressWarnings("ObjectToString")
+ @ExecutedBy("mSequentialExecutor")
+ private void setupVideo(@NonNull SurfaceRequest request, @NonNull Timebase timebase) {
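+ // Wait for the previous VideoEncoderSession to terminate before configuring a new
+ // session for this surface request.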
+ safeToCloseVideoEncoder().addListener(() -> {
+ if (request.isServiced() || mVideoEncoderSession.isConfiguredSurfaceRequest(request)) {
+ Logger.w(TAG, "Ignore the SurfaceRequest " + request + " isServiced: "
+ + request.isServiced() + " VideoEncoderSession: " + mVideoEncoderSession);
+ return;
+ }
+ VideoEncoderSession videoEncoderSession =
+ new VideoEncoderSession(mVideoEncoderFactory, mSequentialExecutor, mExecutor);
+ MediaSpec mediaSpec = getObservableData(mMediaSpec);
+ ListenableFuture<Encoder> configureFuture =
+ videoEncoderSession.configure(request, timebase, mediaSpec,
+ mResolvedCamcorderProfile);
+ mVideoEncoderSession = videoEncoderSession;
+ Futures.addCallback(configureFuture, new FutureCallback<Encoder>() {
+ @Override
+ public void onSuccess(@Nullable Encoder result) {
+ Logger.d(TAG, "VideoEncoder is created. " + result);
+ if (result == null) {
+ return;
+ }
+ Preconditions.checkState(mVideoEncoderSession == videoEncoderSession);
+ Preconditions.checkState(mVideoEncoder == null);
+ onVideoEncoderReady(videoEncoderSession);
+ onConfigured();
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, "VideoEncoder Setup error: " + t);
+ onEncoderSetupError(t);
+ }
+ }, mSequentialExecutor);
+ }, mSequentialExecutor);
+ }
+
+ @NonNull
+ @ExecutedBy("mSequentialExecutor")
+ private ListenableFuture<Void> safeToCloseVideoEncoder() {
+ Logger.d(TAG, "Try to safely release video encoder: " + mVideoEncoder);
+ return mVideoEncoderSession.signalTermination();
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void onVideoEncoderReady(@NonNull VideoEncoderSession videoEncoderSession) {
+ mVideoEncoder = videoEncoderSession.getVideoEncoder();
+ mVideoEncoderBitrateRange =
+ ((VideoEncoderInfo) mVideoEncoder.getEncoderInfo()).getSupportedBitrateRange();
+ mFirstRecordingVideoBitrate = mVideoEncoder.getConfiguredBitrate();
+ mActiveSurface = videoEncoderSession.getActiveSurface();
+ setLatestSurface(mActiveSurface);
+
+ videoEncoderSession.setOnSurfaceUpdateListener(mSequentialExecutor, this::setLatestSurface);
+
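+ // Once the session signals it is ready to be released, stash it and request a reset so
+ // the encoder can be released in tryReleaseVideoEncoder().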
+ Futures.addCallback(videoEncoderSession.getReadyToReleaseFuture(),
+ new FutureCallback<Encoder>() {
+ @Override
+ public void onSuccess(@Nullable Encoder result) {
+ Logger.d(TAG, "VideoEncoder can be released: " + result);
+ if (result == null) {
+ return;
+ }
+ if (mSourceNonStreamingTimeout != null
+ && mSourceNonStreamingTimeout.cancel(false)
+ && mVideoEncoder != null && mVideoEncoder == result) {
+ notifyEncoderSourceStopped(mVideoEncoder);
+ }
+
+ mVideoEncoderSessionToRelease = videoEncoderSession;
+ setLatestSurface(null);
+ requestReset(ERROR_SOURCE_INACTIVE, null);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, "Error in ReadyToReleaseFuture: " + t);
+ }
+ }, mSequentialExecutor);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void onConfigured() {
+ RecordingRecord recordingToStart = null;
+ RecordingRecord pendingRecordingToFinalize = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ boolean startRecordingPaused = false;
+ synchronized (mLock) {
+ switch (mState) {
+ case IDLING:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case PAUSED:
+ // Fall-through
+ case RESETTING:
+ throw new AssertionError(
+ "Incorrectly invoke onConfigured() in state " + mState);
+ case STOPPING:
+ if (!mEncoderNotUsePersistentInputSurface) {
+ throw new AssertionError("Unexpectedly invoke onConfigured() in a "
+ + "STOPPING state when it's not waiting for a new surface.");
+ }
+ break;
+ case CONFIGURING:
+ setState(State.IDLING);
+ break;
+ case ERROR:
+ Logger.e(TAG,
+ "onConfigured() was invoked when the Recorder had encountered error");
+ break;
+ case PENDING_PAUSED:
+ startRecordingPaused = true;
+ // Fall through
+ case PENDING_RECORDING:
+ if (mActiveRecordingRecord != null) {
+ // Active recording is still finalizing. Pending recording will be
+ // serviced in onRecordingFinalized().
+ break;
+ }
+ if (mSourceState == SourceState.INACTIVE) {
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ restoreNonPendingState(); // Equivalent to setState(mNonPendingState)
+ error = ERROR_SOURCE_INACTIVE;
+ errorCause = PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE;
+ } else {
+ recordingToStart = makePendingRecordingActiveLocked(mState);
+ }
+ break;
+ }
+ }
+
+ if (recordingToStart != null) {
+ // Start new active recording inline on sequential executor (but unlocked).
+ startRecording(recordingToStart, startRecordingPaused);
+ } else if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, error, errorCause);
+ }
+ }
+
+ @NonNull
+ private MediaSpec composeRecorderMediaSpec(@NonNull MediaSpec mediaSpec) {
+ MediaSpec.Builder mediaSpecBuilder = mediaSpec.toBuilder();
+
+ // Append default video configurations
+ VideoSpec videoSpec = mediaSpec.getVideoSpec();
+ if (videoSpec.getAspectRatio() == AspectRatio.RATIO_DEFAULT) {
+ mediaSpecBuilder.configureVideo(
+ builder -> builder.setAspectRatio(VIDEO_SPEC_DEFAULT.getAspectRatio()));
+ }
+
+ return mediaSpecBuilder.build();
+ }
+
+ private static boolean isSameRecording(@NonNull Recording activeRecording,
+ @Nullable RecordingRecord recordingRecord) {
+ if (recordingRecord == null) {
+ return false;
+ }
+
+ return activeRecording.getRecordingId() == recordingRecord.getRecordingId();
+ }
+
+ /**
+ * Setup audio related resources.
+ *
+ * @throws AudioSourceAccessException if the audio source failed to be setup.
+ * @throws InvalidConfigException if the audio encoder failed to be setup.
+ */
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @ExecutedBy("mSequentialExecutor")
+ private void setupAudio(@NonNull RecordingRecord recordingToStart)
+ throws AudioSourceAccessException, InvalidConfigException {
+ MediaSpec mediaSpec = getObservableData(mMediaSpec);
+ // Resolve the audio mime info
+ MimeInfo audioMimeInfo = resolveAudioMimeInfo(mediaSpec, mResolvedCamcorderProfile);
+ Timebase audioSourceTimebase = Timebase.UPTIME;
+
+ // Select and create the audio source
+ AudioSource.Settings audioSourceSettings =
+ resolveAudioSourceSettings(audioMimeInfo, mediaSpec.getAudioSpec());
+ if (mAudioSource != null) {
+ releaseCurrentAudioSource();
+ }
+ // TODO: set audioSourceTimebase to AudioSource. Currently AudioSource hard codes
+ // AudioTimestamp.TIMEBASE_MONOTONIC.
+ mAudioSource = setupAudioSource(recordingToStart, audioSourceSettings);
+ Logger.d(TAG, String.format("Set up new audio source: 0x%x", mAudioSource.hashCode()));
+
+ // Select and create the audio encoder
+ AudioEncoderConfig audioEncoderConfig = resolveAudioEncoderConfig(audioMimeInfo,
+ audioSourceTimebase, audioSourceSettings, mediaSpec.getAudioSpec());
+ mAudioEncoder = mAudioEncoderFactory.createEncoder(mExecutor, audioEncoderConfig);
+
+ // Connect the audio source to the audio encoder
+ Encoder.EncoderInput bufferProvider = mAudioEncoder.getInput();
+ if (!(bufferProvider instanceof Encoder.ByteBufferInput)) {
+ throw new AssertionError("The EncoderInput of audio isn't a ByteBufferInput.");
+ }
+ mAudioSource.setBufferProvider((Encoder.ByteBufferInput) bufferProvider);
+ }
+
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @NonNull
+ private AudioSource setupAudioSource(@NonNull RecordingRecord recordingToStart,
+ @NonNull AudioSource.Settings audioSourceSettings)
+ throws AudioSourceAccessException {
+ return recordingToStart.performOneTimeAudioSourceCreation(audioSourceSettings,
+ AUDIO_EXECUTOR);
+ }
+
+ private void releaseCurrentAudioSource() {
+ if (mAudioSource == null) {
+ throw new AssertionError("Cannot release null audio source.");
+ }
+ AudioSource audioSource = mAudioSource;
+ mAudioSource = null;
+ Logger.d(TAG, String.format("Releasing audio source: 0x%x", audioSource.hashCode()));
+ // Run callback on direct executor since it is only logging
+ Futures.addCallback(audioSource.release(), new FutureCallback<Void>() {
+ @Override
+ public void onSuccess(@Nullable Void result) {
+ Logger.d(TAG, String.format("Released audio source successfully: 0x%x",
+ audioSource.hashCode()));
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, String.format("An error occurred while attempting to "
+ + "release audio source: 0x%x", audioSource.hashCode()));
+ }
+ }, CameraXExecutors.directExecutor());
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void onEncoderSetupError(@Nullable Throwable cause) {
+ RecordingRecord pendingRecordingToFinalize = null;
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_PAUSED:
+ // Fall-through
+ case PENDING_RECORDING:
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ // Fall-through
+ case CONFIGURING:
+ setStreamId(StreamInfo.STREAM_ID_ERROR);
+ setState(State.ERROR);
+ break;
+ case ERROR:
+ // Already in an error state. Ignore new error.
+ break;
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ throw new AssertionError("Encountered encoder setup error while in unexpected"
+ + " state " + mState + ": " + cause);
+ }
+ }
+
+ if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, ERROR_RECORDER_ERROR, cause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void setupAndStartMediaMuxer(@NonNull RecordingRecord recordingToStart) {
+ if (mMediaMuxer != null) {
+ throw new AssertionError("Unable to set up media muxer when one already exists.");
+ }
+
+ if (isAudioEnabled() && mPendingAudioRingBuffer.isEmpty()) {
+ throw new AssertionError("Audio is enabled but no audio sample is ready. Cannot start"
+ + " media muxer.");
+ }
+
+ if (mPendingFirstVideoData == null) {
+ throw new AssertionError("Media muxer cannot be started without an encoded video "
+ + "frame.");
+ }
+
+ try (EncodedData videoDataToWrite = mPendingFirstVideoData) {
+ mPendingFirstVideoData = null;
+ List<EncodedData> audioDataToWrite = getAudioDataToWriteAndClearCache(
+ videoDataToWrite.getPresentationTimeUs()
+ );
+ // Make sure we can write the first audio and video data without hitting the file size
+ // limit. Otherwise we will be left with a malformed (empty) track on stop.
+ long firstDataSize = videoDataToWrite.size();
+ for (EncodedData data : audioDataToWrite) {
+ firstDataSize += data.size();
+ }
+ if (mFileSizeLimitInBytes != OutputOptions.FILE_SIZE_UNLIMITED
+ && firstDataSize > mFileSizeLimitInBytes) {
+ Logger.d(TAG,
+ String.format("Initial data exceeds file size limit %d > %d", firstDataSize,
+ mFileSizeLimitInBytes));
+ onInProgressRecordingInternalError(recordingToStart,
+ ERROR_FILE_SIZE_LIMIT_REACHED, null);
+ return;
+ }
+
+ MediaMuxer mediaMuxer;
+ try {
+ MediaSpec mediaSpec = getObservableData(mMediaSpec);
+ int muxerOutputFormat =
+ mediaSpec.getOutputFormat() == MediaSpec.OUTPUT_FORMAT_AUTO
+ ? supportedMuxerFormatOrDefaultFrom(mResolvedCamcorderProfile,
+ MediaSpec.outputFormatToMuxerFormat(
+ MEDIA_SPEC_DEFAULT.getOutputFormat()))
+ : MediaSpec.outputFormatToMuxerFormat(mediaSpec.getOutputFormat());
+ mediaMuxer = recordingToStart.performOneTimeMediaMuxerCreation(muxerOutputFormat,
+ uri -> mOutputUri = uri);
+ } catch (IOException e) {
+ onInProgressRecordingInternalError(recordingToStart, ERROR_INVALID_OUTPUT_OPTIONS,
+ e);
+ return;
+ }
+
+ if (mSurfaceTransformationInfo != null) {
+ mediaMuxer.setOrientationHint(mSurfaceTransformationInfo.getRotationDegrees());
+ }
+ Location location = recordingToStart.getOutputOptions().getLocation();
+ if (location != null) {
+ try {
+ Pair<Double, Double> geoLocation =
+ CorrectNegativeLatLongForMediaMuxer.adjustGeoLocation(
+ location.getLatitude(), location.getLongitude());
+ mediaMuxer.setLocation((float) geoLocation.first.doubleValue(),
+ (float) geoLocation.second.doubleValue());
+ } catch (IllegalArgumentException e) {
+ mediaMuxer.release();
+ onInProgressRecordingInternalError(recordingToStart,
+ ERROR_INVALID_OUTPUT_OPTIONS, e);
+ return;
+ }
+ }
+
+ mVideoTrackIndex = mediaMuxer.addTrack(mVideoOutputConfig.getMediaFormat());
+ if (isAudioEnabled()) {
+ mAudioTrackIndex = mediaMuxer.addTrack(mAudioOutputConfig.getMediaFormat());
+ }
+ mediaMuxer.start();
+
+ // MediaMuxer is successfully initialized, transfer the ownership to Recorder.
+ mMediaMuxer = mediaMuxer;
+
+ // Write first data to ensure tracks are not empty
+ writeVideoData(videoDataToWrite, recordingToStart);
+ for (EncodedData data : audioDataToWrite) {
+ writeAudioData(data, recordingToStart);
+ }
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @NonNull
+ private List<EncodedData> getAudioDataToWriteAndClearCache(long firstVideoDataTimeUs) {
+ List<EncodedData> res = new ArrayList<>();
+
+ while (!mPendingAudioRingBuffer.isEmpty()) {
+ EncodedData data = mPendingAudioRingBuffer.dequeue();
+
+ // Add all audio data that has timestamp greater than or equal to the first video data
+ // timestamp.
+ if (data.getPresentationTimeUs() >= firstVideoDataTimeUs) {
+ res.add(data);
+ }
+ }
+
+ return res;
+ }
+
+ @SuppressLint("MissingPermission")
+ @ExecutedBy("mSequentialExecutor")
+ private void startInternal(@NonNull RecordingRecord recordingToStart) {
+ if (mInProgressRecording != null) {
+ throw new AssertionError("Attempted to start a new recording while another was in "
+ + "progress.");
+ }
+
+ if (recordingToStart.getOutputOptions().getFileSizeLimit() > 0) {
+ // Use 95% of the given file size limit as the criteria, following the behavior of
+ // MPEG4Writer.cpp in libstagefright.
+ mFileSizeLimitInBytes = Math.round(
+ recordingToStart.getOutputOptions().getFileSizeLimit() * 0.95);
+ Logger.d(TAG, "File size limit in bytes: " + mFileSizeLimitInBytes);
+ } else {
+ mFileSizeLimitInBytes = OutputOptions.FILE_SIZE_UNLIMITED;
+ }
+
+ if (recordingToStart.getOutputOptions().getDurationLimitMillis() > 0) {
+ mDurationLimitNs = TimeUnit.MILLISECONDS.toNanos(
+ recordingToStart.getOutputOptions().getDurationLimitMillis());
+ Logger.d(TAG, "Duration limit in nanoseconds: " + mDurationLimitNs);
+ } else {
+ mDurationLimitNs = OutputOptions.DURATION_UNLIMITED;
+ }
+
+ mInProgressRecording = recordingToStart;
+
+ // Configure audio based on the current audio state.
+ switch (mAudioState) {
+ case ERROR_ENCODER:
+ // Fall-through
+ case ERROR_SOURCE:
+ // Fall-through
+ case ACTIVE:
+ // Fall-through
+ case DISABLED:
+ throw new AssertionError(
+ "Incorrectly invoke startInternal in audio state " + mAudioState);
+ case IDLING:
+ setAudioState(recordingToStart.hasAudioEnabled() ? AudioState.ACTIVE
+ : AudioState.DISABLED);
+ break;
+ case INITIALIZING:
+ if (recordingToStart.hasAudioEnabled()) {
+ if (!isAudioSupported()) {
+ throw new AssertionError(
+ "The Recorder doesn't support recording with audio");
+ }
+ try {
+ setupAudio(recordingToStart);
+ setAudioState(AudioState.ACTIVE);
+ } catch (AudioSourceAccessException | InvalidConfigException e) {
+ Logger.e(TAG, "Unable to create audio resource with error: ", e);
+ AudioState audioState;
+ if (e instanceof InvalidConfigException) {
+ audioState = AudioState.ERROR_ENCODER;
+ } else {
+ audioState = AudioState.ERROR_SOURCE;
+ }
+ setAudioState(audioState);
+ mAudioErrorCause = e;
+ }
+ }
+ break;
+ }
+
+ initEncoderAndAudioSourceCallbacks(recordingToStart);
+ if (isAudioEnabled()) {
+ mAudioSource.start();
+ mAudioEncoder.start();
+ }
+ mVideoEncoder.start();
+
+ mInProgressRecording.updateVideoRecordEvent(VideoRecordEvent.start(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void initEncoderAndAudioSourceCallbacks(@NonNull RecordingRecord recordingToStart) {
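+ // Each encoder contributes a future that completes when it stops (or fails); the
+ // recording is finalized once all of these futures complete (see the callback on
+ // Futures.allAsList() below).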
+ mEncodingFutures.add(CallbackToFutureAdapter.getFuture(
+ completer -> {
+ mVideoEncoder.setEncoderCallback(new EncoderCallback() {
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStart() {
+ // No-op.
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStop() {
+ completer.set(null);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeError(@NonNull EncodeException e) {
+ completer.setException(e);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodedData(@NonNull EncodedData encodedData) {
+ // If the media muxer doesn't yet exist, we may need to create and
+ // start it. Otherwise we can write the data.
+ if (mMediaMuxer == null) {
+ if (!mInProgressRecordingStopping) {
+ // Clear any previously pending video data since we now
+ // have newer data.
+ boolean cachedDataDropped = false;
+ if (mPendingFirstVideoData != null) {
+ cachedDataDropped = true;
+ mPendingFirstVideoData.close();
+ mPendingFirstVideoData = null;
+ }
+
+ if (encodedData.isKeyFrame()) {
+ // We have a keyframe. Cache it in case we need to wait
+ // for audio data.
+ mPendingFirstVideoData = encodedData;
+ // If first pending audio data exists or audio is
+ // disabled, we can start the muxer.
+ if (!isAudioEnabled()
+ || !mPendingAudioRingBuffer.isEmpty()) {
+ Logger.d(TAG, "Received video keyframe. Starting "
+ + "muxer...");
+ setupAndStartMediaMuxer(recordingToStart);
+ } else {
+ if (cachedDataDropped) {
+ Logger.d(TAG, "Replaced cached video keyframe "
+ + "with newer keyframe.");
+ } else {
+ Logger.d(TAG, "Cached video keyframe while we wait "
+ + "for first audio sample before starting "
+ + "muxer.");
+ }
+ }
+ } else {
+ // If the video data is not a key frame,
+ // MediaMuxer#writeSampleData will drop it. It will
+ // cause incorrect estimated record bytes and should
+ // be dropped.
+ if (cachedDataDropped) {
+ Logger.d(TAG, "Dropped cached keyframe since we have "
+ + "new video data and have not yet received "
+ + "audio data.");
+ }
+ Logger.d(TAG, "Dropped video data since muxer has not yet "
+ + "started and data is not a keyframe.");
+ mVideoEncoder.requestKeyFrame();
+ encodedData.close();
+ }
+ } else {
+ // Recording is stopping before muxer has been started.
+ Logger.d(TAG, "Drop video data since recording is stopping.");
+ encodedData.close();
+ }
+ } else {
+ // MediaMuxer is already started, write the data.
+ try (EncodedData videoDataToWrite = encodedData) {
+ writeVideoData(videoDataToWrite, recordingToStart);
+ }
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onOutputConfigUpdate(@NonNull OutputConfig outputConfig) {
+ mVideoOutputConfig = outputConfig;
+ }
+ }, mSequentialExecutor);
+ return "videoEncodingFuture";
+ }));
+
+ if (isAudioEnabled()) {
+ mEncodingFutures.add(CallbackToFutureAdapter.getFuture(
+ completer -> {
+ Consumer<Throwable> audioErrorConsumer = throwable -> {
+ if (mAudioErrorCause == null) {
+ // If the audio source or encoder encounters error, update the
+ // status event to notify users. Then continue recording without
+ // audio data.
+ if (throwable instanceof EncodeException) {
+ setAudioState(AudioState.ERROR_ENCODER);
+ } else {
+ setAudioState(AudioState.ERROR_SOURCE);
+ }
+ mAudioErrorCause = throwable;
+ updateInProgressStatusEvent();
+ completer.set(null);
+ }
+ };
+
+ mAudioSource.setAudioSourceCallback(mSequentialExecutor,
+ new AudioSource.AudioSourceCallback() {
+ @Override
+ public void onSilenced(boolean silenced) {
+ if (mIsAudioSourceSilenced != silenced) {
+ mIsAudioSourceSilenced = silenced;
+ mAudioErrorCause = silenced ? new IllegalStateException(
+ "The audio source has been silenced.") : null;
+ updateInProgressStatusEvent();
+ } else {
+ Logger.w(TAG, "Audio source silenced transitions"
+ + " to the same state " + silenced);
+ }
+ }
+
+ @Override
+ public void onError(@NonNull Throwable throwable) {
+ Logger.e(TAG, "Error occurred after audio source started.",
+ throwable);
+ if (throwable instanceof AudioSourceAccessException) {
+ audioErrorConsumer.accept(throwable);
+ }
+ }
+ });
+
+ mAudioEncoder.setEncoderCallback(new EncoderCallback() {
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStart() {
+ // No-op.
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeStop() {
+ completer.set(null);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodeError(@NonNull EncodeException e) {
+ if (mAudioErrorCause == null) {
+ audioErrorConsumer.accept(e);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onEncodedData(@NonNull EncodedData encodedData) {
+ if (mAudioState == AudioState.DISABLED) {
+ throw new AssertionError(
+ "Audio is not enabled but audio encoded data is "
+ + "produced.");
+ }
+
+ // If the media muxer doesn't yet exist, we may need to create and
+ // start it. Otherwise we can write the data.
+ if (mMediaMuxer == null) {
+ if (!mInProgressRecordingStopping) {
+ // BufferCopiedEncodedData is used to copy the content of
+ // the encoded data, preventing byte buffers of the media
+ // codec from being occupied. Also, since the resources of
+ // BufferCopiedEncodedData will be automatically released
+ // by garbage collection, there is no need to call its
+ // close() function.
+ mPendingAudioRingBuffer.enqueue(
+ new BufferCopiedEncodedData(encodedData));
+
+ if (mPendingFirstVideoData != null) {
+ // Both audio and data are ready. Start the muxer.
+ Logger.d(TAG, "Received audio data. Starting muxer...");
+ setupAndStartMediaMuxer(recordingToStart);
+ } else {
+ Logger.d(TAG, "Cached audio data while we wait"
+ + " for video keyframe before starting muxer.");
+ }
+ } else {
+ // Recording is stopping before muxer has been started.
+ Logger.d(TAG,
+ "Drop audio data since recording is stopping.");
+ }
+ encodedData.close();
+ } else {
+ try (EncodedData audioDataToWrite = encodedData) {
+ writeAudioData(audioDataToWrite, recordingToStart);
+ }
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @Override
+ public void onOutputConfigUpdate(@NonNull OutputConfig outputConfig) {
+ mAudioOutputConfig = outputConfig;
+ }
+ }, mSequentialExecutor);
+ return "audioEncodingFuture";
+ }));
+ }
+
+ Futures.addCallback(Futures.allAsList(mEncodingFutures),
+ new FutureCallback<List<Void>>() {
+ @Override
+ public void onSuccess(@Nullable List<Void> result) {
+ Logger.d(TAG, "Encodings end successfully.");
+ finalizeInProgressRecording(mRecordingStopError, mRecordingStopErrorCause);
+ }
+
+ @Override
+ public void onFailure(@NonNull Throwable t) {
+ Logger.d(TAG, "Encodings end with error: " + t);
+ // If the media muxer hasn't been set up, assume the encoding fails
+ // because of no valid data has been produced.
+ finalizeInProgressRecording(
+ mMediaMuxer == null ? ERROR_NO_VALID_DATA : ERROR_ENCODING_FAILED,
+ t);
+ }
+ },
+ // Can use direct executor since completers are always completed on sequential
+ // executor.
+ CameraXExecutors.directExecutor());
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void writeVideoData(@NonNull EncodedData encodedData,
+ @NonNull RecordingRecord recording) {
+ if (mVideoTrackIndex == null) {
+ // Throw an exception if the data comes before the track is added.
+ throw new AssertionError(
+ "Video data comes before the track is added to MediaMuxer.");
+ }
+
+ long newRecordingBytes = mRecordingBytes + encodedData.size();
+ if (mFileSizeLimitInBytes != OutputOptions.FILE_SIZE_UNLIMITED
+ && newRecordingBytes > mFileSizeLimitInBytes) {
+ Logger.d(TAG,
+ String.format("Reach file size limit %d > %d", newRecordingBytes,
+ mFileSizeLimitInBytes));
+ onInProgressRecordingInternalError(recording, ERROR_FILE_SIZE_LIMIT_REACHED, null);
+ return;
+ }
+
+ long newRecordingDurationNs = 0L;
+ long currentPresentationTimeUs = encodedData.getPresentationTimeUs();
+
+ if (mFirstRecordingVideoDataTimeUs == Long.MAX_VALUE) {
+ mFirstRecordingVideoDataTimeUs = currentPresentationTimeUs;
+ Logger.d(TAG, String.format("First video time: %d (%s)", mFirstRecordingVideoDataTimeUs,
+ readableUs(mFirstRecordingVideoDataTimeUs)));
+ } else {
+ newRecordingDurationNs = TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - Math.min(mFirstRecordingVideoDataTimeUs,
+ mFirstRecordingAudioDataTimeUs));
+ Preconditions.checkState(mPreviousRecordingVideoDataTimeUs != Long.MAX_VALUE, "There "
+ + "should be previous data for adjusting the duration.");
+ // We currently don't send an additional empty buffer (bufferInfo.size = 0) with
+ // MediaCodec.BUFFER_FLAG_END_OF_STREAM to let the muxer know the duration of the
+ // last data, so it will be assumed to have the same duration as the data before it. So
+ // add the estimated value to the duration to ensure the final duration will not
+ // exceed the limit.
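+ // For example, if the accumulated duration is 990ms and the gap to the previous video
+ // sample is 33ms, the adjusted duration of 1023ms is what is checked against the limit.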
+ long adjustedDurationNs = newRecordingDurationNs + TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - mPreviousRecordingVideoDataTimeUs);
+ if (mDurationLimitNs != OutputOptions.DURATION_UNLIMITED
+ && adjustedDurationNs > mDurationLimitNs) {
+ Logger.d(TAG, String.format("Video data reaches duration limit %d > %d",
+ adjustedDurationNs, mDurationLimitNs));
+ onInProgressRecordingInternalError(recording, ERROR_DURATION_LIMIT_REACHED, null);
+ return;
+ }
+ }
+
+ mMediaMuxer.writeSampleData(mVideoTrackIndex, encodedData.getByteBuffer(),
+ encodedData.getBufferInfo());
+
+ mRecordingBytes = newRecordingBytes;
+ mRecordingDurationNs = newRecordingDurationNs;
+ mPreviousRecordingVideoDataTimeUs = currentPresentationTimeUs;
+
+ updateInProgressStatusEvent();
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void writeAudioData(@NonNull EncodedData encodedData,
+ @NonNull RecordingRecord recording) {
+
+ long newRecordingBytes = mRecordingBytes + encodedData.size();
+ if (mFileSizeLimitInBytes != OutputOptions.FILE_SIZE_UNLIMITED
+ && newRecordingBytes > mFileSizeLimitInBytes) {
+ Logger.d(TAG,
+ String.format("Reach file size limit %d > %d",
+ newRecordingBytes,
+ mFileSizeLimitInBytes));
+ onInProgressRecordingInternalError(recording, ERROR_FILE_SIZE_LIMIT_REACHED, null);
+ return;
+ }
+
+ long newRecordingDurationNs = 0L;
+ long currentPresentationTimeUs = encodedData.getPresentationTimeUs();
+ if (mFirstRecordingAudioDataTimeUs == Long.MAX_VALUE) {
+ mFirstRecordingAudioDataTimeUs = currentPresentationTimeUs;
+ Logger.d(TAG, String.format("First audio time: %d (%s)", mFirstRecordingAudioDataTimeUs,
+ readableUs(mFirstRecordingAudioDataTimeUs)));
+ } else {
+ newRecordingDurationNs = TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - Math.min(mFirstRecordingVideoDataTimeUs,
+ mFirstRecordingAudioDataTimeUs));
+ Preconditions.checkState(mPreviousRecordingAudioDataTimeUs != Long.MAX_VALUE, "There "
+ + "should be previous data for adjusting the duration.");
+ // We currently don't send an additional empty buffer (bufferInfo.size = 0) with
+ // MediaCodec.BUFFER_FLAG_END_OF_STREAM to let the muxer know the duration of the
+ // last data, so it will be assumed to have the same duration as the data before it. So
+ // add the estimated value to the duration to ensure the final duration will not
+ // exceed the limit.
+ long adjustedDurationNs = newRecordingDurationNs + TimeUnit.MICROSECONDS.toNanos(
+ currentPresentationTimeUs - mPreviousRecordingAudioDataTimeUs);
+ if (mDurationLimitNs != OutputOptions.DURATION_UNLIMITED
+ && adjustedDurationNs > mDurationLimitNs) {
+ Logger.d(TAG, String.format("Audio data reaches duration limit %d > %d",
+ adjustedDurationNs, mDurationLimitNs));
+ onInProgressRecordingInternalError(recording, ERROR_DURATION_LIMIT_REACHED, null);
+ return;
+ }
+ }
+
+ mMediaMuxer.writeSampleData(mAudioTrackIndex,
+ encodedData.getByteBuffer(),
+ encodedData.getBufferInfo());
+
+ mRecordingBytes = newRecordingBytes;
+ mPreviousRecordingAudioDataTimeUs = currentPresentationTimeUs;
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void pauseInternal(@NonNull RecordingRecord recordingToPause) {
+ // Only pause recording if recording is in-progress and it is not stopping.
+ if (mInProgressRecording == recordingToPause && !mInProgressRecordingStopping) {
+ if (isAudioEnabled()) {
+ mAudioEncoder.pause();
+ }
+ mVideoEncoder.pause();
+
+ mInProgressRecording.updateVideoRecordEvent(VideoRecordEvent.pause(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void resumeInternal(@NonNull RecordingRecord recordingToResume) {
+ // Only resume recording if recording is in-progress and it is not stopping.
+ if (mInProgressRecording == recordingToResume && !mInProgressRecordingStopping) {
+ if (isAudioEnabled()) {
+ mAudioEncoder.start();
+ }
+ mVideoEncoder.start();
+
+ mInProgressRecording.updateVideoRecordEvent(VideoRecordEvent.resume(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void stopInternal(@NonNull RecordingRecord recordingToStop,
+ long explicitlyStopTime, @VideoRecordError int stopError,
+ @Nullable Throwable errorCause) {
+ // Only stop recording if recording is in-progress and it is not already stopping.
+ if (mInProgressRecording == recordingToStop && !mInProgressRecordingStopping) {
+ mInProgressRecordingStopping = true;
+ mRecordingStopError = stopError;
+ mRecordingStopErrorCause = errorCause;
+ if (isAudioEnabled()) {
+ clearPendingAudioRingBuffer();
+ mAudioEncoder.stop(explicitlyStopTime);
+ }
+ if (mPendingFirstVideoData != null) {
+ mPendingFirstVideoData.close();
+ mPendingFirstVideoData = null;
+ }
+
+ if (mSourceState != SourceState.ACTIVE_NON_STREAMING) {
+ // Per b/197047288, if the source is still ACTIVE, we will wait for the source to
+ // become non-streaming before notifying the encoder the source has stopped.
+ // Similarly, if the source is already INACTIVE, we won't know that the source
+ // has stopped until the surface request callback, so we'll wait for that.
+ // In both cases, we set a timeout to ensure the source is always signalled on
+ // devices that require it and to act as a flag that we need to signal the source
+ // stopped.
+ Encoder finalVideoEncoder = mVideoEncoder;
+ mSourceNonStreamingTimeout = CameraXExecutors.mainThreadExecutor().schedule(
+ () -> mSequentialExecutor.execute(() -> {
+ Logger.d(TAG, "The source didn't become non-streaming "
+ + "before timeout. Waited " + SOURCE_NON_STREAMING_TIMEOUT_MS
+ + "ms");
+ if (DeviceQuirks.get(
+ DeactivateEncoderSurfaceBeforeStopEncoderQuirk.class)
+ != null) {
+ // Even in the case of timeout, we tell the encoder the source has
+ // stopped because devices with this quirk require that the codec
+ // produce a new surface.
+ notifyEncoderSourceStopped(finalVideoEncoder);
+ }
+ }), SOURCE_NON_STREAMING_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+ } else {
+ // Source is already non-streaming. Signal source is stopped right away.
+ notifyEncoderSourceStopped(mVideoEncoder);
+ }
+
+ // Stop the encoder. This will tell the encoder to stop encoding new data. We'll notify
+ // the encoder when the source has actually stopped in the FutureCallback.
+ // If the recording is explicitly stopped by the user, pass the stop timestamp to the
+ // encoder so that the encoding can be stop as close as to the actual stop time.
+ mVideoEncoder.stop(explicitlyStopTime);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ static void notifyEncoderSourceStopped(@NonNull Encoder encoder) {
+ if (encoder instanceof EncoderImpl) {
+ ((EncoderImpl) encoder).signalSourceStopped();
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void clearPendingAudioRingBuffer() {
+ while (!mPendingAudioRingBuffer.isEmpty()) {
+ mPendingAudioRingBuffer.dequeue();
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void reset() {
+ if (mAudioEncoder != null) {
+ Logger.d(TAG, "Releasing audio encoder.");
+ mAudioEncoder.release();
+ mAudioEncoder = null;
+ mAudioOutputConfig = null;
+ }
+ tryReleaseVideoEncoder();
+ if (mAudioSource != null) {
+ releaseCurrentAudioSource();
+ }
+
+ setAudioState(AudioState.INITIALIZING);
+ onReset();
+ }
+
+ @SuppressWarnings("FutureReturnValueIgnored")
+ @ExecutedBy("mSequentialExecutor")
+ private void tryReleaseVideoEncoder() {
+ if (mVideoEncoderSessionToRelease != null) {
+ Preconditions.checkState(
+ mVideoEncoderSessionToRelease.getVideoEncoder() == mVideoEncoder);
+
+ Logger.d(TAG, "Releasing video encoder: " + mVideoEncoder);
+ mVideoEncoderSessionToRelease.terminateNow();
+ mVideoEncoderSessionToRelease = null;
+ mVideoEncoder = null;
+ mVideoOutputConfig = null;
+ setLatestSurface(null);
+ } else {
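+ // No session is pending release; just signal the current session to terminate when
+ // it is safe to do so. The returned future is intentionally ignored here.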
+ safeToCloseVideoEncoder();
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void onReset() {
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_PAUSED:
+ // Fall-through
+ case PENDING_RECORDING:
+ updateNonPendingState(State.CONFIGURING);
+ break;
+ case ERROR:
+ // Fall-through
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case STOPPING:
+ setState(State.CONFIGURING);
+ break;
+ case CONFIGURING:
+ // No-op
+ break;
+ }
+ }
+
+ mNeedsReset = false;
+
+ // If the latest surface request hasn't been serviced, use it to re-configure the Recorder.
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ configureInternal(mLatestSurfaceRequest, mVideoSourceTimebase);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private int internalAudioStateToAudioStatsState(@NonNull AudioState audioState) {
+ switch (audioState) {
+ case DISABLED:
+ // Fall-through
+ case INITIALIZING:
+ // Audio will not be initialized until the first recording with audio enabled is
+ // started. So if the audio state is INITIALIZING, consider the audio is disabled.
+ return AudioStats.AUDIO_STATE_DISABLED;
+ case ACTIVE:
+ if (mIsAudioSourceSilenced) {
+ return AudioStats.AUDIO_STATE_SOURCE_SILENCED;
+ } else {
+ return AudioStats.AUDIO_STATE_ACTIVE;
+ }
+ case ERROR_ENCODER:
+ return AudioStats.AUDIO_STATE_ENCODER_ERROR;
+ case ERROR_SOURCE:
+ return AudioStats.AUDIO_STATE_SOURCE_ERROR;
+ case IDLING:
+ // AudioStats should not be produced when audio is in IDLING state.
+ break;
+ }
+ // Should not reach.
+ throw new AssertionError("Invalid internal audio state: " + audioState);
+ }
+
+ @NonNull
+ private StreamState internalStateToStreamState(@NonNull State state) {
+ // Stopping state should be treated as inactive on certain chipsets. See b/196039619.
+ DeactivateEncoderSurfaceBeforeStopEncoderQuirk quirk =
+ DeviceQuirks.get(DeactivateEncoderSurfaceBeforeStopEncoderQuirk.class);
+ return state == State.RECORDING || (state == State.STOPPING && quirk == null)
+ ? StreamState.ACTIVE : StreamState.INACTIVE;
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ boolean isAudioEnabled() {
+ return mAudioState == AudioState.ACTIVE;
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void finalizeInProgressRecording(@VideoRecordError int error, @Nullable Throwable throwable) {
+ if (mInProgressRecording == null) {
+ throw new AssertionError("Attempted to finalize in-progress recording, but no "
+ + "recording is in progress.");
+ }
+
+ @VideoRecordError int errorToSend = error;
+ if (mMediaMuxer != null) {
+ try {
+ mMediaMuxer.stop();
+ mMediaMuxer.release();
+ } catch (IllegalStateException e) {
+ Logger.e(TAG, "MediaMuxer failed to stop or release with error: " + e.getMessage());
+ if (errorToSend == ERROR_NONE) {
+ errorToSend = ERROR_UNKNOWN;
+ }
+ }
+ mMediaMuxer = null;
+ } else if (errorToSend == ERROR_NONE) {
+ // Muxer was never started, so recording has no data.
+ errorToSend = ERROR_NO_VALID_DATA;
+ }
+
+ mInProgressRecording.finalizeRecording(mOutputUri);
+
+ OutputOptions outputOptions = mInProgressRecording.getOutputOptions();
+ RecordingStats stats = getInProgressRecordingStats();
+ OutputResults outputResults = OutputResults.of(mOutputUri);
+ mInProgressRecording.updateVideoRecordEvent(errorToSend == ERROR_NONE
+ ? VideoRecordEvent.finalize(
+ outputOptions,
+ stats,
+ outputResults)
+ : VideoRecordEvent.finalizeWithError(
+ outputOptions,
+ stats,
+ outputResults,
+ errorToSend,
+ throwable));
+
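+ // Clear all per-recording state so the Recorder is ready for the next recording.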
+ RecordingRecord finalizedRecording = mInProgressRecording;
+ mInProgressRecording = null;
+ mInProgressRecordingStopping = false;
+ mAudioTrackIndex = null;
+ mVideoTrackIndex = null;
+ mEncodingFutures.clear();
+ mOutputUri = Uri.EMPTY;
+ mRecordingBytes = 0L;
+ mRecordingDurationNs = 0L;
+ mFirstRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ mFirstRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ mPreviousRecordingVideoDataTimeUs = Long.MAX_VALUE;
+ mPreviousRecordingAudioDataTimeUs = Long.MAX_VALUE;
+ mRecordingStopError = ERROR_UNKNOWN;
+ mRecordingStopErrorCause = null;
+ mAudioErrorCause = null;
+ clearPendingAudioRingBuffer();
+
+ switch (mAudioState) {
+ case IDLING:
+ throw new AssertionError(
+ "Incorrectly finalize recording when audio state is IDLING");
+ case INITIALIZING:
+ // No-op, the audio hasn't been initialized. Keep it in INITIALIZING state.
+ break;
+ case DISABLED:
+ // Fall-through
+ case ACTIVE:
+ setAudioState(AudioState.IDLING);
+ mAudioSource.stop();
+ break;
+ case ERROR_ENCODER:
+ // Fall-through
+ case ERROR_SOURCE:
+ // Reset audio state to INITIALIZING if the audio encoder encountered error, so
+ // that it can be setup again when the next recording with audio enabled is started.
+ setAudioState(AudioState.INITIALIZING);
+ break;
+ }
+
+ onRecordingFinalized(finalizedRecording);
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ private void onRecordingFinalized(@NonNull RecordingRecord finalizedRecording) {
+ boolean needsReset = false;
+ boolean startRecordingPaused = false;
+ boolean needsConfigure = false;
+ RecordingRecord recordingToStart = null;
+ RecordingRecord pendingRecordingToFinalize = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ synchronized (mLock) {
+ if (mActiveRecordingRecord != finalizedRecording) {
+ throw new AssertionError("Active recording did not match finalized recording on "
+ + "finalize.");
+ }
+
+ mActiveRecordingRecord = null;
+ switch (mState) {
+ case RESETTING:
+ needsReset = true;
+ break;
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ // If finalized while in a RECORDING or PAUSED state, then the recording was
+ // likely finalized due to an error.
+ // Fall-through
+ case STOPPING:
+ if (mEncoderNotUsePersistentInputSurface) {
+ // If the encoder doesn't use persistent input surface, the active
+ // surface will become invalid after a recording is finalized. If there's
+ // an unserviced surface request, configure with it directly, otherwise
+ // wait for a new surface update.
+ mActiveSurface = null;
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ needsConfigure = true;
+ }
+ setState(State.CONFIGURING);
+ } else {
+ setState(State.IDLING);
+ }
+ break;
+ case PENDING_PAUSED:
+ startRecordingPaused = true;
+ // Fall-through
+ case PENDING_RECORDING:
+ if (mSourceState == SourceState.INACTIVE) {
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ setState(State.CONFIGURING);
+ error = ERROR_SOURCE_INACTIVE;
+ errorCause = PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE;
+ } else if (mEncoderNotUsePersistentInputSurface) {
+ // If the encoder doesn't use persistent input surface, the active
+ // surface will become invalid after a recording is finalized. If there's
+ // an unserviced surface request, configure with it directly, otherwise
+ // wait for a new surface update.
+ mActiveSurface = null;
+ if (mLatestSurfaceRequest != null && !mLatestSurfaceRequest.isServiced()) {
+ needsConfigure = true;
+ }
+ updateNonPendingState(State.CONFIGURING);
+ } else if (mVideoEncoder != null) {
+ // If there's no VideoEncoder, it may need to wait for the new
+ // VideoEncoder to be configured.
+ recordingToStart = makePendingRecordingActiveLocked(mState);
+ }
+ break;
+ case ERROR:
+ // Error state is non-recoverable. Nothing to do here.
+ break;
+ case CONFIGURING:
+ // No-op, the Recorder has been reset before the recording is finalized. So
+ // keep the state in CONFIGURING.
+ break;
+ case IDLING:
+ throw new AssertionError("Unexpected state on finalize of recording: "
+ + mState);
+ }
+ }
+
+ // Perform required actions from state changes inline on sequential executor but unlocked.
+ if (needsConfigure) {
+ configureInternal(mLatestSurfaceRequest, mVideoSourceTimebase);
+ } else if (needsReset) {
+ reset();
+ } else if (recordingToStart != null) {
+ // A pending recording will only be started if we're not waiting for a new surface.
+ // Otherwise the recording will be started after receiving a new surface request.
+ if (mEncoderNotUsePersistentInputSurface) {
+ throw new AssertionError("Attempt to start a pending recording while the Recorder"
+ + " is waiting for a new surface request.");
+ }
+ startRecording(recordingToStart, startRecordingPaused);
+ } else if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, error, errorCause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ void onInProgressRecordingInternalError(@NonNull RecordingRecord recording,
+ @VideoRecordError int error, @Nullable Throwable cause) {
+ if (recording != mInProgressRecording) {
+ throw new AssertionError("Internal error occurred on recording that is not the current "
+ + "in-progress recording.");
+ }
+
+ boolean needsStop = false;
+ synchronized (mLock) {
+ switch (mState) {
+ case PAUSED:
+ // Fall-through
+ case RECORDING:
+ setState(State.STOPPING);
+ needsStop = true;
+ // Fall-through
+ case STOPPING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case PENDING_RECORDING:
+ // Fall-through
+ case PENDING_PAUSED:
+ // Fall-through
+ if (recording != mActiveRecordingRecord) {
+ throw new AssertionError("Internal error occurred for recording but it is"
+ + " not the active recording.");
+ }
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case ERROR:
+ throw new AssertionError("In-progress recording error occurred while in "
+ + "unexpected state: " + mState);
+ }
+ }
+
+ if (needsStop) {
+ stopInternal(recording, Encoder.NO_TIMESTAMP, error, cause);
+ }
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ void tryServicePendingRecording() {
+ boolean startRecordingPaused = false;
+ RecordingRecord recordingToStart = null;
+ RecordingRecord pendingRecordingToFinalize = null;
+ @VideoRecordError int error = ERROR_NONE;
+ Throwable errorCause = null;
+ synchronized (mLock) {
+ switch (mState) {
+ case PENDING_PAUSED:
+ startRecordingPaused = true;
+ // Fall-through
+ case PENDING_RECORDING:
+ if (mActiveRecordingRecord != null || mNeedsReset) {
+ // Active recording is still finalizing or the Recorder is expected to be
+ // reset. Pending recording will be serviced in onRecordingFinalized() or
+ // in onReset().
+ break;
+ }
+ if (mSourceState == SourceState.INACTIVE) {
+ pendingRecordingToFinalize = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ restoreNonPendingState(); // Equivalent to setState(mNonPendingState)
+ error = ERROR_SOURCE_INACTIVE;
+ errorCause = PENDING_RECORDING_ERROR_CAUSE_SOURCE_INACTIVE;
+ } else if (mVideoEncoder != null) {
+ // If there's no VideoEncoder, it may need to wait for the new
+ // VideoEncoder to be configured.
+ recordingToStart = makePendingRecordingActiveLocked(mState);
+ }
+ break;
+ case CONFIGURING:
+ // Fall-through
+ case IDLING:
+ // Fall-through
+ case RECORDING:
+ // Fall-through
+ case PAUSED:
+ // Fall-through
+ case STOPPING:
+ // Fall-through
+ case RESETTING:
+ // Fall-through
+ case ERROR:
+ break;
+ }
+ }
+
+ if (recordingToStart != null) {
+ // Start new active recording inline on sequential executor (but unlocked).
+ startRecording(recordingToStart, startRecordingPaused);
+ } else if (pendingRecordingToFinalize != null) {
+ finalizePendingRecording(pendingRecordingToFinalize, error, errorCause);
+ }
+ }
+
+ /**
+ * Makes the pending recording active and returns the new active recording.
+ *
+ * <p>This method will not actually start the recording. It is up to the caller to start the
+ * returned recording. However, the Recorder.State will be updated to reflect what the state
+ * should be after the recording is started. This allows the recording to be started when no
+ * longer under lock.
+ */
+ @GuardedBy("mLock")
+ @NonNull
+ private RecordingRecord makePendingRecordingActiveLocked(@NonNull State state) {
+ boolean startRecordingPaused = false;
+ if (state == State.PENDING_PAUSED) {
+ startRecordingPaused = true;
+ } else if (state != State.PENDING_RECORDING) {
+ throw new AssertionError("makePendingRecordingActiveLocked() can only be called from "
+ + "a pending state.");
+ }
+ if (mActiveRecordingRecord != null) {
+ throw new AssertionError("Cannot make pending recording active because another "
+ + "recording is already active.");
+ }
+ if (mPendingRecordingRecord == null) {
+ throw new AssertionError("Pending recording should exist when in a PENDING"
+ + " state.");
+ }
+ // Swap the pending recording to the active recording and start it
+ RecordingRecord recordingToStart = mActiveRecordingRecord = mPendingRecordingRecord;
+ mPendingRecordingRecord = null;
+ // Start recording if start() has been called before video encoder is setup.
+ if (startRecordingPaused) {
+ setState(State.PAUSED);
+ } else {
+ setState(State.RECORDING);
+ }
+
+ return recordingToStart;
+ }
+
+ /**
+ * Actually starts a recording on the sequential executor.
+ *
+ * <p>This is intended to be called while unlocked on the sequential executor. It should only
+ * be called immediately after a pending recording has just been made active. The recording
+ * passed to this method should be the newly-made-active recording.
+ */
+ @ExecutedBy("mSequentialExecutor")
+ private void startRecording(@NonNull RecordingRecord recordingToStart,
+ boolean startRecordingPaused) {
+ // Start pending recording inline since we are already on sequential executor.
+ startInternal(recordingToStart);
+ if (startRecordingPaused) {
+ pauseInternal(recordingToStart);
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void updateInProgressStatusEvent() {
+ if (mInProgressRecording != null) {
+ mInProgressRecording.updateVideoRecordEvent(
+ VideoRecordEvent.status(
+ mInProgressRecording.getOutputOptions(),
+ getInProgressRecordingStats()));
+ }
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ @NonNull
+ RecordingStats getInProgressRecordingStats() {
+ return RecordingStats.of(mRecordingDurationNs, mRecordingBytes,
+ AudioStats.of(internalAudioStateToAudioStatsState(mAudioState), mAudioErrorCause));
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ <T> T getObservableData(@NonNull StateObservable<T> observable) {
+ ListenableFuture<T> future = observable.fetchData();
+ try {
+ // A StateObservable always has a state available and the future got from fetchData()
+ // will complete immediately.
+ return future.get();
+ } catch (ExecutionException | InterruptedException e) {
+ throw new IllegalStateException(e);
+ }
+ }
+
+ boolean isAudioSupported() {
+ return getObservableData(mMediaSpec).getAudioSpec().getChannelCount()
+ != AudioSpec.CHANNEL_COUNT_NONE;
+ }
+
+ @GuardedBy("mLock")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void setState(@NonNull State state) {
+ // If we attempt to transition to the same state, then we likely have a logic error.
+ // All state transitions should be intentional, so throw an AssertionError here.
+ if (mState == state) {
+ throw new AssertionError("Attempted to transition to state " + state + ", but "
+ + "Recorder is already in state " + state);
+ }
+
+ Logger.d(TAG, "Transitioning Recorder internal state: " + mState + " --> " + state);
+ // If we are transitioning from a non-pending state to a pending state, we need to store
+ // the non-pending state so we can transition back if the pending recording is stopped
+ // before it becomes active.
+ StreamInfo.StreamState streamState = null;
+ if (PENDING_STATES.contains(state)) {
+ if (!PENDING_STATES.contains(mState)) {
+ if (!VALID_NON_PENDING_STATES_WHILE_PENDING.contains(mState)) {
+ throw new AssertionError(
+ "Invalid state transition. Should not be transitioning "
+ + "to a PENDING state from state " + mState);
+ }
+ mNonPendingState = mState;
+ streamState = internalStateToStreamState(mNonPendingState);
+ }
+ } else if (mNonPendingState != null) {
+ // Transitioning out of a pending state. Clear the non-pending state.
+ mNonPendingState = null;
+ }
+
+ mState = state;
+ if (streamState == null) {
+ streamState = internalStateToStreamState(mState);
+ }
+ mStreamInfo.setState(StreamInfo.of(mStreamId, streamState));
+ }
+
+ @ExecutedBy("mSequentialExecutor")
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ void setLatestSurface(@Nullable Surface surface) {
+ if (mLatestSurface == surface) {
+ return;
+ }
+ mLatestSurface = surface;
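+ // Derive the stream ID from the surface identity so the published StreamInfo reflects
+ // which surface is currently in use.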
+ synchronized (mLock) {
+ setStreamId(surface != null ? surface.hashCode() : StreamInfo.STREAM_ID_ANY);
+ }
+ }
+
+ @GuardedBy("mLock")
+ private void setStreamId(int streamId) {
+ if (mStreamId == streamId) {
+ return;
+ }
+ Logger.d(TAG, "Transitioning streamId: " + mStreamId + " --> " + streamId);
+ mStreamId = streamId;
+ mStreamInfo.setState(StreamInfo.of(streamId, internalStateToStreamState(mState)));
+ }
+
+ /**
+ * Updates the non-pending state while in a pending state.
+ *
+ * <p>If called from a non-pending state, an assertion error will be thrown.
+ */
+ @GuardedBy("mLock")
+ private void updateNonPendingState(@NonNull State state) {
+ if (!PENDING_STATES.contains(mState)) {
+ throw new AssertionError("Can only update non-pending state from a pending state, "
+ + "but state is " + mState);
+ }
+
+ if (!VALID_NON_PENDING_STATES_WHILE_PENDING.contains(state)) {
+ throw new AssertionError(
+ "Invalid state transition. State is not a valid non-pending state while in a "
+ + "pending state: " + state);
+ }
+
+ if (mNonPendingState != state) {
+ mNonPendingState = state;
+ mStreamInfo.setState(
+ StreamInfo.of(mStreamId, internalStateToStreamState(state)));
+ }
+ }
+
+ /**
+ * Convenience for restoring the state to the non-pending state.
+ *
+ * <p>This is equivalent to calling setState(mNonPendingState), but performs a few safety
+ * checks. This can only be called while in a pending state.
+ */
+ @GuardedBy("mLock")
+ private void restoreNonPendingState() {
+ if (!PENDING_STATES.contains(mState)) {
+ throw new AssertionError("Cannot restore non-pending state when in state " + mState);
+ }
+
+ setState(mNonPendingState);
+ }
+
+ @SuppressWarnings("WeakerAccess") /* synthetic accessor */
+ @ExecutedBy("mSequentialExecutor")
+ void setAudioState(@NonNull AudioState audioState) {
+ Logger.d(TAG, "Transitioning audio state: " + mAudioState + " --> " + audioState);
+ mAudioState = audioState;
+ }
+
+ private static int supportedMuxerFormatOrDefaultFrom(
+ @Nullable CamcorderProfileProxy profileProxy, int defaultMuxerFormat) {
+ if (profileProxy != null) {
+ switch (profileProxy.getFileFormat()) {
+ case MediaRecorder.OutputFormat.MPEG_4:
+ return MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+ case MediaRecorder.OutputFormat.WEBM:
+ return MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM;
+ case MediaRecorder.OutputFormat.THREE_GPP:
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
+ // MediaMuxer does not support 3GPP on pre-Android O(API 26) devices.
+ return MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
+ } else {
+ return MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP;
+ }
+ default:
+ break;
+ }
+ }
+ return defaultMuxerFormat;
+ }
+
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ @AutoValue
+ abstract static class RecordingRecord implements AutoCloseable {
+
+ private final CloseGuardHelper mCloseGuard = CloseGuardHelper.create();
+
+ private final AtomicBoolean mInitialized = new AtomicBoolean(false);
+
+ private final AtomicReference<MediaMuxerSupplier> mMediaMuxerSupplier =
+ new AtomicReference<>(null);
+
+ private final AtomicReference<AudioSourceSupplier> mAudioSourceSupplier =
+ new AtomicReference<>(null);
+
+ private final AtomicReference<Consumer<Uri>> mRecordingFinalizer =
+ new AtomicReference<>(ignored -> {
+ /* no-op by default */
+ });
+
+ @NonNull
+ static RecordingRecord from(@NonNull PendingRecording pendingRecording, long recordingId) {
+ return new AutoValue_Recorder_RecordingRecord(
+ pendingRecording.getOutputOptions(),
+ pendingRecording.getListenerExecutor(),
+ pendingRecording.getEventListener(),
+ pendingRecording.isAudioEnabled(),
+ recordingId
+ );
+ }
+
+ @NonNull
+ abstract OutputOptions getOutputOptions();
+
+ @Nullable
+ abstract Executor getCallbackExecutor();
+
+ @Nullable
+ abstract Consumer<VideoRecordEvent> getEventListener();
+
+ abstract boolean hasAudioEnabled();
+
+ abstract long getRecordingId();
+
+ /**
+ * Performs initialization for this recording.
+ *
+ * @throws AssertionError if this recording has already been initialized.
+ * @throws IOException if it fails to duplicate the file descriptor when the
+ * {@link #getOutputOptions() OutputOptions} is {@link FileDescriptorOutputOptions}.
+ */
+ void initializeRecording(@NonNull Context context) throws IOException {
+ if (mInitialized.getAndSet(true)) {
+ throw new AssertionError("Recording " + this + " has already been initialized");
+ }
+ OutputOptions outputOptions = getOutputOptions();
+
+ final ParcelFileDescriptor dupedParcelFileDescriptor;
+ if (outputOptions instanceof FileDescriptorOutputOptions) {
+ // Duplicate the ParcelFileDescriptor so the input descriptor can be safely closed,
+ // or throw an IOException if it fails.
+ dupedParcelFileDescriptor =
+ ((FileDescriptorOutputOptions) outputOptions)
+ .getParcelFileDescriptor().dup();
+ } else {
+ dupedParcelFileDescriptor = null;
+ }
+
+ mCloseGuard.open("finalizeRecording");
+
+ MediaMuxerSupplier mediaMuxerSupplier =
+ (muxerOutputFormat, outputUriCreatedCallback) -> {
+ MediaMuxer mediaMuxer;
+ Uri outputUri = Uri.EMPTY;
+ if (outputOptions instanceof FileOutputOptions) {
+ FileOutputOptions fileOutputOptions = (FileOutputOptions) outputOptions;
+ File file = fileOutputOptions.getFile();
+ if (!OutputUtil.createParentFolder(file)) {
+ Logger.w(TAG,
+ "Failed to create folder for " + file.getAbsolutePath());
+ }
+ mediaMuxer = new MediaMuxer(file.getAbsolutePath(), muxerOutputFormat);
+ outputUri = Uri.fromFile(file);
+ } else if (outputOptions instanceof FileDescriptorOutputOptions) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ // Use dup'd ParcelFileDescriptor to prevent the descriptor in
+ // OutputOptions from being closed.
+ mediaMuxer = Api26Impl.createMediaMuxer(
+ dupedParcelFileDescriptor.getFileDescriptor(),
+ muxerOutputFormat);
+ } else {
+ throw new IOException(
+ "MediaMuxer doesn't accept FileDescriptor as output "
+ + "destination.");
+ }
+ } else if (outputOptions instanceof MediaStoreOutputOptions) {
+ MediaStoreOutputOptions mediaStoreOutputOptions =
+ (MediaStoreOutputOptions) outputOptions;
+
+ ContentValues contentValues =
+ new ContentValues(mediaStoreOutputOptions.getContentValues());
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ // Toggle on pending status for the video file.
+ contentValues.put(MediaStore.Video.Media.IS_PENDING, PENDING);
+ }
+ outputUri = mediaStoreOutputOptions.getContentResolver().insert(
+ mediaStoreOutputOptions.getCollectionUri(), contentValues);
+ if (outputUri == null) {
+ throw new IOException("Unable to create MediaStore entry.");
+ }
+
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
+ String path = OutputUtil.getAbsolutePathFromUri(
+ mediaStoreOutputOptions.getContentResolver(),
+ outputUri, MEDIA_COLUMN);
+ if (path == null) {
+ throw new IOException(
+ "Unable to get path from uri " + outputUri);
+ }
+ if (!OutputUtil.createParentFolder(new File(path))) {
+ Logger.w(TAG, "Failed to create folder for " + path);
+ }
+ mediaMuxer = new MediaMuxer(path, muxerOutputFormat);
+ } else {
+ ParcelFileDescriptor fileDescriptor =
+ mediaStoreOutputOptions.getContentResolver()
+ .openFileDescriptor(outputUri, "rw");
+ mediaMuxer = Api26Impl.createMediaMuxer(
+ fileDescriptor.getFileDescriptor(),
+ muxerOutputFormat);
+ fileDescriptor.close();
+ }
+ } else {
+ throw new AssertionError(
+ "Invalid output options type: "
+ + outputOptions.getClass().getSimpleName());
+ }
+ outputUriCreatedCallback.accept(outputUri);
+ return mediaMuxer;
+ };
+ mMediaMuxerSupplier.set(mediaMuxerSupplier);
+
+ Consumer<Uri> recordingFinalizer = null;
+ if (hasAudioEnabled()) {
+ if (Build.VERSION.SDK_INT >= 31) {
+ // Use anonymous inner class instead of lambda since we need to propagate
+ // permission requirements
+ @SuppressWarnings("Convert2Lambda")
+ AudioSourceSupplier audioSourceSupplier = new AudioSourceSupplier() {
+ @NonNull
+ @Override
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ public AudioSource get(@NonNull AudioSource.Settings settings,
+ @NonNull Executor executor)
+ throws AudioSourceAccessException {
+ // Context will only be held in local scope of the supplier so it will
+ // not be retained after performOneTimeAudioSourceCreation() is called.
+ return new AudioSource(settings, executor, context);
+ }
+ };
+ mAudioSourceSupplier.set(audioSourceSupplier);
+ } else {
+ // Use anonymous inner class instead of lambda since we need to propagate
+ // permission requirements
+ @SuppressWarnings("Convert2Lambda")
+ AudioSourceSupplier audioSourceSupplier = new AudioSourceSupplier() {
+ @NonNull
+ @Override
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ public AudioSource get(@NonNull AudioSource.Settings settings,
+ @NonNull Executor executor)
+ throws AudioSourceAccessException {
+ // Do not set (or retain) context on other API levels
+ return new AudioSource(settings, executor, null);
+ }
+ };
+ mAudioSourceSupplier.set(audioSourceSupplier);
+ }
+ }
+
+ if (outputOptions instanceof MediaStoreOutputOptions) {
+ MediaStoreOutputOptions mediaStoreOutputOptions =
+ (MediaStoreOutputOptions) outputOptions;
+ // TODO(b/201946954): Investigate whether we should add a setting to disable
+ // scan/update to allow users to perform it themselves.
+ if (Build.VERSION.SDK_INT >= 29) {
+ recordingFinalizer = outputUri -> {
+ if (outputUri.equals(Uri.EMPTY)) {
+ return;
+ }
+ ContentValues contentValues = new ContentValues();
+ contentValues.put(MediaStore.Video.Media.IS_PENDING, NOT_PENDING);
+ mediaStoreOutputOptions.getContentResolver().update(outputUri,
+ contentValues, null, null);
+ };
+ } else {
+ // Context will only be held in local scope of the consumer so it will not be
+ // retained after finalizeOutputFile() is called.
+ recordingFinalizer = outputUri -> {
+ if (outputUri.equals(Uri.EMPTY)) {
+ return;
+ }
+ String filePath = OutputUtil.getAbsolutePathFromUri(
+ mediaStoreOutputOptions.getContentResolver(), outputUri,
+ MEDIA_COLUMN);
+ if (filePath != null) {
+ // Use null mime type list to have MediaScanner derive mime type from
+ // extension
+ MediaScannerConnection.scanFile(context,
+ new String[]{filePath}, /*mimeTypes=*/null, (path, uri) -> {
+ if (uri == null) {
+ Logger.e(TAG, String.format("File scanning operation "
+ + "failed [path: %s]", path));
+ } else {
+ Logger.d(TAG, String.format("File scan completed "
+ + "successfully [path: %s, URI: %s]", path,
+ uri));
+ }
+ });
+ } else {
+ Logger.d(TAG,
+ "Skipping media scanner scan. Unable to retrieve file path "
+ + "from URI: " + outputUri);
+ }
+ };
+ }
+ } else if (outputOptions instanceof FileDescriptorOutputOptions) {
+ recordingFinalizer = ignored -> {
+ try {
+ // dupedParcelFileDescriptor should be non-null.
+ dupedParcelFileDescriptor.close();
+ } catch (IOException e) {
+ // IOException is not expected to be thrown while closing
+ // ParcelFileDescriptor.
+ Logger.e(TAG, "Failed to close dup'd ParcelFileDescriptor", e);
+ }
+ };
+ }
+
+ if (recordingFinalizer != null) {
+ mRecordingFinalizer.set(recordingFinalizer);
+ }
+ }
+
+ /**
+ * Updates the recording status and callback to users.
+ */
+ void updateVideoRecordEvent(@NonNull VideoRecordEvent event) {
+ if (!Objects.equals(event.getOutputOptions(), getOutputOptions())) {
+ throw new AssertionError("Attempted to update event listener with event from "
+ + "incorrect recording [Recording: " + event.getOutputOptions()
+ + ", Expected: " + getOutputOptions() + "]");
+ }
+ String message = "Sending VideoRecordEvent " + event.getClass().getSimpleName();
+ if (event instanceof VideoRecordEvent.Finalize) {
+ VideoRecordEvent.Finalize finalizeEvent = (VideoRecordEvent.Finalize) event;
+ if (finalizeEvent.hasError()) {
+ message += String.format(" [error: %s]",
+ VideoRecordEvent.Finalize.errorToString(
+ finalizeEvent.getError()));
+ }
+ }
+ Logger.d(TAG, message);
+ if (getCallbackExecutor() != null && getEventListener() != null) {
+ try {
+ getCallbackExecutor().execute(() -> getEventListener().accept(event));
+ } catch (RejectedExecutionException e) {
+ Logger.e(TAG, "The callback executor is invalid.", e);
+ }
+ }
+ }
+
+ /**
+ * Creates an {@link AudioSource} for this recording.
+ *
+ * An audio source can only be created once per recording, so subsequent calls to this
+ * method will throw an {@link AssertionError}.
+ *
+ * Calling this method when audio is not enabled for this recording will also throw an
+ * {@link AssertionError}.
+ */
+ @NonNull
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ AudioSource performOneTimeAudioSourceCreation(
+ @NonNull AudioSource.Settings settings, @NonNull Executor audioSourceExecutor)
+ throws AudioSourceAccessException {
+ if (!hasAudioEnabled()) {
+ throw new AssertionError("Recording does not have audio enabled. Unable to create"
+ + " audio source for recording " + this);
+ }
+
+ AudioSourceSupplier audioSourceSupplier = mAudioSourceSupplier.getAndSet(null);
+ if (audioSourceSupplier == null) {
+ throw new AssertionError("One-time audio source creation has already occurred for"
+ + " recording " + this);
+ }
+
+ return audioSourceSupplier.get(settings, audioSourceExecutor);
+ }
+
+ /**
+ * Creates a {@link MediaMuxer} for this recording.
+ *
+ * A media muxer can only be created once per recording, so subsequent calls to this
+ * method will throw an {@link AssertionError}.
+ *
+ * @param muxerOutputFormat the output file format.
+ * @param outputUriCreatedCallback A callback that will send the returned media muxer's
+ * output {@link Uri}. It will be {@link Uri#EMPTY} if the
+ * {@link #getOutputOptions() OutputOptions} is
+ * {@link FileDescriptorOutputOptions}.
+ * Note: This callback will be called inline.
+ * @return the media muxer.
+ * @throws IOException if the creation of the media muxer fails.
+ * @throws AssertionError if the recording has not been initialized or if this method has
+ * already been called.
+ */
+ @NonNull
+ MediaMuxer performOneTimeMediaMuxerCreation(int muxerOutputFormat,
+ @NonNull Consumer<Uri> outputUriCreatedCallback) throws IOException {
+ if (!mInitialized.get()) {
+ throw new AssertionError("Recording " + this + " has not been initialized");
+ }
+ MediaMuxerSupplier mediaMuxerSupplier = mMediaMuxerSupplier.getAndSet(null);
+ if (mediaMuxerSupplier == null) {
+ throw new AssertionError("One-time media muxer creation has already occurred for"
+ + " recording " + this);
+ }
+ return mediaMuxerSupplier.get(muxerOutputFormat, outputUriCreatedCallback);
+ }
+
+ /**
+ * Performs final operations required to finalize this recording.
+ *
+ * Recording finalization can only occur once. Any subsequent calls to this method or
+ * {@link #close()} will throw an {@link AssertionError}.
+ *
+ * Finalizing an uninitialized recording is a no-op.
+ *
+ * @param uri The uri of the output file.
+ */
+ void finalizeRecording(@NonNull Uri uri) {
+ if (!mInitialized.get()) {
+ return;
+ }
+ finalizeRecordingInternal(mRecordingFinalizer.getAndSet(null), uri);
+ }
+
+ /**
+ * Close this recording, as if calling {@link #finalizeRecording(Uri)} with parameter
+ * {@link Uri#EMPTY}.
+ *
+ * This method is equivalent to calling {@link #finalizeRecording(Uri)} with parameter
+ * {@link Uri#EMPTY}.
+ *
+ * Recording finalization can only occur once. Any subsequent calls to this method or
+ * {@link #finalizeRecording(Uri)} will throw an {@link AssertionError}.
+ *
+ * Closing an uninitialized recording is a no-op.
+ */
+ @Override
+ public void close() {
+ finalizeRecording(Uri.EMPTY);
+ }
+
+ @Override
+ @SuppressWarnings("GenericException") // super.finalize() throws Throwable
+ protected void finalize() throws Throwable {
+ try {
+ mCloseGuard.warnIfOpen();
+ Consumer<Uri> finalizer = mRecordingFinalizer.getAndSet(null);
+ if (finalizer != null) {
+ finalizeRecordingInternal(finalizer, Uri.EMPTY);
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private void finalizeRecordingInternal(@Nullable Consumer<Uri> finalizer,
+ @NonNull Uri uri) {
+ if (finalizer == null) {
+ throw new AssertionError(
+ "Recording " + this + " has already been finalized");
+ }
+ mCloseGuard.close();
+ finalizer.accept(uri);
+ }
+
+ private interface MediaMuxerSupplier {
+ @NonNull
+ MediaMuxer get(int muxerOutputFormat, @NonNull Consumer<Uri> outputUriCreatedCallback)
+ throws IOException;
+ }
+
+ private interface AudioSourceSupplier {
+ @RequiresPermission(Manifest.permission.RECORD_AUDIO)
+ @NonNull
+ AudioSource get(@NonNull AudioSource.Settings settings,
+ @NonNull Executor audioSourceExecutor) throws AudioSourceAccessException;
+ }
+ }
+
+ /**
+ * Builder class for {@link Recorder} objects.
+ */
+ @RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+ public static final class Builder {
+
+ private final MediaSpec.Builder mMediaSpecBuilder;
+ private Executor mExecutor = null;
+ private EncoderFactory mVideoEncoderFactory = DEFAULT_ENCODER_FACTORY;
+ private EncoderFactory mAudioEncoderFactory = DEFAULT_ENCODER_FACTORY;
+
+ /**
+ * Constructor for {@code Recorder.Builder}.
+ *
+ * Creates a builder which is pre-populated with appropriate default configuration
+ * options.
+ */
+ public Builder() {
+ mMediaSpecBuilder = MediaSpec.builder();
+ }
+
+ /**
+ * Sets the {@link Executor} that runs the Recorder background task.
+ *
+ * The executor is used to run the Recorder tasks, the audio encoding and the video
+ * encoding. For the best performance, it's recommended to be an {@link Executor} that is
+ * capable of running at least two tasks concurrently, such as a
+ * {@link java.util.concurrent.ThreadPoolExecutor} backed by 2 or more threads.
+ *
+ * If not set, the Recorder will be run on the IO executor internally managed by CameraX.
+ */
+ @NonNull
+ public Builder setExecutor(@NonNull Executor executor) {
+ Preconditions.checkNotNull(executor, "The specified executor can't be null.");
+ mExecutor = executor;
+ return this;
+ }
+
+ // Usually users can use the CameraX predefined configuration for creating a recorder. We
+ // may later decide which MediaSpec options should be exposed.
+
+ /**
+ * Sets the {@link QualitySelector} of this Recorder.
+ *
+ * The provided quality selector is used to select the resolution of the recording
+ * depending on the resolutions supported by the camera and codec capabilities.
+ *
+ * If no quality selector is provided, the default is
+ * {@link Recorder#DEFAULT_QUALITY_SELECTOR}.
+ *
+ * {@link #setAspectRatio(int)} can also be used to specify the intended video aspect
+ * ratio.
+ *
+ * @see QualitySelector
+ * @see #setAspectRatio(int)
+ */
+ @NonNull
+ public Builder setQualitySelector(@NonNull QualitySelector qualitySelector) {
+ Preconditions.checkNotNull(qualitySelector,
+ "The specified quality selector can't be null.");
+ mMediaSpecBuilder.configureVideo(
+ builder -> builder.setQualitySelector(qualitySelector));
+ return this;
+ }
+
+ /**
+ * Sets the intended video encoding bitrate for recording.
+ *
+ * The target video encoding bitrate attempts to keep the actual video encoding
+ * bitrate close to the requested {@code bitrate}. Bitrate may vary during a recording
+ * depending on the scene
+ * being recorded.
+ *
+ * Additional checks will be performed on the requested {@code bitrate} to make sure the
+ * specified bitrate is applicable, and sometimes the passed bitrate will be changed
+ * internally to ensure the video recording can proceed smoothly based on the
+ * capabilities of the platform.
+ *
+ * This API only affects the video stream and should not be considered the
+ * target for the entire recording. The audio stream's bitrate is not affected by this API.
+ *
+ * If this method isn't called, an appropriate bitrate for normal video
+ * recording is selected by default. Only call this method if a custom bitrate is desired.
+ *
+ * @param bitrate the target video encoding bitrate in bits per second.
+ * @throws IllegalArgumentException if bitrate is 0 or less.
+ */
+ @NonNull
+ public Builder setTargetVideoEncodingBitRate(@IntRange(from = 1) int bitrate) {
+ if (bitrate <= 0) {
+ throw new IllegalArgumentException("The requested target bitrate " + bitrate
+ + " is not supported. Target bitrate must be greater than 0.");
+ }
+
+ mMediaSpecBuilder.configureVideo(
+ builder -> builder.setBitrate(new Range<>(bitrate, bitrate)));
+ return this;
+ }
+
+ /**
+ * Sets the video aspect ratio of this Recorder.
+ *
+ * The final video resolution will be based on the input aspect ratio and the
+ * QualitySelector in {@link #setQualitySelector(QualitySelector)}. Both settings will be
+ * respected. For example, if the aspect ratio is 4:3 and the preferred quality in
+ * QualitySelector is HD, then a HD quality resolution with 4:3 aspect ratio such as
+ * 1280x960 or 960x720 will be used. CameraX will choose an appropriate one depending on
+ * the resolutions supported by the camera and the codec capabilities. With this setting,
+ * no other aspect ratios (such as 16:9) will be used, nor any other qualities (such as
+ * UHD, FHD and SD). If no resolution with the settings can be found, it will fail to
+ * bind VideoCapture. Therefore, a recommended way is to provide a flexible
+ * QualitySelector if there is no specific video quality requirement, such as the setting
+ * in {@link Recorder#DEFAULT_QUALITY_SELECTOR}.
+ *
+ * The default value is {@link AspectRatio#RATIO_DEFAULT}. If no aspect ratio is set, the
+ * selected resolution will be based only on the QualitySelector.
+ *
+ * @param aspectRatio the aspect ratio. Possible values are {@link AspectRatio#RATIO_4_3}
+ * and {@link AspectRatio#RATIO_16_9}.
+ *
+ * @see #setQualitySelector(QualitySelector)
+ */
+ @NonNull
+ public Builder setAspectRatio(@AspectRatio.Ratio int aspectRatio) {
+ mMediaSpecBuilder.configureVideo(builder -> builder.setAspectRatio(aspectRatio));
+ return this;
+ }
+
+ /**
+ * Sets the audio source for recordings with audio enabled.
+ *
+ * This will only set the source of audio for recordings, but audio must still be
+ * enabled on a per-recording basis with {@link PendingRecording#withAudioEnabled()}
+ * before starting the recording.
+ *
+ * @param source The audio source to use. One of {@link AudioSpec#SOURCE_AUTO} or
+ * {@link AudioSpec#SOURCE_CAMCORDER}. Default is
+ * {@link AudioSpec#SOURCE_AUTO}.
+ */
+ @NonNull
+ Builder setAudioSource(@AudioSpec.Source int source) {
+ mMediaSpecBuilder.configureAudio(builder -> builder.setSource(source));
+ return this;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @NonNull
+ Builder setVideoEncoderFactory(@NonNull EncoderFactory videoEncoderFactory) {
+ mVideoEncoderFactory = videoEncoderFactory;
+ return this;
+ }
+
+ /** @hide */
+ @RestrictTo(RestrictTo.Scope.LIBRARY)
+ @NonNull
+ Builder setAudioEncoderFactory(@NonNull EncoderFactory audioEncoderFactory) {
+ mAudioEncoderFactory = audioEncoderFactory;
+ return this;
+ }
+
+ /**
+ * Builds the {@link Recorder} instance.
+ *
+ * The {@code build()} method can be called multiple times, generating a new
+ * {@link Recorder} instance each time. The returned instance is configured with the
+ * options set on this builder.
+ */
+ @NonNull
+ public Recorder build() {
+ return new Recorder(mExecutor, mMediaSpecBuilder.build(), mVideoEncoderFactory,
+ mAudioEncoderFactory);
+ }
+ }
+}
+
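Not part of the patch: a minimal Kotlin sketch of how the builder documented above might be configured. The `Recorder` name follows this file's javadoc (the patched sources may expose it as `SucklessRecorder`); the executor, quality, and aspect-ratio values are illustrative assumptions, not defaults taken from the patch.

```
import androidx.camera.core.AspectRatio
import androidx.camera.video.Quality
import androidx.camera.video.QualitySelector
import androidx.camera.video.Recorder
import java.util.concurrent.Executors

// Builds a recorder with an explicit background executor (at least 2 threads,
// as the setExecutor() javadoc above recommends), a preferred quality, and a
// 16:9 aspect ratio.
fun buildExampleRecorder(): Recorder {
    val backgroundExecutor = Executors.newFixedThreadPool(2)
    return Recorder.Builder()
        .setExecutor(backgroundExecutor)
        .setQualitySelector(QualitySelector.from(Quality.HD))
        .setAspectRatio(AspectRatio.RATIO_16_9)
        .build()
}
```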
diff --git a/app/src/main/java/androidx/camera/video/originals/Recording.java b/app/src/main/java/androidx/camera/video/originals/Recording.java
new file mode 100644
index 0000000..68a1c53
--- /dev/null
+++ b/app/src/main/java/androidx/camera/video/originals/Recording.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package androidx.camera.video;
+
+import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.RestrictTo;
+import androidx.camera.core.impl.utils.CloseGuardHelper;
+import androidx.core.util.Consumer;
+import androidx.core.util.Preconditions;
+
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Provides controls for the currently active recording.
+ *
+ * An active recording is created by starting a pending recording with
+ * {@link PendingRecording#start(Executor, Consumer)}. If there are no errors starting the
+ * recording, upon creation, an active recording will provide controls to pause, resume or stop a
+ * recording. If errors occur while starting the recording, the active recording will be
+ * instantiated in a {@link VideoRecordEvent.Finalize finalized} state, and all controls will be
+ * no-ops. The state of the recording can be observed by the video record event listener provided
+ * to {@link PendingRecording#start(Executor, Consumer)} when starting the recording.
+ *
+ * Either {@link #stop()} or {@link #close()} can be called when it is desired to
+ * stop the recording. If {@link #stop()} or {@link #close()} are not called on this object
+ * before it is no longer referenced, it will be automatically stopped at a future point in time
+ * when the object is garbage collected, and no new recordings can be started from the same
+ * {@link Recorder} that generated the object until that occurs.
+ */
+@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
+public final class Recording implements AutoCloseable {
+
+ // Indicates the recording has been explicitly stopped by users.
+ private final AtomicBoolean mIsClosed = new AtomicBoolean(false);
+ private final Recorder mRecorder;
+ private final long mRecordingId;
+ private final OutputOptions mOutputOptions;
+ private final CloseGuardHelper mCloseGuard = CloseGuardHelper.create();
+
+ Recording(@NonNull Recorder recorder, long recordingId, @NonNull OutputOptions options,
+ boolean finalizedOnCreation) {
+ mRecorder = recorder;
+ mRecordingId = recordingId;
+ mOutputOptions = options;
+
+ if (finalizedOnCreation) {
+ mIsClosed.set(true);
+ } else {
+ mCloseGuard.open("stop");
+ }
+ }
+
+ /**
+ * Creates a {@link SucklessRecording} from a {@link PendingRecording} and recording ID.
+ *
+ * The recording ID is expected to be unique to the recorder that generated the pending
+ * recording.
+ */
+ @NonNull
+ static SucklessRecording from(@NonNull SucklessPendingRecording pendingRecording, long recordingId) {
+ Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
+ return new SucklessRecording(pendingRecording.getRecorder(),
+ recordingId,
+ pendingRecording.getOutputOptions(),
+ /*finalizedOnCreation=*/false);
+ }
+
+ /**
+ * Creates a {@link SucklessRecording} from a {@link PendingRecording} and recording ID in a
+ * finalized state.
+ *
+ * This can be used if there was an error setting up the active recording and it would not
+ * be able to be started.
+ *
+ * The recording ID is expected to be unique to the recorder that generated the pending
+ * recording.
+ */
+ @NonNull
+ static SucklessRecording createFinalizedFrom(@NonNull SucklessPendingRecording pendingRecording,
+ long recordingId) {
+ Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
+ return new SucklessRecording(pendingRecording.getRecorder(),
+ recordingId,
+ pendingRecording.getOutputOptions(),
+ /*finalizedOnCreation=*/true);
+ }
+
+ @NonNull
+ OutputOptions getOutputOptions() {
+ return mOutputOptions;
+ }
+
+ /**
+ * Pauses the current recording if active.
+ *
+ * Successful pausing of a recording will generate a {@link VideoRecordEvent.Pause} event
+ * which will be sent to the listener passed to
+ * {@link PendingRecording#start(Executor, Consumer)}.
+ *
+ * If the recording has already been paused or has been finalized internally, this is a
+ * no-op.
+ *
+ * @throws IllegalStateException if the recording has been stopped with
+ * {@link #close()} or {@link #stop()}.
+ */
+ public void pause() {
+ if (mIsClosed.get()) {
+ throw new IllegalStateException("The recording has been stopped.");
+ }
+ mRecorder.pause(this);
+ }
+
+ /**
+ * Resumes the current recording if paused.
+ *
+ * Successful resuming of a recording will generate a {@link VideoRecordEvent.Resume} event
+ * which will be sent to the listener passed to
+ * {@link PendingRecording#start(Executor, Consumer)}.
+ *
+ * If the recording is active or has been finalized internally, this is a no-op.
+ *
+ * @throws IllegalStateException if the recording has been stopped with
+ * {@link #close()} or {@link #stop()}.
+ */
+ public void resume() {
+ if (mIsClosed.get()) {
+ throw new IllegalStateException("The recording has been stopped.");
+ }
+ mRecorder.resume(this);
+ }
+
+ /**
+ * Stops the recording, as if calling {@link #close()}.
+ *
+ * This method is equivalent to calling {@link #close()}.
+ */
+ public void stop() {
+ close();
+ }
+
+ /**
+ * Close this recording.
+ *
+ * Once {@link #stop()} or {@code close()} is called, all methods for controlling the state of
+ * this recording besides {@link #stop()} or {@code close()} will throw an
+ * {@link IllegalStateException}.
+ *
+ * Once an active recording has been closed, the next recording can be started with
+ * {@link PendingRecording#start(Executor, Consumer)}.
+ *
+ * This method is idempotent; if the recording has already been closed or has been
+ * finalized internally, calling {@link #stop()} or {@code close()} is a no-op.
+ *
+ * This method is invoked automatically on active recording instances managed by the {@code
+ * try-with-resources} statement.
+ */
+ @Override
+ public void close() {
+ mCloseGuard.close();
+ if (mIsClosed.getAndSet(true)) {
+ return;
+ }
+ mRecorder.stop(this);
+ }
+
+ @Override
+ @SuppressWarnings("GenericException") // super.finalize() throws Throwable
+ protected void finalize() throws Throwable {
+ try {
+ mCloseGuard.warnIfOpen();
+ stop();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /** Returns the recording ID which is unique to the recorder that generated this recording. */
+ long getRecordingId() {
+ return mRecordingId;
+ }
+
+ /**
+ * Returns whether the recording is closed.
+ *
+ * The returned value does not reflect the state of the recording; it only reflects
+ * whether {@link #stop()} or {@link #close()} was called on this object.
+ *
+ * The state of the recording should be checked from the listener passed to
+ * {@link PendingRecording#start(Executor, Consumer)}. Once the active recording is
+ * stopped, a {@link VideoRecordEvent.Finalize} event will be sent to the listener.
+ *
+ * @hide
+ */
+ @RestrictTo(LIBRARY_GROUP)
+ public boolean isClosed() {
+ return mIsClosed.get();
+ }
+}
+
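Not part of the patch: a hedged sketch of driving the controls documented above. `pendingRecording` is assumed to come from the recorder's `prepareRecording(...)`; the names follow the stock CameraX API, which the patched `Suckless*` variants mirror.

```
import androidx.camera.video.PendingRecording
import androidx.camera.video.VideoRecordEvent
import java.util.concurrent.Executors

// Starts a recording, exercises pause/resume, then stops it. stop() is
// equivalent to close(), and any further stop()/close() call is a no-op.
fun runExampleRecording(pendingRecording: PendingRecording) {
    val listenerExecutor = Executors.newSingleThreadExecutor()
    val recording = pendingRecording.start(listenerExecutor) { event ->
        if (event is VideoRecordEvent.Finalize && event.hasError()) {
            println("Recording finalized with error code ${event.error}")
        }
    }
    recording.pause()   // emits VideoRecordEvent.Pause if the recording was active
    recording.resume()  // emits VideoRecordEvent.Resume if it was paused
    recording.stop()
}
```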
diff --git a/app/src/main/java/sushi/hardcore/droidfs/CameraActivity.kt b/app/src/main/java/sushi/hardcore/droidfs/CameraActivity.kt
index bd96e35..5cc8b19 100644
--- a/app/src/main/java/sushi/hardcore/droidfs/CameraActivity.kt
+++ b/app/src/main/java/sushi/hardcore/droidfs/CameraActivity.kt
@@ -19,10 +19,24 @@ import android.widget.RelativeLayout
import android.widget.Toast
import androidx.annotation.RequiresApi
import androidx.camera.camera2.interop.Camera2CameraInfo
-import androidx.camera.core.*
+import androidx.camera.core.AspectRatio
+import androidx.camera.core.Camera
+import androidx.camera.core.CameraSelector
+import androidx.camera.core.FocusMeteringAction
+import androidx.camera.core.ImageCapture
+import androidx.camera.core.ImageCaptureException
+import androidx.camera.core.Preview
+import androidx.camera.core.UseCase
import androidx.camera.extensions.ExtensionMode
import androidx.camera.extensions.ExtensionsManager
import androidx.camera.lifecycle.ProcessCameraProvider
+import androidx.camera.video.MuxerOutputOptions
+import androidx.camera.video.Quality
+import androidx.camera.video.QualitySelector
+import androidx.camera.video.SucklessRecorder
+import androidx.camera.video.SucklessRecording
+import androidx.camera.video.VideoCapture
+import androidx.camera.video.VideoRecordEvent
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import androidx.lifecycle.lifecycleScope
@@ -32,8 +46,8 @@ import sushi.hardcore.droidfs.databinding.ActivityCameraBinding
import sushi.hardcore.droidfs.filesystems.EncryptedVolume
import sushi.hardcore.droidfs.util.IntentUtils
import sushi.hardcore.droidfs.util.PathUtils
+import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
import sushi.hardcore.droidfs.video_recording.SeekableWriter
-import sushi.hardcore.droidfs.video_recording.VideoCapture
import sushi.hardcore.droidfs.widgets.CustomAlertDialogBuilder
import sushi.hardcore.droidfs.widgets.EditTextDialog
import java.io.ByteArrayInputStream
@@ -42,6 +56,7 @@ import java.text.SimpleDateFormat
import java.util.*
import java.util.concurrent.Executor
+@SuppressLint("RestrictedApi")
class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
companion object {
private const val CAMERA_PERMISSION_REQUEST_CODE = 0
@@ -73,11 +88,17 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
private lateinit var cameraSelector: CameraSelector
private val cameraPreview = Preview.Builder().build()
private var imageCapture: ImageCapture? = null
- private var videoCapture: VideoCapture? = null
+ private var videoCapture: VideoCapture<SucklessRecorder>? = null
+ private var videoRecorder: SucklessRecorder? = null
+ private var videoRecording: SucklessRecording? = null
private var camera: Camera? = null
private var resolutions: List<Size>? = null
private var currentResolutionIndex: Int = 0
private var currentResolution: Size? = null
+ private val aspectRatios = arrayOf(AspectRatio.RATIO_16_9, AspectRatio.RATIO_4_3)
+ private var currentAspectRatioIndex = 0
+ private var qualities: List<Quality>? = null
+ private var currentQualityIndex = -1
private var captureMode = ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
private var isBackCamera = true
private var isInVideoMode = false
@@ -118,50 +139,76 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
}
binding.imageCaptureMode.setOnClickListener {
- val currentIndex = if (captureMode == ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY) {
- 0
- } else {
- 1
- }
- CustomAlertDialogBuilder(this, theme)
- .setTitle(R.string.camera_optimization)
- .setSingleChoiceItems(arrayOf(getString(R.string.maximize_quality), getString(R.string.minimize_latency)), currentIndex) { dialog, which ->
- val resId: Int
- val newCaptureMode = if (which == 0) {
- resId = R.drawable.icon_high_quality
- ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
- } else {
- resId = R.drawable.icon_speed
- ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY
- }
- if (newCaptureMode != captureMode) {
- captureMode = newCaptureMode
- binding.imageCaptureMode.setImageResource(resId)
- if (!isInVideoMode) {
- cameraProvider.unbind(imageCapture)
- refreshImageCapture()
- cameraProvider.bindToLifecycle(this, cameraSelector, imageCapture)
+ if (isInVideoMode) {
+ qualities?.let { qualities ->
+ val qualityNames = qualities.map {
+ when (it) {
+ Quality.UHD -> "UHD"
+ Quality.FHD -> "FHD"
+ Quality.HD -> "HD"
+ Quality.SD -> "SD"
+ else -> throw IllegalArgumentException("Invalid quality: $it")
}
- }
- dialog.dismiss()
+ }.toTypedArray()
+ CustomAlertDialogBuilder(this, theme)
+ .setTitle("Choose quality:")
+ .setSingleChoiceItems(qualityNames, currentQualityIndex) { dialog, which ->
+ currentQualityIndex = which
+ rebindUseCases()
+ dialog.dismiss()
+ }
+ .setNegativeButton(R.string.cancel, null)
+ .show()
}
- .setNegativeButton(R.string.cancel, null)
- .show()
- }
- binding.imageRatio.setOnClickListener {
- resolutions?.let {
+ } else {
CustomAlertDialogBuilder(this, theme)
- .setTitle(R.string.choose_resolution)
- .setSingleChoiceItems(it.map { size -> size.toString() }.toTypedArray(), currentResolutionIndex) { dialog, which ->
- currentResolution = resolutions!![which]
- currentResolutionIndex = which
- setupCamera()
+ .setTitle(R.string.camera_optimization)
+ .setSingleChoiceItems(
+ arrayOf(getString(R.string.maximize_quality), getString(R.string.minimize_latency)),
+ if (captureMode == ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY) 0 else 1
+ ) { dialog, which ->
+ val newCaptureMode = if (which == 0) {
+ ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
+ } else {
+ ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY
+ }
+ if (newCaptureMode != captureMode) {
+ captureMode = newCaptureMode
+ setCaptureModeIcon()
+ rebindUseCases()
+ }
dialog.dismiss()
}
.setNegativeButton(R.string.cancel, null)
.show()
}
}
+ binding.imageRatio.setOnClickListener {
+ if (isInVideoMode) {
+ CustomAlertDialogBuilder(this, theme)
+ .setTitle("Aspect ratio:")
+ .setSingleChoiceItems(arrayOf("16:9", "4:3"), currentAspectRatioIndex) { dialog, which ->
+ currentAspectRatioIndex = which
+ rebindUseCases()
+ dialog.dismiss()
+ }
+ .setNegativeButton(R.string.cancel, null)
+ .show()
+ } else {
+ resolutions?.let {
+ CustomAlertDialogBuilder(this, theme)
+ .setTitle(R.string.choose_resolution)
+ .setSingleChoiceItems(it.map { size -> size.toString() }.toTypedArray(), currentResolutionIndex) { dialog, which ->
+ currentResolution = resolutions!![which]
+ currentResolutionIndex = which
+ rebindUseCases()
+ dialog.dismiss()
+ }
+ .setNegativeButton(R.string.cancel, null)
+ .show()
+ }
+ }
+ }
binding.imageTimer.setOnClickListener {
with (EditTextDialog(this, R.string.enter_timer_duration) {
try {
@@ -207,7 +254,7 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
}
binding.imageModeSwitch.setOnClickListener {
isInVideoMode = !isInVideoMode
- setupCamera()
+ rebindUseCases()
binding.imageFlash.setImageResource(if (isInVideoMode) {
binding.recordVideoButton.visibility = View.VISIBLE
binding.takePhotoButton.visibility = View.GONE
@@ -219,6 +266,7 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
binding.imageModeSwitch.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.icon_photo)?.mutate()?.also {
it.setTint(ContextCompat.getColor(this, R.color.neutralIconTint))
})
+ setCaptureModeIcon()
imageCapture?.flashMode = ImageCapture.FLASH_MODE_OFF
R.drawable.icon_flash_off
} else {
@@ -243,6 +291,7 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
true
}
resolutions = null
+ qualities = null
setupCamera()
}
binding.takePhotoButton.onClick = ::onClickTakePhoto
@@ -299,6 +348,18 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
}
}
+ private fun setCaptureModeIcon() {
+ binding.imageCaptureMode.setImageResource(if (isInVideoMode) {
+ R.drawable.icon_high_quality
+ } else {
+ if (captureMode == ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY) {
+ R.drawable.icon_speed
+ } else {
+ R.drawable.icon_high_quality
+ }
+ })
+ }
+
private fun adaptPreviewSize(resolution: Size) {
val screenWidth = resources.displayMetrics.widthPixels
val screenHeight = resources.displayMetrics.heightPixels
@@ -327,43 +388,49 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
}
private fun refreshVideoCapture() {
- videoCapture = VideoCapture.Builder().apply {
- currentResolution?.let {
- setTargetResolution(it)
- }
- }.build()
+ val recorderBuilder = SucklessRecorder.Builder()
+ .setExecutor(executor)
+ .setAspectRatio(aspectRatios[currentAspectRatioIndex])
+ if (currentQualityIndex != -1) {
+ recorderBuilder.setQualitySelector(QualitySelector.from(qualities!![currentQualityIndex]))
+ }
+ videoRecorder = recorderBuilder.build()
+ videoCapture = VideoCapture.withOutput(videoRecorder!!)
+ }
+
+ private fun rebindUseCases(): UseCase {
+ cameraProvider.unbindAll()
+ val currentUseCase = (if (isInVideoMode) {
+ refreshVideoCapture()
+ camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, videoCapture)
+ if (qualities == null) {
+ qualities = QualitySelector.getSupportedQualities(camera!!.cameraInfo)
+ }
+ videoCapture
+ } else {
+ refreshImageCapture()
+ camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, imageCapture)
+ if (resolutions == null) {
+ val info = Camera2CameraInfo.from(camera!!.cameraInfo)
+ val cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
+ val characteristics = cameraManager.getCameraCharacteristics(info.cameraId)
+ characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)?.let { streamConfigurationMap ->
+ resolutions = streamConfigurationMap.getOutputSizes(imageCapture!!.imageFormat).map { it.swap() }
+ }
+ }
+ imageCapture
+ })!!
+ adaptPreviewSize(currentUseCase.attachedSurfaceResolution!!.swap())
+ return currentUseCase
}
- @SuppressLint("RestrictedApi")
private fun setupCamera() {
if (permissionsGranted && ::extensionsManager.isInitialized && ::cameraProvider.isInitialized) {
cameraSelector = if (isBackCamera){ CameraSelector.DEFAULT_BACK_CAMERA } else { CameraSelector.DEFAULT_FRONT_CAMERA }
if (extensionsManager.isExtensionAvailable(cameraSelector, ExtensionMode.AUTO)) {
cameraSelector = extensionsManager.getExtensionEnabledCameraSelector(cameraSelector, ExtensionMode.AUTO)
}
-
- cameraProvider.unbindAll()
-
- val currentUseCase = (if (isInVideoMode) {
- refreshVideoCapture()
- camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, videoCapture)
- videoCapture
- } else {
- refreshImageCapture()
- camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, imageCapture)
- imageCapture
- })!!
-
- adaptPreviewSize(currentResolution ?: currentUseCase.attachedSurfaceResolution!!.swap())
-
- if (resolutions == null) {
- val info = Camera2CameraInfo.from(camera!!.cameraInfo)
- val cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
- val characteristics = cameraManager.getCameraCharacteristics(info.cameraId)
- characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)?.let { streamConfigurationMap ->
- resolutions = streamConfigurationMap.getOutputSizes(currentUseCase.imageFormat).map { it.swap() }
- }
- }
+ rebindUseCases()
}
}
@@ -431,36 +498,60 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
@SuppressLint("MissingPermission")
private fun onClickRecordVideo() {
if (isRecording) {
- videoCapture?.stopRecording()
- isRecording = false
+ videoRecording?.stop()
} else if (!isWaitingForTimer) {
val path = getOutputPath(true)
startTimerThen {
- val fileHandle = encryptedVolume.openFile(path)
- videoCapture?.startRecording(VideoCapture.OutputFileOptions(object : SeekableWriter {
- var offset = 0L
- override fun write(byteArray: ByteArray) {
- offset += encryptedVolume.write(fileHandle, offset, byteArray, 0, byteArray.size.toLong())
+ var withAudio = true
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
+ withAudio = false
}
- override fun seek(offset: Long) {
- this.offset = offset
+ }
+ videoRecording = videoRecorder?.prepareRecording(
+ this,
+ MuxerOutputOptions(
+ FFmpegMuxer(object : SeekableWriter {
+ private val fileHandle = encryptedVolume.openFile(path)
+ private var offset = 0L
+
+ override fun close() {
+ encryptedVolume.closeFile(fileHandle)
+ }
+
+ override fun seek(offset: Long) {
+ this.offset = offset
+ }
+
+ override fun write(buffer: ByteArray) {
+ offset += encryptedVolume.write(fileHandle, offset, buffer, 0, buffer.size.toLong())
+ }
+ })
+ )
+ )?.apply {
+ if (withAudio) {
+ withAudioEnabled()
}
- override fun close() {
- encryptedVolume.closeFile(fileHandle)
+ }?.start(executor) {
+ when (it) {
+ is VideoRecordEvent.Start -> {
+ binding.recordVideoButton.setImageResource(R.drawable.stop_recording_video_button)
+ isRecording = true
+ }
+ is VideoRecordEvent.Finalize -> {
+ if (it.hasError()) {
+ it.cause?.printStackTrace()
+ Toast.makeText(applicationContext, it.cause?.message, Toast.LENGTH_SHORT).show()
+ videoRecording?.close()
+ videoRecording = null
+ } else {
+ Toast.makeText(applicationContext, getString(R.string.video_save_success, path), Toast.LENGTH_SHORT).show()
+ }
+ binding.recordVideoButton.setImageResource(R.drawable.record_video_button)
+ isRecording = false
+ }
}
- }), executor, object : VideoCapture.OnVideoSavedCallback {
- override fun onVideoSaved() {
- Toast.makeText(applicationContext, getString(R.string.video_save_success, path), Toast.LENGTH_SHORT).show()
- binding.recordVideoButton.setImageResource(R.drawable.record_video_button)
- }
- override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
- Toast.makeText(applicationContext, message, Toast.LENGTH_SHORT).show()
- cause?.printStackTrace()
- binding.recordVideoButton.setImageResource(R.drawable.record_video_button)
- }
- })
- binding.recordVideoButton.setImageResource(R.drawable.stop_recording_video_button)
- isRecording = true
+ }
}
}
}
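For orientation only (not part of the patch), a condensed sketch of the pipeline the activity now wires up: camera frames flow into the recorder, which hands encoded samples to `FFmpegMuxer`, which streams the container through a `SeekableWriter`. The class names are taken from the changes above; the helper itself is hypothetical.

```
import androidx.camera.video.SucklessRecorder
import androidx.camera.video.VideoCapture
import java.util.concurrent.Executor

// Returns the recorder plus the VideoCapture use case that gets bound alongside
// the preview. Each capture is then prepared with
// recorder.prepareRecording(context, MuxerOutputOptions(FFmpegMuxer(writer))),
// where `writer` is the EncryptedVolume-backed SeekableWriter shown above.
fun buildVideoPipeline(executor: Executor): Pair<SucklessRecorder, VideoCapture<SucklessRecorder>> {
    val recorder = SucklessRecorder.Builder()
        .setExecutor(executor)
        .build()
    return recorder to VideoCapture.withOutput(recorder)
}
```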
diff --git a/app/src/main/java/sushi/hardcore/droidfs/video_recording/MediaMuxer.kt b/app/src/main/java/sushi/hardcore/droidfs/video_recording/FFmpegMuxer.kt
similarity index 50%
rename from app/src/main/java/sushi/hardcore/droidfs/video_recording/MediaMuxer.kt
rename to app/src/main/java/sushi/hardcore/droidfs/video_recording/FFmpegMuxer.kt
index 2cd6d35..940a512 100644
--- a/app/src/main/java/sushi/hardcore/droidfs/video_recording/MediaMuxer.kt
+++ b/app/src/main/java/sushi/hardcore/droidfs/video_recording/FFmpegMuxer.kt
@@ -2,9 +2,10 @@ package sushi.hardcore.droidfs.video_recording
import android.media.MediaCodec
import android.media.MediaFormat
+import androidx.camera.video.MediaMuxer
import java.nio.ByteBuffer
-class MediaMuxer(val writer: SeekableWriter) {
+class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
external fun allocContext(): Long
external fun addVideoTrack(formatContext: Long, bitrate: Int, width: Int, height: Int, orientationHint: Int): Int
external fun addAudioTrack(formatContext: Long, bitrate: Int, sampleRate: Int, channelCount: Int): Int
@@ -13,75 +14,70 @@ class MediaMuxer(val writer: SeekableWriter) {
external fun writeTrailer(formatContext: Long)
external fun release(formatContext: Long)
- companion object {
- const val VIDEO_TRACK_INDEX = 0
- const val AUDIO_TRACK_INDEX = 1
- }
-
var formatContext: Long?
- var orientationHint = 0
- var realVideoTrackIndex: Int? = null
- var audioFrameSize: Int? = null
- var firstPts: Long? = null
- private var audioPts = 0L
+ var orientation = 0
+ private var videoTrackIndex: Int? = null
+ private var audioTrackIndex: Int? = null
+ private var firstPts: Long? = null
init {
System.loadLibrary("mux")
formatContext = allocContext()
}
- fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
+ override fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
val byteArray = ByteArray(bufferInfo.size)
buffer.get(byteArray)
if (firstPts == null) {
firstPts = bufferInfo.presentationTimeUs
}
- if (trackIndex == AUDIO_TRACK_INDEX) {
- writePacket(formatContext!!, byteArray, audioPts, -1, false)
- audioPts += audioFrameSize!!
- } else {
- writePacket(
- formatContext!!, byteArray, bufferInfo.presentationTimeUs - firstPts!!, realVideoTrackIndex!!,
- bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME != 0
- )
- }
+ writePacket(
+ formatContext!!, byteArray, bufferInfo.presentationTimeUs - firstPts!!, trackIndex,
+ bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME != 0
+ )
}
- fun addTrack(format: MediaFormat): Int {
- val mime = format.getString("mime")!!.split('/')
- val bitrate = format.getInteger("bitrate")
+ override fun addTrack(mediaFormat: MediaFormat): Int {
+ val mime = mediaFormat.getString("mime")!!.split('/')
+ val bitrate = mediaFormat.getInteger("bitrate")
return if (mime[0] == "audio") {
- audioFrameSize = addAudioTrack(
+ addAudioTrack(
formatContext!!,
bitrate,
- format.getInteger("sample-rate"),
- format.getInteger("channel-count")
- )
- AUDIO_TRACK_INDEX
+ mediaFormat.getInteger("sample-rate"),
+ mediaFormat.getInteger("channel-count")
+ ).also {
+ audioTrackIndex = it
+ }
} else {
- realVideoTrackIndex = addVideoTrack(
+ addVideoTrack(
formatContext!!,
bitrate,
- format.getInteger("width"),
- format.getInteger("height"),
- orientationHint
- )
- VIDEO_TRACK_INDEX
+ mediaFormat.getInteger("width"),
+ mediaFormat.getInteger("height"),
+ orientation
+ ).also {
+ videoTrackIndex = it
+ }
}
}
- fun start() {
+ override fun start() {
writeHeaders(formatContext!!)
}
- fun stop() {
+ override fun stop() {
writeTrailer(formatContext!!)
}
- fun release() {
+
+ override fun setOrientationHint(degree: Int) {
+ orientation = degree
+ }
+
+ override fun release() {
writer.close()
release(formatContext!!)
firstPts = null
- audioPts = 0
formatContext = null
}
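Not part of the patch: the call order the `MediaMuxer` interface implies for `FFmpegMuxer`, sketched in Kotlin. It assumes the native `mux` library is loadable and that the `MediaFormat` carries the keys `addTrack` reads (`mime`, `bitrate`, and width/height for video); the sample buffer and its `BufferInfo` would come from a `MediaCodec` encoder.

```
import android.media.MediaCodec
import android.media.MediaFormat
import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
import java.nio.ByteBuffer

// One video sample end-to-end through the muxer implementation above.
fun muxOneSample(muxer: FFmpegMuxer, videoFormat: MediaFormat,
                 sample: ByteBuffer, info: MediaCodec.BufferInfo) {
    muxer.setOrientationHint(90)                // read by addTrack() for the video stream
    val track = muxer.addTrack(videoFormat)     // native track index
    muxer.start()                               // writeHeaders()
    muxer.writeSampleData(track, sample, info)  // timestamps are rebased to the first PTS
    muxer.stop()                                // writeTrailer()
    muxer.release()                             // closes the SeekableWriter and frees the context
}
```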
diff --git a/app/src/main/java/sushi/hardcore/droidfs/video_recording/SeekableWriter.kt b/app/src/main/java/sushi/hardcore/droidfs/video_recording/SeekableWriter.kt
index e0b1c4f..8159529 100644
--- a/app/src/main/java/sushi/hardcore/droidfs/video_recording/SeekableWriter.kt
+++ b/app/src/main/java/sushi/hardcore/droidfs/video_recording/SeekableWriter.kt
@@ -1,7 +1,7 @@
package sushi.hardcore.droidfs.video_recording
interface SeekableWriter {
- fun write(byteArray: ByteArray)
+ fun write(buffer: ByteArray)
fun seek(offset: Long)
fun close()
}
\ No newline at end of file
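Not part of the patch: a minimal `SeekableWriter` backed by `RandomAccessFile`, shown only to illustrate the contract in isolation. The app's real implementation (in `CameraActivity` above) writes through `EncryptedVolume`; this plain-file variant is purely hypothetical.

```
import sushi.hardcore.droidfs.video_recording.SeekableWriter
import java.io.RandomAccessFile

// Plain-file writer honoring the same write/seek/close contract that the
// FFmpeg muxer drives through its custom I/O callbacks.
class PlainFileWriter(path: String) : SeekableWriter {
    private val file = RandomAccessFile(path, "rw")

    override fun write(buffer: ByteArray) {
        file.write(buffer)   // appends at the current file position
    }

    override fun seek(offset: Long) {
        file.seek(offset)    // the muxer repositions the stream, e.g. when finalizing headers
    }

    override fun close() {
        file.close()
    }
}
```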
diff --git a/app/src/main/java/sushi/hardcore/droidfs/video_recording/VideoCapture.java b/app/src/main/java/sushi/hardcore/droidfs/video_recording/VideoCapture.java
deleted file mode 100644
index 94c44ec..0000000
--- a/app/src/main/java/sushi/hardcore/droidfs/video_recording/VideoCapture.java
+++ /dev/null
@@ -1,1778 +0,0 @@
-/*
- * Copyright (C) 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package sushi.hardcore.droidfs.video_recording;
-
-import static androidx.camera.core.impl.ImageOutputConfig.OPTION_DEFAULT_RESOLUTION;
-import static androidx.camera.core.impl.ImageOutputConfig.OPTION_MAX_RESOLUTION;
-import static androidx.camera.core.impl.ImageOutputConfig.OPTION_SUPPORTED_RESOLUTIONS;
-import static androidx.camera.core.impl.ImageOutputConfig.OPTION_TARGET_ASPECT_RATIO;
-import static androidx.camera.core.impl.ImageOutputConfig.OPTION_TARGET_RESOLUTION;
-import static androidx.camera.core.impl.ImageOutputConfig.OPTION_TARGET_ROTATION;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_CAMERA_SELECTOR;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_CAPTURE_CONFIG_UNPACKER;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_DEFAULT_CAPTURE_CONFIG;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_DEFAULT_SESSION_CONFIG;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_SESSION_CONFIG_UNPACKER;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_SURFACE_OCCUPANCY_PRIORITY;
-import static androidx.camera.core.impl.UseCaseConfig.OPTION_ZSL_DISABLED;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_AUDIO_BIT_RATE;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_AUDIO_CHANNEL_COUNT;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_AUDIO_MIN_BUFFER_SIZE;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_AUDIO_SAMPLE_RATE;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_BIT_RATE;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_INTRA_FRAME_INTERVAL;
-import static androidx.camera.core.impl.VideoCaptureConfig.OPTION_VIDEO_FRAME_RATE;
-import static androidx.camera.core.internal.TargetConfig.OPTION_TARGET_CLASS;
-import static androidx.camera.core.internal.TargetConfig.OPTION_TARGET_NAME;
-import static androidx.camera.core.internal.ThreadConfig.OPTION_BACKGROUND_EXECUTOR;
-import static androidx.camera.core.internal.UseCaseEventConfig.OPTION_USE_CASE_EVENT_CALLBACK;
-
-import android.Manifest;
-import android.media.AudioFormat;
-import android.media.AudioRecord;
-import android.media.CamcorderProfile;
-import android.media.MediaCodec;
-import android.media.MediaCodec.BufferInfo;
-import android.media.MediaCodecInfo;
-import android.media.MediaCodecInfo.CodecCapabilities;
-import android.media.MediaFormat;
-import android.media.MediaRecorder.AudioSource;
-import android.os.Bundle;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.os.Looper;
-import android.os.ParcelFileDescriptor;
-import android.provider.MediaStore;
-import android.util.Pair;
-import android.util.Size;
-import android.view.Display;
-import android.view.Surface;
-
-import androidx.annotation.DoNotInline;
-import androidx.annotation.GuardedBy;
-import androidx.annotation.IntDef;
-import androidx.annotation.NonNull;
-import androidx.annotation.Nullable;
-import androidx.annotation.RequiresApi;
-import androidx.annotation.RequiresPermission;
-import androidx.annotation.RestrictTo;
-import androidx.annotation.RestrictTo.Scope;
-import androidx.annotation.UiThread;
-import androidx.annotation.VisibleForTesting;
-import androidx.camera.core.AspectRatio;
-import androidx.camera.core.CameraSelector;
-import androidx.camera.core.CameraXThreads;
-import androidx.camera.core.ImageCapture;
-import androidx.camera.core.Logger;
-import androidx.camera.core.UseCase;
-import androidx.camera.core.impl.CameraInternal;
-import androidx.camera.core.impl.CaptureConfig;
-import androidx.camera.core.impl.Config;
-import androidx.camera.core.impl.ConfigProvider;
-import androidx.camera.core.impl.DeferrableSurface;
-import androidx.camera.core.impl.ImageOutputConfig;
-import androidx.camera.core.impl.ImageOutputConfig.RotationValue;
-import androidx.camera.core.impl.ImmediateSurface;
-import androidx.camera.core.impl.MutableConfig;
-import androidx.camera.core.impl.MutableOptionsBundle;
-import androidx.camera.core.impl.OptionsBundle;
-import androidx.camera.core.impl.SessionConfig;
-import androidx.camera.core.impl.UseCaseConfig;
-import androidx.camera.core.impl.UseCaseConfigFactory;
-import androidx.camera.core.impl.utils.executor.CameraXExecutors;
-import androidx.camera.core.internal.ThreadConfig;
-import androidx.concurrent.futures.CallbackToFutureAdapter;
-import androidx.concurrent.futures.CallbackToFutureAdapter.Completer;
-import androidx.core.util.Preconditions;
-
-import com.google.common.util.concurrent.ListenableFuture;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.UUID;
-import java.util.concurrent.Executor;
-import java.util.concurrent.RejectedExecutionException;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-
-/**
- * A use case for taking a video.
- *
- * This class is designed for simple video capturing. It gives basic configuration of the
- * recorded video such as resolution and file format.
- *
- * @hide
- */
-@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-@RestrictTo(Scope.LIBRARY_GROUP)
-public final class VideoCapture extends UseCase {
-
- ////////////////////////////////////////////////////////////////////////////////////////////
- // [UseCase lifetime constant] - Stays constant for the lifetime of the UseCase. Which means
- // they could be created in the constructor.
- ////////////////////////////////////////////////////////////////////////////////////////////
-
- /**
- * An unknown error occurred.
- *
- * See message parameter in onError callback or log for more details.
- */
- public static final int ERROR_UNKNOWN = 0;
- /**
- * An error occurred with encoder state, either when trying to change state or when an
- * unexpected state change occurred.
- */
- public static final int ERROR_ENCODER = 1;
- /** An error with muxer state such as during creation or when stopping. */
- public static final int ERROR_MUXER = 2;
- /**
- * An error indicating start recording was called when video recording is still in progress.
- */
- public static final int ERROR_RECORDING_IN_PROGRESS = 3;
- /**
- * An error indicating the file saving operations.
- */
- public static final int ERROR_FILE_IO = 4;
- /**
- * An error indicating this VideoCapture is not bound to a camera.
- */
- public static final int ERROR_INVALID_CAMERA = 5;
- /**
- * An error indicating the video file is too short.
- *
- * The output file will be deleted if the OutputFileOptions is backed by File or uri.
- */
- public static final int ERROR_RECORDING_TOO_SHORT = 6;
-
- /**
- * Provides a static configuration with implementation-agnostic options.
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- public static final Defaults DEFAULT_CONFIG = new Defaults();
- private static final String TAG = "VideoCapture";
- /** Amount of time to wait for dequeuing a buffer from the videoEncoder. */
- private static final int DEQUE_TIMEOUT_USEC = 10000;
- /** Android preferred mime type for AVC video. */
- private static final String VIDEO_MIME_TYPE = "video/avc";
- private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm";
- /** Camcorder profiles quality list */
- private static final int[] CamcorderQuality = {
- CamcorderProfile.QUALITY_2160P,
- CamcorderProfile.QUALITY_1080P,
- CamcorderProfile.QUALITY_720P,
- CamcorderProfile.QUALITY_480P
- };
-
- private final BufferInfo mVideoBufferInfo = new BufferInfo();
- private final Object mMuxerLock = new Object();
- private final AtomicBoolean mEndOfVideoStreamSignal = new AtomicBoolean(true);
- private final AtomicBoolean mEndOfAudioStreamSignal = new AtomicBoolean(true);
- private final AtomicBoolean mEndOfAudioVideoSignal = new AtomicBoolean(true);
- private final BufferInfo mAudioBufferInfo = new BufferInfo();
- /** For record the first sample written time. */
- @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
- public final AtomicBoolean mIsFirstVideoKeyFrameWrite = new AtomicBoolean(false);
- @VisibleForTesting(otherwise = VisibleForTesting.PRIVATE)
- public final AtomicBoolean mIsFirstAudioSampleWrite = new AtomicBoolean(false);
-
- ////////////////////////////////////////////////////////////////////////////////////////////
- // [UseCase attached constant] - Is only valid when the UseCase is attached to a camera.
- ////////////////////////////////////////////////////////////////////////////////////////////
-
- /** Thread on which all encoding occurs. */
- private HandlerThread mVideoHandlerThread;
- private Handler mVideoHandler;
- /** Thread on which audio encoding occurs. */
- private HandlerThread mAudioHandlerThread;
- private Handler mAudioHandler;
-
- @NonNull
- MediaCodec mVideoEncoder;
- @NonNull
- private MediaCodec mAudioEncoder;
- @Nullable
- private ListenableFuture<Void> mRecordingFuture = null;
- @NonNull
- private SessionConfig.Builder mSessionConfigBuilder = new SessionConfig.Builder();
-
- ////////////////////////////////////////////////////////////////////////////////////////////
- // [UseCase attached dynamic] - Can change but is only available when the UseCase is attached.
- ////////////////////////////////////////////////////////////////////////////////////////////
-
- /** The muxer that writes the encoding data to file. */
- @GuardedBy("mMuxerLock")
- private MediaMuxer mMuxer;
- private final AtomicBoolean mMuxerStarted = new AtomicBoolean(false);
- /** The index of the video track used by the muxer. */
- @GuardedBy("mMuxerLock")
- private int mVideoTrackIndex;
- /** The index of the audio track used by the muxer. */
- @GuardedBy("mMuxerLock")
- private int mAudioTrackIndex;
- /** Surface the camera writes to, which the videoEncoder uses as input. */
- Surface mCameraSurface;
-
- /** audio raw data */
- @Nullable
- private volatile AudioRecord mAudioRecorder;
- private volatile int mAudioBufferSize;
- private volatile boolean mIsRecording = false;
- private int mAudioChannelCount;
- private int mAudioSampleRate;
- private int mAudioBitRate;
- private DeferrableSurface mDeferrableSurface;
- @SuppressWarnings("WeakerAccess") /* synthetic accessor */
- private volatile ParcelFileDescriptor mParcelFileDescriptor;
- private final AtomicBoolean mIsAudioEnabled = new AtomicBoolean(true);
-
- private VideoEncoderInitStatus mVideoEncoderInitStatus =
- VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_UNINITIALIZED;
- @Nullable
- private Throwable mVideoEncoderErrorMessage;
-
- /**
- * Creates a new video capture use case from the given configuration.
- *
- * @param config for this use case instance
- */
- VideoCapture(@NonNull VideoCaptureConfig config) {
- super(config);
- }
-
- /** Creates a {@link MediaFormat} using parameters from the configuration */
- private static MediaFormat createVideoMediaFormat(VideoCaptureConfig config, Size resolution) {
- MediaFormat format =
- MediaFormat.createVideoFormat(
- VIDEO_MIME_TYPE, resolution.getWidth(), resolution.getHeight());
- format.setInteger(MediaFormat.KEY_COLOR_FORMAT, CodecCapabilities.COLOR_FormatSurface);
- format.setInteger(MediaFormat.KEY_BIT_RATE, config.getBitRate());
- format.setInteger(MediaFormat.KEY_FRAME_RATE, config.getVideoFrameRate());
- format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, config.getIFrameInterval());
-
- return format;
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @Nullable
- public UseCaseConfig<?> getDefaultConfig(boolean applyDefaultConfig,
- @NonNull UseCaseConfigFactory factory) {
- Config captureConfig = factory.getConfig(
- UseCaseConfigFactory.CaptureType.VIDEO_CAPTURE,
- ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY);
-
- if (applyDefaultConfig) {
- captureConfig = Config.mergeConfigs(captureConfig, DEFAULT_CONFIG.getConfig());
- }
-
- return captureConfig == null ? null :
- getUseCaseConfigBuilder(captureConfig).getUseCaseConfig();
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @SuppressWarnings("WrongConstant")
- @Override
- @RestrictTo(Scope.LIBRARY_GROUP)
- public void onAttached() {
- mVideoHandlerThread = new HandlerThread(CameraXThreads.TAG + "video encoding thread");
- mAudioHandlerThread = new HandlerThread(CameraXThreads.TAG + "audio encoding thread");
-
- // video thread start
- mVideoHandlerThread.start();
- mVideoHandler = new Handler(mVideoHandlerThread.getLooper());
-
- // audio thread start
- mAudioHandlerThread.start();
- mAudioHandler = new Handler(mAudioHandlerThread.getLooper());
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @Override
- @RequiresPermission(Manifest.permission.RECORD_AUDIO)
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- protected Size onSuggestedResolutionUpdated(@NonNull Size suggestedResolution) {
- if (mCameraSurface != null) {
- mVideoEncoder.stop();
- mVideoEncoder.release();
- mAudioEncoder.stop();
- mAudioEncoder.release();
- releaseCameraSurface(false);
- }
-
- try {
- mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
- mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
- } catch (IOException e) {
- throw new IllegalStateException("Unable to create MediaCodec due to: " + e.getCause());
- }
-
- setupEncoder(getCameraId(), suggestedResolution);
- // VideoCapture has to be active to apply SessionConfig's template type.
- notifyActive();
- return suggestedResolution;
- }
-
- /**
- * Starts recording video, which continues until {@link VideoCapture#stopRecording()} is
- * called.
- *
- * startRecording() is asynchronous. Users need to check for errors through the
- * {@link OnVideoSavedCallback#onError(int, String, Throwable)} callback.
- *
- * @param outputFileOptions Location to save the video capture
- * @param executor The executor in which the callback methods will be run.
- * @param callback Callback invoked when saving the recorded video completes or fails.
- */
- @SuppressWarnings("ObjectToString")
- @RequiresPermission(Manifest.permission.RECORD_AUDIO)
- public void startRecording(
- @NonNull OutputFileOptions outputFileOptions, @NonNull Executor executor,
- @NonNull OnVideoSavedCallback callback) {
- if (Looper.getMainLooper() != Looper.myLooper()) {
- CameraXExecutors.mainThreadExecutor().execute(() -> startRecording(outputFileOptions,
- executor, callback));
- return;
- }
- Logger.i(TAG, "startRecording");
- mIsFirstVideoKeyFrameWrite.set(false);
- mIsFirstAudioSampleWrite.set(false);
-
- OnVideoSavedCallback postListener = new VideoSavedListenerWrapper(executor, callback);
-
- CameraInternal attachedCamera = getCamera();
- if (attachedCamera == null) {
- // Not bound. Notify callback.
- postListener.onError(ERROR_INVALID_CAMERA,
- "Not bound to a Camera [" + VideoCapture.this + "]", null);
- return;
- }
-
- // Check video encoder initialization status, if there is any error happened
- // return error callback directly.
- if (mVideoEncoderInitStatus
- == VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INSUFFICIENT_RESOURCE
- || mVideoEncoderInitStatus
- == VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED
- || mVideoEncoderInitStatus
- == VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_RESOURCE_RECLAIMED) {
- postListener.onError(ERROR_ENCODER, "Video encoder initialization failed before start"
- + " recording ", mVideoEncoderErrorMessage);
- return;
- }
-
- if (!mEndOfAudioVideoSignal.get()) {
- postListener.onError(
- ERROR_RECORDING_IN_PROGRESS, "Video recording is still in progress!",
- null);
- return;
- }
-
- if (mIsAudioEnabled.get()) {
- try {
- // Audio input start
- if (mAudioRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
- mAudioRecorder.startRecording();
- }
- } catch (IllegalStateException e) {
- // Disable the audio if the audio input cannot start, and continue the recording
- // without audio.
- Logger.i(TAG,
- "AudioRecorder cannot start recording, disable audio." + e.getMessage());
- mIsAudioEnabled.set(false);
- releaseAudioInputResource();
- }
-
- // Gets the AudioRecorder's state
- if (mAudioRecorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
- Logger.i(TAG,
- "AudioRecorder startRecording failed - incorrect state: "
- + mAudioRecorder.getRecordingState());
- mIsAudioEnabled.set(false);
- releaseAudioInputResource();
- }
- }
-
- AtomicReference<Completer<Void>> recordingCompleterRef = new AtomicReference<>();
- mRecordingFuture = CallbackToFutureAdapter.getFuture(
- completer -> {
- recordingCompleterRef.set(completer);
- return "startRecording";
- });
- Completer<Void> recordingCompleter =
- Preconditions.checkNotNull(recordingCompleterRef.get());
-
- mRecordingFuture.addListener(() -> {
- mRecordingFuture = null;
- // Do the setup of the videoEncoder at the end of video recording instead of at the
- // start of recording because it requires attaching a new Surface. This causes a
- // glitch so we don't want that to incur latency at the start of capture.
- if (getCamera() != null) {
- // Ensure the use case is bound. Asynchronous stopping procedure may occur after
- // the use case is unbound, i.e. after onDetached().
- setupEncoder(getCameraId(), getAttachedSurfaceResolution());
- notifyReset();
- }
- }, CameraXExecutors.mainThreadExecutor());
-
- try {
- // video encoder start
- Logger.i(TAG, "videoEncoder start");
- mVideoEncoder.start();
-
- // audio encoder start
- if (mIsAudioEnabled.get()) {
- Logger.i(TAG, "audioEncoder start");
- mAudioEncoder.start();
- }
- } catch (IllegalStateException e) {
- recordingCompleter.set(null);
- postListener.onError(ERROR_ENCODER, "Audio/Video encoder start fail", e);
- return;
- }
-
- synchronized (mMuxerLock) {
- mMuxer = new MediaMuxer(outputFileOptions.mWriter);
- mMuxer.setOrientationHint(getRelativeRotation(attachedCamera));
- }
-
- mEndOfVideoStreamSignal.set(false);
- mEndOfAudioStreamSignal.set(false);
- mEndOfAudioVideoSignal.set(false);
- mIsRecording = true;
-
- // Attach Surface to repeating request.
- mSessionConfigBuilder.clearSurfaces();
- mSessionConfigBuilder.addSurface(mDeferrableSurface);
- updateSessionConfig(mSessionConfigBuilder.build());
- notifyUpdated();
-
- if (mIsAudioEnabled.get()) {
- mAudioHandler.post(() -> audioEncode(postListener));
- }
-
- String cameraId = getCameraId();
- Size resolution = getAttachedSurfaceResolution();
- mVideoHandler.post(
- () -> {
- boolean errorOccurred = videoEncode(postListener, cameraId, resolution,
- outputFileOptions);
- if (!errorOccurred) {
- postListener.onVideoSaved();
- }
- recordingCompleter.set(null);
- });
- }
-
- /**
- * Stops recording video, this must be called after {@link
- * VideoCapture#startRecording(OutputFileOptions, Executor, OnVideoSavedCallback)} is
- * called.
- *
- * stopRecording() is an asynchronous API. Users need to wait for {@link
- * OnVideoSavedCallback#onVideoSaved()} or
- * {@link OnVideoSavedCallback#onError(int, String, Throwable)} to be called
- * before starting a new recording.
- */
- public void stopRecording() {
- if (Looper.getMainLooper() != Looper.myLooper()) {
- CameraXExecutors.mainThreadExecutor().execute(() -> stopRecording());
- return;
- }
- Logger.i(TAG, "stopRecording");
-
- mSessionConfigBuilder.clearSurfaces();
- mSessionConfigBuilder.addNonRepeatingSurface(mDeferrableSurface);
- updateSessionConfig(mSessionConfigBuilder.build());
- notifyUpdated();
-
- if (mIsRecording) {
- if (mIsAudioEnabled.get()) {
- // Stop audio encoder thread, and wait video encoder and muxer stop.
- mEndOfAudioStreamSignal.set(true);
- } else {
- // Audio is disabled, stop video encoder thread directly.
- mEndOfVideoStreamSignal.set(true);
- }
- }
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- public void onDetached() {
- stopRecording();
-
- if (mRecordingFuture != null) {
- mRecordingFuture.addListener(() -> releaseResources(),
- CameraXExecutors.mainThreadExecutor());
- } else {
- releaseResources();
- }
- }
-
- private void releaseResources() {
- mVideoHandlerThread.quitSafely();
-
- // audio encoder release
- releaseAudioInputResource();
-
- if (mCameraSurface != null) {
- releaseCameraSurface(true);
- }
- }
-
- private void releaseAudioInputResource() {
- mAudioHandlerThread.quitSafely();
- if (mAudioEncoder != null) {
- mAudioEncoder.release();
- mAudioEncoder = null;
- }
-
- if (mAudioRecorder != null) {
- mAudioRecorder.release();
- mAudioRecorder = null;
- }
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @NonNull
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- public UseCaseConfig.Builder<?, ?, ?> getUseCaseConfigBuilder(@NonNull Config config) {
- return Builder.fromConfig(config);
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @UiThread
- @Override
- public void onStateDetached() {
- stopRecording();
- }
-
- @UiThread
- private void releaseCameraSurface(final boolean releaseVideoEncoder) {
- if (mDeferrableSurface == null) {
- return;
- }
-
- final MediaCodec videoEncoder = mVideoEncoder;
-
- // Calling close should allow termination future to complete and close the surface with
- // the listener that was added after constructing the DeferrableSurface.
- mDeferrableSurface.close();
- mDeferrableSurface.getTerminationFuture().addListener(
- () -> {
- if (releaseVideoEncoder && videoEncoder != null) {
- videoEncoder.release();
- }
- }, CameraXExecutors.mainThreadExecutor());
-
- if (releaseVideoEncoder) {
- mVideoEncoder = null;
- }
- mCameraSurface = null;
- mDeferrableSurface = null;
- }
-
- /**
- * Sets the desired rotation of the output video.
- *
- * <p>In most cases this should be set to the current rotation returned by {@link
- * Display#getRotation()}.
- *
- * @param rotation Desired rotation of the output video.
- */
- public void setTargetRotation(@RotationValue int rotation) {
- setTargetRotationInternal(rotation);
- }
-
- /**
- * Setup the {@link MediaCodec} for encoding video from a camera {@link Surface} and encoding
- * audio from selected audio source.
- */
- @UiThread
- @SuppressWarnings("WeakerAccess") /* synthetic accessor */
- @RequiresPermission(Manifest.permission.RECORD_AUDIO)
- void setupEncoder(@NonNull String cameraId, @NonNull Size resolution) {
- VideoCaptureConfig config = (VideoCaptureConfig) getCurrentConfig();
-
- // video encoder setup
- mVideoEncoder.reset();
- mVideoEncoderInitStatus = VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_UNINITIALIZED;
-
- // Configures a Video encoder, if there is any exception, will abort follow up actions
- try {
- mVideoEncoder.configure(
- createVideoMediaFormat(config, resolution), /*surface*/
- null, /*crypto*/
- null,
- MediaCodec.CONFIGURE_FLAG_ENCODE);
- } catch (MediaCodec.CodecException e) {
- int errorCode = 0;
- if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
- errorCode = Api23Impl.getCodecExceptionErrorCode(e);
- String diagnosticInfo = e.getDiagnosticInfo();
- if (errorCode == MediaCodec.CodecException.ERROR_INSUFFICIENT_RESOURCE) {
- Logger.i(TAG,
- "CodecException: code: " + errorCode + " diagnostic: "
- + diagnosticInfo);
- mVideoEncoderInitStatus =
- VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INSUFFICIENT_RESOURCE;
- } else if (errorCode == MediaCodec.CodecException.ERROR_RECLAIMED) {
- Logger.i(TAG,
- "CodecException: code: " + errorCode + " diagnostic: "
- + diagnosticInfo);
- mVideoEncoderInitStatus =
- VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_RESOURCE_RECLAIMED;
- }
- } else {
- mVideoEncoderInitStatus =
- VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED;
- }
- mVideoEncoderErrorMessage = e;
- return;
- } catch (IllegalArgumentException | IllegalStateException e) {
- mVideoEncoderInitStatus =
- VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED;
- mVideoEncoderErrorMessage = e;
- return;
- }
-
- if (mCameraSurface != null) {
- releaseCameraSurface(false);
- }
- Surface cameraSurface = mVideoEncoder.createInputSurface();
- mCameraSurface = cameraSurface;
-
- mSessionConfigBuilder = SessionConfig.Builder.createFrom(config);
-
- if (mDeferrableSurface != null) {
- mDeferrableSurface.close();
- }
- mDeferrableSurface = new ImmediateSurface(mCameraSurface, resolution, getImageFormat());
- mDeferrableSurface.getTerminationFuture().addListener(
- cameraSurface::release, CameraXExecutors.mainThreadExecutor()
- );
-
- mSessionConfigBuilder.addNonRepeatingSurface(mDeferrableSurface);
-
- mSessionConfigBuilder.addErrorListener(new SessionConfig.ErrorListener() {
- @Override
- @RequiresPermission(Manifest.permission.RECORD_AUDIO)
- public void onError(@NonNull SessionConfig sessionConfig,
- @NonNull SessionConfig.SessionError error) {
- // Ensure the attached camera has not changed before calling setupEncoder.
- // TODO(b/143915543): Ensure this never gets called by a camera that is not attached
- // to this use case so we don't need to do this check.
- if (isCurrentCamera(cameraId)) {
- // Only reset the pipeline when the bound camera is the same.
- setupEncoder(cameraId, resolution);
- notifyReset();
- }
- }
- });
-
- updateSessionConfig(mSessionConfigBuilder.build());
-
- // audio encoder setup
- // reset audio inout flag
- mIsAudioEnabled.set(true);
-
- setAudioParametersByCamcorderProfile(resolution, cameraId);
- mAudioEncoder.reset();
- mAudioEncoder.configure(
- createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
-
- if (mAudioRecorder != null) {
- mAudioRecorder.release();
- }
- mAudioRecorder = autoConfigAudioRecordSource(config);
- // check mAudioRecorder
- if (mAudioRecorder == null) {
- Logger.e(TAG, "AudioRecord object cannot initialized correctly!");
- mIsAudioEnabled.set(false);
- }
-
- synchronized (mMuxerLock) {
- mVideoTrackIndex = -1;
- mAudioTrackIndex = -1;
- }
- mIsRecording = false;
- }
-
- /**
- * Write a buffer that has been encoded to file.
- *
- * @param bufferIndex the index of the buffer in the videoEncoder that has available data
- * @return returns true if this buffer is the end of the stream
- */
- private boolean writeVideoEncodedBuffer(int bufferIndex) {
- if (bufferIndex < 0) {
- Logger.e(TAG, "Output buffer should not have negative index: " + bufferIndex);
- return false;
- }
- // Get data from buffer
- ByteBuffer outputBuffer = mVideoEncoder.getOutputBuffer(bufferIndex);
-
- // Check if buffer is valid, if not then return
- if (outputBuffer == null) {
- Logger.d(TAG, "OutputBuffer was null.");
- return false;
- }
-
- // Write data to mMuxer if available
- if (mMuxerStarted.get()) {
- if (mVideoBufferInfo.size > 0) {
- outputBuffer.position(mVideoBufferInfo.offset);
- outputBuffer.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
- mVideoBufferInfo.presentationTimeUs = (System.nanoTime() / 1000);
-
- synchronized (mMuxerLock) {
- if (!mIsFirstVideoKeyFrameWrite.get()) {
- boolean isKeyFrame =
- (mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
- if (isKeyFrame) {
- Logger.i(TAG,
- "First video key frame written.");
- mIsFirstVideoKeyFrameWrite.set(true);
- } else {
- // Request a sync frame immediately
- final Bundle syncFrame = new Bundle();
- syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
- mVideoEncoder.setParameters(syncFrame);
- }
- }
- mMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, mVideoBufferInfo);
- }
- } else {
- Logger.i(TAG, "mVideoBufferInfo.size <= 0, index " + bufferIndex);
- }
- }
-
- // Release data
- mVideoEncoder.releaseOutputBuffer(bufferIndex, false);
-
- // Return true if EOS is set
- return (mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
- }
-
- private boolean writeAudioEncodedBuffer(int bufferIndex) {
- ByteBuffer buffer = getOutputBuffer(mAudioEncoder, bufferIndex);
- buffer.position(mAudioBufferInfo.offset);
- if (mMuxerStarted.get()) {
- try {
- if (mAudioBufferInfo.size > 0 && mAudioBufferInfo.presentationTimeUs > 0) {
- synchronized (mMuxerLock) {
- if (!mIsFirstAudioSampleWrite.get()) {
- Logger.i(TAG, "First audio sample written.");
- mIsFirstAudioSampleWrite.set(true);
- }
- mMuxer.writeSampleData(mAudioTrackIndex, buffer, mAudioBufferInfo);
- }
- } else {
- Logger.i(TAG, "mAudioBufferInfo size: " + mAudioBufferInfo.size + " "
- + "presentationTimeUs: " + mAudioBufferInfo.presentationTimeUs);
- }
- } catch (Exception e) {
- Logger.e(
- TAG,
- "audio error:size="
- + mAudioBufferInfo.size
- + "/offset="
- + mAudioBufferInfo.offset
- + "/timeUs="
- + mAudioBufferInfo.presentationTimeUs);
- e.printStackTrace();
- }
- }
- mAudioEncoder.releaseOutputBuffer(bufferIndex, false);
- return (mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
- }
-
- /**
- * Encoding which runs indefinitely until end of stream is signaled. This should not run on the
- * main thread otherwise it will cause the application to block.
- *
- * @return returns {@code true} if an error condition occurred, otherwise returns {@code false}
- */
- boolean videoEncode(@NonNull OnVideoSavedCallback videoSavedCallback, @NonNull String cameraId,
- @NonNull Size resolution,
- @NonNull OutputFileOptions outputFileOptions) {
- // Main encoding loop. Exits on end of stream.
- boolean errorOccurred = false;
- boolean videoEos = false;
- while (!videoEos && !errorOccurred) {
- // Check for end of stream from main thread
- if (mEndOfVideoStreamSignal.get()) {
- mVideoEncoder.signalEndOfInputStream();
- mEndOfVideoStreamSignal.set(false);
- }
-
- // Dequeue a buffer to check for a processing step
- int outputBufferId =
- mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, DEQUE_TIMEOUT_USEC);
- switch (outputBufferId) {
- case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
- if (mMuxerStarted.get()) {
- videoSavedCallback.onError(
- ERROR_ENCODER,
- "Unexpected change in video encoding format.",
- null);
- errorOccurred = true;
- }
-
- synchronized (mMuxerLock) {
- mVideoTrackIndex = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
-
- if ((mIsAudioEnabled.get() && mAudioTrackIndex >= 0
- && mVideoTrackIndex >= 0)
- || (!mIsAudioEnabled.get() && mVideoTrackIndex >= 0)) {
- Logger.i(TAG, "MediaMuxer started on video encode thread and audio "
- + "enabled: " + mIsAudioEnabled);
- mMuxer.start();
- mMuxerStarted.set(true);
- }
- }
- break;
- case MediaCodec.INFO_TRY_AGAIN_LATER:
- // Timed out. Just wait until the next attempt to dequeue.
- break;
- default:
- videoEos = writeVideoEncodedBuffer(outputBufferId);
- }
- }
-
- try {
- Logger.i(TAG, "videoEncoder stop");
- mVideoEncoder.stop();
- } catch (IllegalStateException e) {
- videoSavedCallback.onError(ERROR_ENCODER,
- "Video encoder stop failed!", e);
- errorOccurred = true;
- }
-
- try {
- // new MediaMuxer instance required for each new file written, and release current one.
- synchronized (mMuxerLock) {
- if (mMuxer != null) {
- if (mMuxerStarted.get()) {
- Logger.i(TAG, "Muxer already started");
- mMuxer.stop();
- }
- mMuxer.release();
- mMuxer = null;
- }
- }
-
- // A final check of the recording result: if the recorded file has no key
- // frame, the video file is not playable, so onError() needs to be called
- // and the file will be removed.
-
- boolean checkResult = removeRecordingResultIfNoVideoKeyFrameArrived(outputFileOptions);
-
- if (!checkResult) {
- videoSavedCallback.onError(ERROR_RECORDING_TOO_SHORT,
- "The file has no video key frame.", null);
- errorOccurred = true;
- }
- } catch (IllegalStateException e) {
- // The video encoder has not got the key frame yet.
- Logger.i(TAG, "muxer stop IllegalStateException: " + System.currentTimeMillis());
- Logger.i(TAG,
- "muxer stop exception, mIsFirstVideoKeyFrameWrite: "
- + mIsFirstVideoKeyFrameWrite.get());
- if (mIsFirstVideoKeyFrameWrite.get()) {
- // If the muxer throws IllegalStateException at this moment and the key frame
- // has been received, this is reported as a muxer stop failure.
- // Otherwise, this error will be ERROR_RECORDING_TOO_SHORT.
- videoSavedCallback.onError(ERROR_MUXER, "Muxer stop failed!", e);
- } else {
- videoSavedCallback.onError(ERROR_RECORDING_TOO_SHORT,
- "The file has no video key frame.", null);
- }
- errorOccurred = true;
- }
-
- if (mParcelFileDescriptor != null) {
- try {
- mParcelFileDescriptor.close();
- mParcelFileDescriptor = null;
- } catch (IOException e) {
- videoSavedCallback.onError(ERROR_MUXER, "File descriptor close failed!", e);
- errorOccurred = true;
- }
- }
-
- mMuxerStarted.set(false);
-
- // notify the UI thread that the video recording has finished
- mEndOfAudioVideoSignal.set(true);
- mIsFirstVideoKeyFrameWrite.set(false);
-
- Logger.i(TAG, "Video encode thread end.");
- return errorOccurred;
- }
-
- boolean audioEncode(OnVideoSavedCallback videoSavedCallback) {
- // Audio encoding loop. Exits on end of stream.
- boolean audioEos = false;
- int outIndex;
- long lastAudioTimestamp = 0;
- while (!audioEos && mIsRecording) {
- // Check for end of stream from main thread
- if (mEndOfAudioStreamSignal.get()) {
- mEndOfAudioStreamSignal.set(false);
- mIsRecording = false;
- }
-
- // dequeue an audio input buffer
- if (mAudioEncoder != null && mAudioRecorder != null) {
- try {
- int index = mAudioEncoder.dequeueInputBuffer(-1);
- if (index >= 0) {
- final ByteBuffer buffer = getInputBuffer(mAudioEncoder, index);
- buffer.clear();
- int length = mAudioRecorder.read(buffer, mAudioBufferSize);
- if (length > 0) {
- mAudioEncoder.queueInputBuffer(
- index,
- 0,
- length,
- (System.nanoTime() / 1000),
- mIsRecording ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
- }
- }
- } catch (MediaCodec.CodecException e) {
- Logger.i(TAG, "audio dequeueInputBuffer CodecException " + e.getMessage());
- } catch (IllegalStateException e) {
- Logger.i(TAG,
- "audio dequeueInputBuffer IllegalStateException " + e.getMessage());
- }
-
- // start to dequeue audio output buffer
- do {
- outIndex = mAudioEncoder.dequeueOutputBuffer(mAudioBufferInfo, 0);
- switch (outIndex) {
- case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
- synchronized (mMuxerLock) {
- mAudioTrackIndex = mMuxer.addTrack(mAudioEncoder.getOutputFormat());
- if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) {
- Logger.i(TAG, "MediaMuxer start on audio encoder thread.");
- mMuxer.start();
- mMuxerStarted.set(true);
- }
- }
- break;
- case MediaCodec.INFO_TRY_AGAIN_LATER:
- break;
- default:
- // Drop the audio frame if it is out of order, i.e. earlier than the last
- // written frame.
- if (mAudioBufferInfo.presentationTimeUs > lastAudioTimestamp) {
- audioEos = writeAudioEncodedBuffer(outIndex);
- lastAudioTimestamp = mAudioBufferInfo.presentationTimeUs;
- } else {
- Logger.w(TAG,
- "Drops frame, current frame's timestamp "
- + mAudioBufferInfo.presentationTimeUs
- + " is earlier that last frame "
- + lastAudioTimestamp);
- // Releases this frame from output buffer
- mAudioEncoder.releaseOutputBuffer(outIndex, false);
- }
- }
- } while (outIndex >= 0 && !audioEos); // end of dequeue output buffer
- }
- } // end of while loop
-
- // Audio Stop
- try {
- Logger.i(TAG, "audioRecorder stop");
- mAudioRecorder.stop();
- } catch (IllegalStateException e) {
- videoSavedCallback.onError(
- ERROR_ENCODER, "Audio recorder stop failed!", e);
- }
-
- try {
- mAudioEncoder.stop();
- } catch (IllegalStateException e) {
- videoSavedCallback.onError(ERROR_ENCODER,
- "Audio encoder stop failed!", e);
- }
-
- Logger.i(TAG, "Audio encode thread end");
- // Use AtomicBoolean to signal because MediaCodec.signalEndOfInputStream() is not thread
- // safe
- mEndOfVideoStreamSignal.set(true);
-
- return false;
- }
-
- private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
- return codec.getInputBuffer(index);
- }
-
- private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
- return codec.getOutputBuffer(index);
- }
-
- /** Creates a {@link MediaFormat} using parameters for audio from the configuration */
- private MediaFormat createAudioMediaFormat() {
- MediaFormat format =
- MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, mAudioSampleRate,
- mAudioChannelCount);
- format.setInteger(
- MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
- format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitRate);
-
- return format;
- }
-
- /** Create an AudioRecord object to get raw data */
- @RequiresPermission(Manifest.permission.RECORD_AUDIO)
- private AudioRecord autoConfigAudioRecordSource(VideoCaptureConfig config) {
- // Use channel count to determine stereo vs mono
- int channelConfig =
- mAudioChannelCount == 1
- ? AudioFormat.CHANNEL_IN_MONO
- : AudioFormat.CHANNEL_IN_STEREO;
-
- try {
- // Use only ENCODING_PCM_16BIT because its support is mandatory.
- int bufferSize =
- AudioRecord.getMinBufferSize(mAudioSampleRate, channelConfig,
- AudioFormat.ENCODING_PCM_16BIT);
-
- if (bufferSize <= 0) {
- bufferSize = config.getAudioMinBufferSize();
- }
-
- AudioRecord recorder =
- new AudioRecord(
- AudioSource.CAMCORDER,
- mAudioSampleRate,
- channelConfig,
- AudioFormat.ENCODING_PCM_16BIT,
- bufferSize * 2);
-
- if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
- mAudioBufferSize = bufferSize;
- Logger.i(
- TAG,
- "source: "
- + AudioSource.CAMCORDER
- + " audioSampleRate: "
- + mAudioSampleRate
- + " channelConfig: "
- + channelConfig
- + " audioFormat: "
- + AudioFormat.ENCODING_PCM_16BIT
- + " bufferSize: "
- + bufferSize);
- return recorder;
- }
- } catch (Exception e) {
- Logger.e(TAG, "Exception, keep trying.", e);
- }
- return null;
- }
-
- /** Set audio record parameters by CamcorderProfile */
- @SuppressWarnings("deprecation")
- private void setAudioParametersByCamcorderProfile(Size currentResolution, String cameraId) {
- CamcorderProfile profile;
- boolean isCamcorderProfileFound = false;
-
- try {
- for (int quality : CamcorderQuality) {
- if (CamcorderProfile.hasProfile(Integer.parseInt(cameraId), quality)) {
- profile = CamcorderProfile.get(Integer.parseInt(cameraId), quality);
- if (currentResolution.getWidth() == profile.videoFrameWidth
- && currentResolution.getHeight() == profile.videoFrameHeight) {
- mAudioChannelCount = profile.audioChannels;
- mAudioSampleRate = profile.audioSampleRate;
- mAudioBitRate = profile.audioBitRate;
- isCamcorderProfileFound = true;
- break;
- }
- }
- }
- } catch (NumberFormatException e) {
- Logger.i(TAG, "The camera Id is not an integer because the camera may be a removable "
- + "device. Use the default values for the audio related settings.");
- }
-
- // In case no corresponding camcorder profile can be found, get the default values from
- // VideoCaptureConfig.
- if (!isCamcorderProfileFound) {
- VideoCaptureConfig config = (VideoCaptureConfig) getCurrentConfig();
- mAudioChannelCount = config.getAudioChannelCount();
- mAudioSampleRate = config.getAudioSampleRate();
- mAudioBitRate = config.getAudioBitRate();
- }
- }
-
- private boolean removeRecordingResultIfNoVideoKeyFrameArrived(
- @NonNull OutputFileOptions outputFileOptions) {
- boolean checkKeyFrame;
-
- // 1. There should be one video key frame at least.
- Logger.i(TAG,
- "check Recording Result First Video Key Frame Write: "
- + mIsFirstVideoKeyFrameWrite.get());
- if (!mIsFirstVideoKeyFrameWrite.get()) {
- Logger.i(TAG, "The recording result has no key frame.");
- checkKeyFrame = false;
- } else {
- checkKeyFrame = true;
- }
-
- return checkKeyFrame;
- }
-
- /**
- * Describes the error that occurred during video capture operations.
- *
- * <p>This is a parameter sent to the error callback functions set in listeners such as {@link
- * VideoCapture.OnVideoSavedCallback#onError(int, String, Throwable)}.
- *
- * <p>See message parameter in onError callback or log for more details.
- *
- * @hide
- */
- @IntDef({ERROR_UNKNOWN, ERROR_ENCODER, ERROR_MUXER, ERROR_RECORDING_IN_PROGRESS,
- ERROR_FILE_IO, ERROR_INVALID_CAMERA, ERROR_RECORDING_TOO_SHORT})
- @Retention(RetentionPolicy.SOURCE)
- @RestrictTo(Scope.LIBRARY_GROUP)
- public @interface VideoCaptureError {
- }
-
- enum VideoEncoderInitStatus {
- VIDEO_ENCODER_INIT_STATUS_UNINITIALIZED,
- VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED,
- VIDEO_ENCODER_INIT_STATUS_INSUFFICIENT_RESOURCE,
- VIDEO_ENCODER_INIT_STATUS_RESOURCE_RECLAIMED,
- }
-
- /** Listener containing callbacks for video file I/O events. */
- public interface OnVideoSavedCallback {
- /** Called when the video has been successfully saved. */
- void onVideoSaved();
-
- /** Called when an error occurs while attempting to save the video. */
- void onError(@VideoCaptureError int videoCaptureError, @NonNull String message,
- @Nullable Throwable cause);
- }
-
- /**
- * Provides a base static default configuration for the VideoCapture
- *
- * <p>These values may be overridden by the implementation. They only provide a minimum set of
- * defaults that are implementation independent.
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- public static final class Defaults
- implements ConfigProvider<VideoCaptureConfig> {
- private static final int DEFAULT_VIDEO_FRAME_RATE = 30;
- /** 8Mb/s, the recommended bit rate for 30fps 1080p */
- private static final int DEFAULT_BIT_RATE = 8 * 1024 * 1024;
- /** Seconds between each key frame */
- private static final int DEFAULT_INTRA_FRAME_INTERVAL = 1;
- /** audio bit rate */
- private static final int DEFAULT_AUDIO_BIT_RATE = 64000;
- /** audio sample rate */
- private static final int DEFAULT_AUDIO_SAMPLE_RATE = 8000;
- /** audio channel count */
- private static final int DEFAULT_AUDIO_CHANNEL_COUNT = 1;
- /** audio default minimum buffer size */
- private static final int DEFAULT_AUDIO_MIN_BUFFER_SIZE = 1024;
- /** Current max resolution of VideoCapture is set as FHD */
- private static final Size DEFAULT_MAX_RESOLUTION = new Size(1920, 1080);
- /** Surface occupancy priority to this use case */
- private static final int DEFAULT_SURFACE_OCCUPANCY_PRIORITY = 3;
- private static final int DEFAULT_ASPECT_RATIO = AspectRatio.RATIO_16_9;
-
- private static final VideoCaptureConfig DEFAULT_CONFIG;
-
- static {
- Builder builder = new Builder()
- .setVideoFrameRate(DEFAULT_VIDEO_FRAME_RATE)
- .setBitRate(DEFAULT_BIT_RATE)
- .setIFrameInterval(DEFAULT_INTRA_FRAME_INTERVAL)
- .setAudioBitRate(DEFAULT_AUDIO_BIT_RATE)
- .setAudioSampleRate(DEFAULT_AUDIO_SAMPLE_RATE)
- .setAudioChannelCount(DEFAULT_AUDIO_CHANNEL_COUNT)
- .setAudioMinBufferSize(DEFAULT_AUDIO_MIN_BUFFER_SIZE)
- .setMaxResolution(DEFAULT_MAX_RESOLUTION)
- .setSurfaceOccupancyPriority(DEFAULT_SURFACE_OCCUPANCY_PRIORITY)
- .setTargetAspectRatio(DEFAULT_ASPECT_RATIO);
-
- DEFAULT_CONFIG = builder.getUseCaseConfig();
- }
-
- @NonNull
- @Override
- public VideoCaptureConfig getConfig() {
- return DEFAULT_CONFIG;
- }
- }
-
- private static final class VideoSavedListenerWrapper implements OnVideoSavedCallback {
-
- @NonNull
- Executor mExecutor;
- @NonNull
- OnVideoSavedCallback mOnVideoSavedCallback;
-
- VideoSavedListenerWrapper(@NonNull Executor executor,
- @NonNull OnVideoSavedCallback onVideoSavedCallback) {
- mExecutor = executor;
- mOnVideoSavedCallback = onVideoSavedCallback;
- }
-
- @Override
- public void onVideoSaved() {
- try {
- mExecutor.execute(() -> mOnVideoSavedCallback.onVideoSaved());
- } catch (RejectedExecutionException e) {
- Logger.e(TAG, "Unable to post to the supplied executor.");
- }
- }
-
- @Override
- public void onError(@VideoCaptureError int videoCaptureError, @NonNull String message,
- @Nullable Throwable cause) {
- try {
- mExecutor.execute(
- () -> mOnVideoSavedCallback.onError(videoCaptureError, message, cause));
- } catch (RejectedExecutionException e) {
- Logger.e(TAG, "Unable to post to the supplied executor.");
- }
- }
-
- }
-
- /** Builder for a {@link VideoCapture}. */
- @SuppressWarnings("ObjectToString")
- public static final class Builder
- implements
- UseCaseConfig.Builder<VideoCapture, VideoCaptureConfig, Builder>,
- ImageOutputConfig.Builder<Builder>,
- ThreadConfig.Builder<Builder> {
-
- private final MutableOptionsBundle mMutableConfig;
-
- /** Creates a new Builder object. */
- public Builder() {
- this(MutableOptionsBundle.create());
- }
-
- private Builder(@NonNull MutableOptionsBundle mutableConfig) {
- mMutableConfig = mutableConfig;
-
- Class<?> oldConfigClass =
- mutableConfig.retrieveOption(OPTION_TARGET_CLASS, null);
- if (oldConfigClass != null && !oldConfigClass.equals(VideoCapture.class)) {
- throw new IllegalArgumentException(
- "Invalid target class configuration for "
- + Builder.this
- + ": "
- + oldConfigClass);
- }
-
- setTargetClass(VideoCapture.class);
- }
-
- /**
- * Generates a Builder from another Config object.
- *
- * @param configuration An immutable configuration to pre-populate this builder.
- * @return The new Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- static Builder fromConfig(@NonNull Config configuration) {
- return new Builder(MutableOptionsBundle.from(configuration));
- }
-
-
- /**
- * Generates a Builder from another Config object
- *
- * @param configuration An immutable configuration to pre-populate this builder.
- * @return The new Builder.
- */
- @NonNull
- public static Builder fromConfig(@NonNull VideoCaptureConfig configuration) {
- return new Builder(MutableOptionsBundle.from(configuration));
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public MutableConfig getMutableConfig() {
- return mMutableConfig;
- }
-
- /**
- * {@inheritDoc}
- *
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public VideoCaptureConfig getUseCaseConfig() {
- return new VideoCaptureConfig(OptionsBundle.from(mMutableConfig));
- }
-
- /**
- * Builds an immutable {@link VideoCaptureConfig} from the current state.
- *
- * @return A {@link VideoCaptureConfig} populated with the current state.
- */
- @Override
- @NonNull
- public VideoCapture build() {
- // Error at runtime for using both setTargetResolution and setTargetAspectRatio on
- // the same config.
- if (getMutableConfig().retrieveOption(OPTION_TARGET_ASPECT_RATIO, null) != null
- && getMutableConfig().retrieveOption(OPTION_TARGET_RESOLUTION, null) != null) {
- throw new IllegalArgumentException(
- "Cannot use both setTargetResolution and setTargetAspectRatio on the same "
- + "config.");
- }
- return new VideoCapture(getUseCaseConfig());
- }
-
- /**
- * Sets the recording frames per second.
- *
- * @param videoFrameRate The requested frame rate, in frames per second.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setVideoFrameRate(int videoFrameRate) {
- getMutableConfig().insertOption(OPTION_VIDEO_FRAME_RATE, videoFrameRate);
- return this;
- }
-
- /**
- * Sets the encoding bit rate.
- *
- * @param bitRate The requested bit rate in bits per second.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setBitRate(int bitRate) {
- getMutableConfig().insertOption(OPTION_BIT_RATE, bitRate);
- return this;
- }
-
- /**
- * Sets the number of seconds between each key frame.
- *
- * @param interval The requested interval in seconds.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setIFrameInterval(int interval) {
- getMutableConfig().insertOption(OPTION_INTRA_FRAME_INTERVAL, interval);
- return this;
- }
-
- /**
- * Sets the bit rate of the audio stream.
- *
- * @param bitRate The requested bit rate in bits/s.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setAudioBitRate(int bitRate) {
- getMutableConfig().insertOption(OPTION_AUDIO_BIT_RATE, bitRate);
- return this;
- }
-
- /**
- * Sets the sample rate of the audio stream.
- *
- * @param sampleRate The requested sample rate in Hz.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setAudioSampleRate(int sampleRate) {
- getMutableConfig().insertOption(OPTION_AUDIO_SAMPLE_RATE, sampleRate);
- return this;
- }
-
- /**
- * Sets the number of audio channels.
- *
- * @param channelCount The requested number of audio channels.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setAudioChannelCount(int channelCount) {
- getMutableConfig().insertOption(OPTION_AUDIO_CHANNEL_COUNT, channelCount);
- return this;
- }
-
- /**
- * Sets the audio min buffer size.
- *
- * @param minBufferSize The requested audio minimum buffer size, in bytes.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- public Builder setAudioMinBufferSize(int minBufferSize) {
- getMutableConfig().insertOption(OPTION_AUDIO_MIN_BUFFER_SIZE, minBufferSize);
- return this;
- }
-
- // Implementations of TargetConfig.Builder default methods
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setTargetClass(@NonNull Class<VideoCapture> targetClass) {
- getMutableConfig().insertOption(OPTION_TARGET_CLASS, targetClass);
-
- // If no name is set yet, then generate a unique name
- if (null == getMutableConfig().retrieveOption(OPTION_TARGET_NAME, null)) {
- String targetName = targetClass.getCanonicalName() + "-" + UUID.randomUUID();
- setTargetName(targetName);
- }
-
- return this;
- }
-
- /**
- * Sets the name of the target object being configured, used only for debug logging.
- *
- * The name should be a value that can uniquely identify an instance of the object being
- * configured.
- *
- * <p>If not set, the target name will default to a unique name automatically generated
- * with the class canonical name and random UUID.
- *
- * @param targetName A unique string identifier for the instance of the class being
- * configured.
- * @return the current Builder.
- */
- @Override
- @NonNull
- public Builder setTargetName(@NonNull String targetName) {
- getMutableConfig().insertOption(OPTION_TARGET_NAME, targetName);
- return this;
- }
-
- // Implementations of ImageOutputConfig.Builder default methods
-
- /**
- * Sets the aspect ratio of the intended target for images from this configuration.
- *
- * <p>It is not allowed to set both target aspect ratio and target resolution on the same
- * use case.
- *
- * <p>The target aspect ratio is used as a hint when determining the resulting output aspect
- * ratio which may differ from the request, possibly due to device constraints.
- * Application code should check the resulting output's resolution.
- *
- * <p>If not set, resolutions with aspect ratio 4:3 will be considered in higher
- * priority.
- *
- * @param aspectRatio A {@link AspectRatio} representing the ratio of the
- * target's width and height.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public Builder setTargetAspectRatio(@AspectRatio.Ratio int aspectRatio) {
- getMutableConfig().insertOption(OPTION_TARGET_ASPECT_RATIO, aspectRatio);
- return this;
- }
-
- /**
- * Sets the rotation of the intended target for images from this configuration.
- *
- * <p>This is one of four valid values: {@link Surface#ROTATION_0}, {@link
- * Surface#ROTATION_90}, {@link Surface#ROTATION_180}, {@link Surface#ROTATION_270}.
- * Rotation values are relative to the "natural" rotation, {@link Surface#ROTATION_0}.
- *
- * <p>If not set, the target rotation will default to the value of
- * {@link Display#getRotation()} of the default display at the time the use case is
- * created. The use case is fully created once it has been attached to a camera.
- *
- * @param rotation The rotation of the intended target.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public Builder setTargetRotation(@RotationValue int rotation) {
- getMutableConfig().insertOption(OPTION_TARGET_ROTATION, rotation);
- return this;
- }
-
- /**
- * Sets the resolution of the intended target from this configuration.
- *
- * <p>The target resolution attempts to establish a minimum bound for the image resolution.
- * The actual image resolution will be the closest available resolution in size that is not
- * smaller than the target resolution, as determined by the Camera implementation. However,
- * if no resolution exists that is equal to or larger than the target resolution, the
- * nearest available resolution smaller than the target resolution will be chosen.
- *
- * <p>It is not allowed to set both target aspect ratio and target resolution on the same
- * use case.
- *
- * <p>The target aspect ratio will also be set the same as the aspect ratio of the provided
- * {@link Size}. Make sure to set the target resolution with the correct orientation.
- *
- * @param resolution The target resolution to choose from supported output sizes list.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public Builder setTargetResolution(@NonNull Size resolution) {
- getMutableConfig().insertOption(OPTION_TARGET_RESOLUTION, resolution);
- return this;
- }
-
- /**
- * Sets the default resolution of the intended target from this configuration.
- *
- * @param resolution The default resolution to choose from supported output sizes list.
- * @return The current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public Builder setDefaultResolution(@NonNull Size resolution) {
- getMutableConfig().insertOption(OPTION_DEFAULT_RESOLUTION, resolution);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public Builder setMaxResolution(@NonNull Size resolution) {
- getMutableConfig().insertOption(OPTION_MAX_RESOLUTION, resolution);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setSupportedResolutions(@NonNull List<Pair<Integer, Size[]>> resolutions) {
- getMutableConfig().insertOption(OPTION_SUPPORTED_RESOLUTIONS, resolutions);
- return this;
- }
-
- // Implementations of ThreadConfig.Builder default methods
-
- /**
- * Sets the default executor that will be used for background tasks.
- *
- * If not set, the background executor will default to an automatically generated
- * {@link Executor}.
- *
- * @param executor The executor which will be used for background tasks.
- * @return the current Builder.
- * @hide
- */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setBackgroundExecutor(@NonNull Executor executor) {
- getMutableConfig().insertOption(OPTION_BACKGROUND_EXECUTOR, executor);
- return this;
- }
-
- // Implementations of UseCaseConfig.Builder default methods
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setDefaultSessionConfig(@NonNull SessionConfig sessionConfig) {
- getMutableConfig().insertOption(OPTION_DEFAULT_SESSION_CONFIG, sessionConfig);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setDefaultCaptureConfig(@NonNull CaptureConfig captureConfig) {
- getMutableConfig().insertOption(OPTION_DEFAULT_CAPTURE_CONFIG, captureConfig);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setSessionOptionUnpacker(
- @NonNull SessionConfig.OptionUnpacker optionUnpacker) {
- getMutableConfig().insertOption(OPTION_SESSION_CONFIG_UNPACKER, optionUnpacker);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setCaptureOptionUnpacker(
- @NonNull CaptureConfig.OptionUnpacker optionUnpacker) {
- getMutableConfig().insertOption(OPTION_CAPTURE_CONFIG_UNPACKER, optionUnpacker);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setSurfaceOccupancyPriority(int priority) {
- getMutableConfig().insertOption(OPTION_SURFACE_OCCUPANCY_PRIORITY, priority);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY)
- @Override
- @NonNull
- public Builder setCameraSelector(@NonNull CameraSelector cameraSelector) {
- getMutableConfig().insertOption(OPTION_CAMERA_SELECTOR, cameraSelector);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @Override
- @NonNull
- public Builder setUseCaseEventCallback(
- @NonNull UseCase.EventCallback useCaseEventCallback) {
- getMutableConfig().insertOption(OPTION_USE_CASE_EVENT_CALLBACK, useCaseEventCallback);
- return this;
- }
-
- /** @hide */
- @RestrictTo(Scope.LIBRARY_GROUP)
- @NonNull
- @Override
- public Builder setZslDisabled(boolean disabled) {
- getMutableConfig().insertOption(OPTION_ZSL_DISABLED, disabled);
- return this;
- }
- }
-
- /**
- * Options for saving newly captured video.
- *
- * <p>This class is used to configure the save location and metadata. The save location can be
- * either a {@link File} or {@link MediaStore}. The metadata will be
- * stored with the saved video.
- */
- public static final class OutputFileOptions {
-
- private final SeekableWriter mWriter;
-
- public OutputFileOptions(SeekableWriter writer) {
- mWriter = writer;
- }
- }
-
- /**
- * Nested class to avoid verification errors for methods introduced in Android 6.0 (API 23).
- */
- @RequiresApi(23)
- private static class Api23Impl {
-
- private Api23Impl() {
- }
-
- @DoNotInline
- static int getCodecExceptionErrorCode(MediaCodec.CodecException e) {
- return e.getErrorCode();
- }
- }
-}
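Reviewer note on the removed VideoCapture use case above: both encoder threads gate MediaMuxer.start() on every enabled track having been added first (see the INFO_OUTPUT_FORMAT_CHANGED branches in videoEncode() and audioEncode()), and whichever thread adds its track last performs the start under mMuxerLock. Below is a minimal, hedged Java sketch of that gating pattern; MuxerStartGate and its members are hypothetical names, not part of the removed class.

```java
import java.util.concurrent.atomic.AtomicBoolean;

// Sketch of the "start the muxer only once all enabled tracks are added" pattern
// used by the removed videoEncode()/audioEncode() threads. Names are hypothetical.
final class MuxerStartGate {
    private final Object lock = new Object();          // plays the role of mMuxerLock
    private final AtomicBoolean started = new AtomicBoolean(false);
    private final boolean audioEnabled;
    private int videoTrack = -1;
    private int audioTrack = -1;

    MuxerStartGate(boolean audioEnabled) {
        this.audioEnabled = audioEnabled;
    }

    // Called from the video encoder thread on INFO_OUTPUT_FORMAT_CHANGED.
    void onVideoTrackAdded(int trackIndex, Runnable startMuxer) {
        synchronized (lock) {
            videoTrack = trackIndex;
            maybeStart(startMuxer);
        }
    }

    // Called from the audio encoder thread on INFO_OUTPUT_FORMAT_CHANGED.
    void onAudioTrackAdded(int trackIndex, Runnable startMuxer) {
        synchronized (lock) {
            audioTrack = trackIndex;
            maybeStart(startMuxer);
        }
    }

    private void maybeStart(Runnable startMuxer) {
        boolean ready = videoTrack >= 0 && (!audioEnabled || audioTrack >= 0);
        if (ready && started.compareAndSet(false, true)) {
            startMuxer.run();                           // e.g. mMuxer.start()
        }
    }
}
```

Until the gate opens, the write paths above skip samples because mMuxerStarted is still false, which is also why the removed code keeps requesting a sync frame until the first key frame arrives after the muxer starts.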
diff --git a/app/src/main/java/sushi/hardcore/droidfs/video_recording/VideoCaptureConfig.java b/app/src/main/java/sushi/hardcore/droidfs/video_recording/VideoCaptureConfig.java
deleted file mode 100644
index d00eb1d..0000000
--- a/app/src/main/java/sushi/hardcore/droidfs/video_recording/VideoCaptureConfig.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Copyright 2019 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package sushi.hardcore.droidfs.video_recording;
-
-import androidx.annotation.NonNull;
-import androidx.annotation.RequiresApi;
-import androidx.camera.core.impl.Config;
-import androidx.camera.core.impl.ImageFormatConstants;
-import androidx.camera.core.impl.ImageOutputConfig;
-import androidx.camera.core.impl.OptionsBundle;
-import androidx.camera.core.impl.UseCaseConfig;
-import androidx.camera.core.internal.ThreadConfig;
-
-/**
- * Config for a video capture use case.
- *
- * <p>In the earlier stage, the VideoCapture is deprioritized.
- */
-@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
-public final class VideoCaptureConfig
- implements UseCaseConfig<VideoCapture>,
- ImageOutputConfig,
- ThreadConfig {
-
- // Option Declarations:
- // *********************************************************************************************
-
- public static final Option<Integer> OPTION_VIDEO_FRAME_RATE =
- Option.create("camerax.core.videoCapture.recordingFrameRate", int.class);
- public static final Option<Integer> OPTION_BIT_RATE =
- Option.create("camerax.core.videoCapture.bitRate", int.class);
- public static final Option<Integer> OPTION_INTRA_FRAME_INTERVAL =
- Option.create("camerax.core.videoCapture.intraFrameInterval", int.class);
- public static final Option<Integer> OPTION_AUDIO_BIT_RATE =
- Option.create("camerax.core.videoCapture.audioBitRate", int.class);
- public static final Option<Integer> OPTION_AUDIO_SAMPLE_RATE =
- Option.create("camerax.core.videoCapture.audioSampleRate", int.class);
- public static final Option<Integer> OPTION_AUDIO_CHANNEL_COUNT =
- Option.create("camerax.core.videoCapture.audioChannelCount", int.class);
- public static final Option<Integer> OPTION_AUDIO_MIN_BUFFER_SIZE =
- Option.create("camerax.core.videoCapture.audioMinBufferSize", int.class);
-
- // *********************************************************************************************
-
- private final OptionsBundle mConfig;
-
- public VideoCaptureConfig(@NonNull OptionsBundle config) {
- mConfig = config;
- }
-
- /**
- * Returns the recording frames per second.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getVideoFrameRate(int valueIfMissing) {
- return retrieveOption(OPTION_VIDEO_FRAME_RATE, valueIfMissing);
- }
-
- /**
- * Returns the recording frames per second.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getVideoFrameRate() {
- return retrieveOption(OPTION_VIDEO_FRAME_RATE);
- }
-
- /**
- * Returns the encoding bit rate.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getBitRate(int valueIfMissing) {
- return retrieveOption(OPTION_BIT_RATE, valueIfMissing);
- }
-
- /**
- * Returns the encoding bit rate.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getBitRate() {
- return retrieveOption(OPTION_BIT_RATE);
- }
-
- /**
- * Returns the number of seconds between each key frame.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getIFrameInterval(int valueIfMissing) {
- return retrieveOption(OPTION_INTRA_FRAME_INTERVAL, valueIfMissing);
- }
-
- /**
- * Returns the number of seconds between each key frame.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getIFrameInterval() {
- return retrieveOption(OPTION_INTRA_FRAME_INTERVAL);
- }
-
- /**
- * Returns the audio encoding bit rate.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getAudioBitRate(int valueIfMissing) {
- return retrieveOption(OPTION_AUDIO_BIT_RATE, valueIfMissing);
- }
-
- /**
- * Returns the audio encoding bit rate.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getAudioBitRate() {
- return retrieveOption(OPTION_AUDIO_BIT_RATE);
- }
-
- /**
- * Returns the audio sample rate.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getAudioSampleRate(int valueIfMissing) {
- return retrieveOption(OPTION_AUDIO_SAMPLE_RATE, valueIfMissing);
- }
-
- /**
- * Returns the audio sample rate.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getAudioSampleRate() {
- return retrieveOption(OPTION_AUDIO_SAMPLE_RATE);
- }
-
- /**
- * Returns the audio channel count.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getAudioChannelCount(int valueIfMissing) {
- return retrieveOption(OPTION_AUDIO_CHANNEL_COUNT, valueIfMissing);
- }
-
- /**
- * Returns the audio channel count.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getAudioChannelCount() {
- return retrieveOption(OPTION_AUDIO_CHANNEL_COUNT);
- }
-
- /**
- * Returns the audio minimum buffer size, in bytes.
- *
- * @param valueIfMissing The value to return if this configuration option has not been set.
- * @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
- * configuration.
- */
- public int getAudioMinBufferSize(int valueIfMissing) {
- return retrieveOption(OPTION_AUDIO_MIN_BUFFER_SIZE, valueIfMissing);
- }
-
- /**
- * Returns the audio minimum buffer size, in bytes.
- *
- * @return The stored value, if it exists in this configuration.
- * @throws IllegalArgumentException if the option does not exist in this configuration.
- */
- public int getAudioMinBufferSize() {
- return retrieveOption(OPTION_AUDIO_MIN_BUFFER_SIZE);
- }
-
- /**
- * Retrieves the format of the image that is fed as input.
- *
- * This should always be PRIVATE for VideoCapture.
- */
- @Override
- public int getInputFormat() {
- return ImageFormatConstants.INTERNAL_DEFINED_IMAGE_FORMAT_PRIVATE;
- }
-
- @NonNull
- @Override
- public Config getConfig() {
- return mConfig;
- }
-}
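Reviewer note on the removed VideoCaptureConfig above: it follows CameraX's typed-Option pattern, where each setting is declared once as an Option key and then exposed through a throwing getter and a default-value getter (retrieveOption(option) / retrieveOption(option, valueIfMissing)). The rough Java sketch below only illustrates that pattern; TinyConfig and its members are hypothetical and not CameraX's actual implementation.

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical illustration of the Option/retrieveOption pattern used above.
final class TinyConfig {
    // A typed key: the id gives it identity, the type documents the stored value.
    record Option<T>(String id, Class<T> type) {
        static <T> Option<T> create(String id, Class<T> type) {
            return new Option<>(id, type);
        }
    }

    private final Map<Option<?>, Object> values = new HashMap<>();

    <T> void insertOption(Option<T> option, T value) {
        values.put(option, value);
    }

    // Throwing variant: mirrors getBitRate() and friends.
    <T> T retrieveOption(Option<T> option) {
        T value = retrieveOption(option, null);
        if (value == null) {
            throw new IllegalArgumentException("Option does not exist: " + option.id());
        }
        return value;
    }

    // Default-value variant: mirrors getBitRate(valueIfMissing) and friends.
    @SuppressWarnings("unchecked")
    <T> T retrieveOption(Option<T> option, T valueIfMissing) {
        Object value = values.get(option);
        return value == null ? valueIfMissing : (T) value;
    }
}
```

Usage would look like: Option<Integer> bitRate = TinyConfig.Option.create("videoCapture.bitRate", Integer.class); config.insertOption(bitRate, 8 * 1024 * 1024); config.retrieveOption(bitRate, 0);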
diff --git a/app/src/main/native/libmux.c b/app/src/main/native/libmux.c
index f2b6d0c..364df34 100644
--- a/app/src/main/native/libmux.c
+++ b/app/src/main/native/libmux.c
@@ -31,7 +31,7 @@ int64_t seek(void* opaque, int64_t offset, int whence) {
return offset;
}
-jlong Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_allocContext(JNIEnv *env, jobject thiz) {
+jlong Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_allocContext(JNIEnv *env, jobject thiz) {
const AVOutputFormat *format = av_guess_format("mp4", NULL, NULL);
struct Muxer* muxer = malloc(sizeof(struct Muxer));
(*env)->GetJavaVM(env, &muxer->jvm);
@@ -47,8 +47,8 @@ jlong Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_allocContext(JNIEn
}
JNIEXPORT jint JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addAudioTrack(JNIEnv *env, jobject thiz, jlong format_context, jint bitrate, jint sample_rate,
- jint channel_count) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addAudioTrack(JNIEnv *env, jobject thiz, jlong format_context, jint bitrate, jint sample_rate,
+ jint channel_count) {
const AVCodec* encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
AVCodecContext* codec_context = avcodec_alloc_context3(encoder);
@@ -62,17 +62,16 @@ Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addAudioTrack(JNIEnv *en
codec_context->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
avcodec_open2(codec_context, encoder, NULL);
avcodec_parameters_from_context(stream->codecpar, codec_context);
- int frame_size = codec_context->frame_size;
avcodec_free_context(&codec_context);
- return frame_size;
+ return stream->index;
}
JNIEXPORT jint JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addVideoTrack(JNIEnv *env, jobject thiz,
- jlong format_context,
- jint bitrate, jint width,
- jint height,
- jint orientation_hint) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *env, jobject thiz,
+ jlong format_context,
+ jint bitrate, jint width,
+ jint height,
+ jint orientation_hint) {
AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
stream->codecpar->codec_id = AV_CODEC_ID_H264;
@@ -85,25 +84,22 @@ Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addVideoTrack(JNIEnv *en
}
JNIEXPORT jint JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writeHeaders(JNIEnv *env, jobject thiz, jlong format_context) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeHeaders(JNIEnv *env, jobject thiz, jlong format_context) {
return avformat_write_header((AVFormatContext *) format_context, NULL);
}
JNIEXPORT void JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writePacket(JNIEnv *env, jobject thiz, jlong format_context,
- jbyteArray buffer, jlong pts, jint stream_index,
- jboolean is_key_frame) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writePacket(JNIEnv *env, jobject thiz, jlong format_context,
+ jbyteArray buffer, jlong pts, jint stream_index,
+ jboolean is_key_frame) {
AVPacket* packet = av_packet_alloc();
int size = (*env)->GetArrayLength(env, buffer);
av_new_packet(packet, size);
- packet->pts = pts;
- if (stream_index >= 0) { //video
- packet->stream_index = stream_index;
- AVRational r;
- r.num = 1;
- r.den = 1000000;
- av_packet_rescale_ts(packet, r, ((AVFormatContext *)format_context)->streams[stream_index]->time_base);
- }
+ packet->stream_index = stream_index;
+ AVRational r;
+ r.num = 1;
+ r.den = 1000000;
+ packet->pts = av_rescale_q(pts, r, ((AVFormatContext*)format_context)->streams[stream_index]->time_base);
uint8_t* buff = malloc(size);
(*env)->GetByteArrayRegion(env, buffer, 0, size, (signed char*)buff);
packet->data = buff;
@@ -116,12 +112,12 @@ Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writePacket(JNIEnv *env,
}
JNIEXPORT void JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writeTrailer(JNIEnv *env, jobject thiz, jlong format_context) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeTrailer(JNIEnv *env, jobject thiz, jlong format_context) {
av_write_trailer((AVFormatContext *) format_context);
}
JNIEXPORT void JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_release(JNIEnv *env, jobject thiz, jlong format_context) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_release(JNIEnv *env, jobject thiz, jlong format_context) {
AVFormatContext* fc = (AVFormatContext *) format_context;
av_free(fc->pb->buffer);
free(fc->pb->opaque);
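Reviewer note on the libmux.c hunk above: writePacket() now rescales every packet's presentation timestamp (audio included) with av_rescale_q, converting from the encoders' microsecond clock (time base 1/1000000) to the output stream's time base, instead of only rescaling video via av_packet_rescale_ts. The small Java sketch below shows the same arithmetic, assuming a rational stream time base and ignoring FFmpeg's rounding and overflow handling; the class and method names are hypothetical.

```java
// Hypothetical sketch of the conversion performed by av_rescale_q in the native
// muxer: pts_out = pts_in * src_tb / dst_tb, with src_tb fixed at 1/1_000_000.
final class PtsRescale {
    static long rescaleFromMicros(long ptsMicros, int streamNum, int streamDen) {
        // pts * (1/1_000_000) / (streamNum/streamDen) = pts * streamDen / (1_000_000 * streamNum)
        return ptsMicros * streamDen / (1_000_000L * streamNum);
    }

    public static void main(String[] args) {
        // One second of microseconds in a 1/90000 video time base -> 90000 ticks.
        System.out.println(rescaleFromMicros(1_000_000L, 1, 90_000));
    }
}
```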
diff --git a/build.gradle b/build.gradle
index 900ee6c..430f193 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,5 +1,5 @@
buildscript {
- ext.kotlin_version = "1.7.21"
+ ext.kotlin_version = "1.8.10"
repositories {
google()
mavenCentral()