New CameraX API

Matéo Duparc 2023-04-17 15:52:20 +02:00
parent e6761d1798
commit e918a2f94c
Signed by: hardcoresushi
GPG Key ID: AFE384344A45E13A
19 changed files with 10647 additions and 2171 deletions


@@ -45,16 +45,16 @@ $ git clone --depth=1 https://git.ffmpeg.org/ffmpeg.git
 If you want Gocryptfs support, you need to download OpenSSL:
 ```
 $ cd ../libgocryptfs
-$ wget https://www.openssl.org/source/openssl-1.1.1q.tar.gz
+$ wget https://www.openssl.org/source/openssl-1.1.1t.tar.gz
 ```
 Verify OpenSSL signature:
 ```
-$ wget https://www.openssl.org/source/openssl-1.1.1q.tar.gz.asc
-$ gpg --verify openssl-1.1.1q.tar.gz.asc openssl-1.1.1q.tar.gz
+$ wget https://www.openssl.org/source/openssl-1.1.1t.tar.gz.asc
+$ gpg --verify openssl-1.1.1t.tar.gz.asc openssl-1.1.1t.tar.gz
 ```
 Continue **ONLY** if the signature is **VALID**.
 ```
-$ tar -xzf openssl-1.1.1q.tar.gz
+$ tar -xzf openssl-1.1.1t.tar.gz
 ```
 If you want CryFS support, initialize libcryfs:
 ```
@@ -76,7 +76,7 @@ $ ./build.sh ffmpeg
 This step is only required if you want Gocryptfs support.
 ```
 $ cd app/libgocryptfs
-$ OPENSSL_PATH="./openssl-1.1.1q" ./build.sh
+$ OPENSSL_PATH="./openssl-1.1.1t" ./build.sh
 ```
 ## Compile APKs
 Gradle build libgocryptfs and libcryfs by default.


@@ -81,16 +81,23 @@ android {
             path file('CMakeLists.txt')
         }
     }
+    sourceSets {
+        main {
+            java {
+                exclude 'androidx/camera/video/originals/**'
+            }
+        }
+    }
 }
 dependencies {
     implementation project(":libpdfviewer:app")
+    implementation fileTree(dir: "libs", include: ["*.jar"])
     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
     implementation 'androidx.core:core-ktx:1.9.0'
     implementation "androidx.appcompat:appcompat:1.6.1"
     implementation "androidx.constraintlayout:constraintlayout:2.1.4"
-    def lifecycle_version = "2.5.1"
+    def lifecycle_version = "2.6.0"
     implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:$lifecycle_version"
     implementation "androidx.lifecycle:lifecycle-process:$lifecycle_version"
@@ -101,15 +108,19 @@ dependencies {
     implementation "com.github.bumptech.glide:glide:4.13.2"
     implementation "androidx.biometric:biometric-ktx:1.2.0-alpha05"
-    def exoplayer_version = "2.18.2"
+    def exoplayer_version = "2.18.4"
     implementation "com.google.android.exoplayer:exoplayer-core:$exoplayer_version"
     implementation "com.google.android.exoplayer:exoplayer-ui:$exoplayer_version"
     implementation "androidx.concurrent:concurrent-futures:1.1.0"
-    def camerax_version = "1.2.0-beta02"
+    def camerax_version = "1.3.0-alpha04"
     implementation "androidx.camera:camera-camera2:$camerax_version"
     implementation "androidx.camera:camera-lifecycle:$camerax_version"
     implementation "androidx.camera:camera-view:$camerax_version"
     implementation "androidx.camera:camera-extensions:$camerax_version"
+    def autoValueVersion = "1.10.1"
+    implementation "com.google.auto.value:auto-value-annotations:$autoValueVersion"
+    annotationProcessor "com.google.auto.value:auto-value:$autoValueVersion"
 }


@@ -0,0 +1,14 @@
package androidx.camera.video

import android.media.MediaCodec
import android.media.MediaFormat
import java.nio.ByteBuffer

interface MediaMuxer {
    fun setOrientationHint(degree: Int)
    fun release()
    fun addTrack(mediaFormat: MediaFormat): Int
    fun start()
    fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo)
    fun stop()
}
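This interface mirrors the subset of android.media.MediaMuxer that the recorder actually calls, so the output container no longer has to be produced by the platform muxer. As a hedged illustration only (the adapter name is hypothetical and not part of this commit), an implementation could simply delegate to the platform muxer and write an ordinary MP4 file; DroidFS instead plugs in the FFmpegMuxer defined later in this commit, which streams the container through a SeekableWriter into the encrypted volume.
```kotlin
import android.media.MediaCodec
import android.media.MediaFormat
import android.media.MediaMuxer as PlatformMediaMuxer
import androidx.camera.video.MediaMuxer
import java.nio.ByteBuffer

// Hypothetical adapter: satisfies the interface by delegating every call to the
// platform muxer, i.e. it writes a plain MP4 file at `path`.
class PlatformMuxerAdapter(path: String) : MediaMuxer {
    private val muxer = PlatformMediaMuxer(path, PlatformMediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)

    override fun setOrientationHint(degree: Int) = muxer.setOrientationHint(degree)
    override fun addTrack(mediaFormat: MediaFormat): Int = muxer.addTrack(mediaFormat)
    override fun start() = muxer.start()
    override fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) =
        muxer.writeSampleData(trackIndex, buffer, bufferInfo)
    override fun stop() = muxer.stop()
    override fun release() = muxer.release()
}
```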


@@ -0,0 +1,16 @@
package androidx.camera.video

import android.location.Location

class MuxerOutputOptions(private val mediaMuxer: MediaMuxer): OutputOptions(MuxerOutputOptionsInternal()) {

    private class MuxerOutputOptionsInternal: OutputOptionsInternal() {
        override fun getFileSizeLimit(): Long = FILE_SIZE_UNLIMITED.toLong()
        override fun getDurationLimitMillis(): Long = DURATION_UNLIMITED.toLong()
        override fun getLocation(): Location? = null
    }

    fun getMediaMuxer(): MediaMuxer = mediaMuxer
}
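MuxerOutputOptions simply carries a MediaMuxer implementation into the recorder, with no file-size, duration, or location limits. A small wiring sketch, assuming the FFmpegMuxer and SeekableWriter introduced later in this commit (the helper function is hypothetical):
```kotlin
import androidx.camera.video.MuxerOutputOptions
import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
import sushi.hardcore.droidfs.video_recording.SeekableWriter

// Sketch: any SeekableWriter destination becomes a recorder output by wrapping it
// in an FFmpegMuxer and then in MuxerOutputOptions.
fun outputOptionsFor(writer: SeekableWriter): MuxerOutputOptions =
    MuxerOutputOptions(FFmpegMuxer(writer))
```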


@@ -0,0 +1,181 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.video;
import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import androidx.annotation.CheckResult;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.camera.core.impl.utils.ContextUtil;
import androidx.core.content.PermissionChecker;
import androidx.core.util.Consumer;
import androidx.core.util.Preconditions;
import java.util.concurrent.Executor;
/**
* A recording that can be started at a future time.
*
* <p>A pending recording allows for configuration of a recording before it is started. Once a
* pending recording is started with {@link #start(Executor, Consumer)}, any changes to the pending
* recording will not affect the actual recording; any modifications to the recording will need
* to occur through the controls of the {@link SucklessRecording} class returned by
* {@link #start(Executor, Consumer)}.
*
* <p>A pending recording can be created using one of the {@link Recorder} methods for starting a
* recording such as {@link Recorder#prepareRecording(Context, MediaStoreOutputOptions)}.
* <p>There may be more settings that can only be changed per-recorder instead of per-recording,
* because it requires expensive operations like reconfiguring the camera. For those settings, use
* the {@link Recorder.Builder} methods to configure before creating the {@link Recorder}
* instance, then create the pending recording with it.
*/
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
@SuppressLint("RestrictedApi")
public final class SucklessPendingRecording {
private final Context mContext;
private final SucklessRecorder mRecorder;
private final OutputOptions mOutputOptions;
private Consumer<VideoRecordEvent> mEventListener;
private Executor mListenerExecutor;
private boolean mAudioEnabled = false;
SucklessPendingRecording(@NonNull Context context, @NonNull SucklessRecorder recorder,
@NonNull OutputOptions options) {
// Application context is sufficient for all our needs, so store that to avoid leaking
// unused resources. For attribution, ContextUtil.getApplicationContext() will retain the
// attribution tag from the original context.
mContext = ContextUtil.getApplicationContext(context);
mRecorder = recorder;
mOutputOptions = options;
}
/**
* Returns an application context which was retrieved from the {@link Context} used to
* create this object.
*/
@NonNull
Context getApplicationContext() {
return mContext;
}
@NonNull
SucklessRecorder getRecorder() {
return mRecorder;
}
@NonNull
OutputOptions getOutputOptions() {
return mOutputOptions;
}
@Nullable
Executor getListenerExecutor() {
return mListenerExecutor;
}
@Nullable
Consumer<VideoRecordEvent> getEventListener() {
return mEventListener;
}
boolean isAudioEnabled() {
return mAudioEnabled;
}
/**
* Enables audio to be recorded for this recording.
*
* <p>This method must be called prior to {@link #start(Executor, Consumer)} to enable audio
* in the recording. If this method is not called, the {@link SucklessRecording} generated by
* {@link #start(Executor, Consumer)} will not contain audio, and
* {@link AudioStats#getAudioState()} will always return
* {@link AudioStats#AUDIO_STATE_DISABLED} for all {@link RecordingStats} send to the listener
* set passed to {@link #start(Executor, Consumer)}.
*
* <p>Recording with audio requires the {@link android.Manifest.permission#RECORD_AUDIO}
* permission; without it, recording will fail at {@link #start(Executor, Consumer)} with an
* {@link IllegalStateException}.
*
* @return this pending recording
* @throws IllegalStateException if the {@link Recorder} this recording is associated to
* doesn't support audio.
* @throws SecurityException if the {@link Manifest.permission#RECORD_AUDIO} permission
* is denied for the current application.
*/
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
public SucklessPendingRecording withAudioEnabled() {
// Check permissions and throw a security exception if RECORD_AUDIO is not granted.
if (PermissionChecker.checkSelfPermission(mContext, Manifest.permission.RECORD_AUDIO)
== PermissionChecker.PERMISSION_DENIED) {
throw new SecurityException("Attempted to enable audio for recording but application "
+ "does not have RECORD_AUDIO permission granted.");
}
Preconditions.checkState(mRecorder.isAudioSupported(), "The Recorder this recording is "
+ "associated to doesn't support audio.");
mAudioEnabled = true;
return this;
}
/**
* Starts the recording, making it an active recording.
*
* <p>Only a single recording can be active at a time, so if another recording is active,
* this will throw an {@link IllegalStateException}.
*
* <p>If there are no errors starting the recording, the returned {@link SucklessRecording}
* can be used to {@link SucklessRecording#pause() pause}, {@link SucklessRecording#resume() resume},
* or {@link SucklessRecording#stop() stop} the recording.
*
* <p>Upon successfully starting the recording, a {@link VideoRecordEvent.Start} event will
* be the first event sent to the provided event listener.
*
* <p>If errors occur while starting the recording, a {@link VideoRecordEvent.Finalize} event
* will be the first event sent to the provided listener, and information about the error can
* be found in that event's {@link VideoRecordEvent.Finalize#getError()} method. The returned
* {@link SucklessRecording} will be in a finalized state, and all controls will be no-ops.
*
* <p>If the returned {@link SucklessRecording} is garbage collected, the recording will be
* automatically stopped. A reference to the active recording must be maintained as long as
* the recording needs to be active.
*
* @throws IllegalStateException if the associated Recorder currently has an unfinished
* active recording.
* @param listenerExecutor the executor that the event listener will be run on.
* @param listener the event listener to handle video record events.
*/
@NonNull
@CheckResult
public SucklessRecording start(
@NonNull Executor listenerExecutor,
@NonNull Consumer<VideoRecordEvent> listener) {
Preconditions.checkNotNull(listenerExecutor, "Listener Executor can't be null.");
Preconditions.checkNotNull(listener, "Event listener can't be null");
mListenerExecutor = listenerExecutor;
mEventListener = listener;
return mRecorder.start(this);
}
}
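A hedged Kotlin sketch of the flow this javadoc describes, assuming a SucklessRecorder already bound to the camera and an FFmpegMuxer as the output; the helper name is made up, the real call site is in CameraActivity further down, and the RECORD_AUDIO check is left to the caller.
```kotlin
import android.annotation.SuppressLint
import android.content.Context
import androidx.camera.video.MuxerOutputOptions
import androidx.camera.video.SucklessRecorder
import androidx.camera.video.SucklessRecording
import androidx.camera.video.VideoRecordEvent
import java.util.concurrent.Executor
import sushi.hardcore.droidfs.video_recording.FFmpegMuxer

// Hypothetical helper: the recorder is assumed to be bound to a lifecycle already,
// and the caller is assumed to have verified the RECORD_AUDIO permission.
@SuppressLint("MissingPermission")
fun startEncryptedRecording(
    context: Context,
    recorder: SucklessRecorder,
    muxer: FFmpegMuxer,
    executor: Executor
): SucklessRecording =
    recorder.prepareRecording(context, MuxerOutputOptions(muxer))
        .withAudioEnabled()
        .start(executor) { event ->
            when (event) {
                is VideoRecordEvent.Start -> { /* recording is now active */ }
                is VideoRecordEvent.Finalize -> { /* check event.hasError() before trusting the file */ }
                else -> { /* Status, Pause and Resume events */ }
            }
        }
```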

File diff suppressed because it is too large


@@ -0,0 +1,220 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.video;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import android.annotation.SuppressLint;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.annotation.RestrictTo;
import androidx.camera.core.impl.utils.CloseGuardHelper;
import androidx.core.util.Consumer;
import androidx.core.util.Preconditions;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Provides controls for the currently active recording.
*
* <p>An active recording is created by starting a pending recording with
* {@link PendingRecording#start(Executor, Consumer)}. If there are no errors starting the
* recording, upon creation, an active recording will provide controls to pause, resume or stop a
* recording. If errors occur while starting the recording, the active recording will be
* instantiated in a {@link VideoRecordEvent.Finalize finalized} state, and all controls will be
* no-ops. The state of the recording can be observed by the video record event listener provided
* to {@link PendingRecording#start(Executor, Consumer)} when starting the recording.
*
* <p>Either {@link #stop()} or {@link #close()} can be called when it is desired to
* stop the recording. If {@link #stop()} or {@link #close()} are not called on this object
* before it is no longer referenced, it will be automatically stopped at a future point in time
* when the object is garbage collected, and no new recordings can be started from the same
* {@link Recorder} that generated the object until that occurs.
*/
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
@SuppressLint("RestrictedApi")
public final class SucklessRecording implements AutoCloseable {
// Indicates the recording has been explicitly stopped by users.
private final AtomicBoolean mIsClosed = new AtomicBoolean(false);
private final SucklessRecorder mRecorder;
private final long mRecordingId;
private final OutputOptions mOutputOptions;
private final CloseGuardHelper mCloseGuard = CloseGuardHelper.create();
SucklessRecording(@NonNull SucklessRecorder recorder, long recordingId, @NonNull OutputOptions options,
boolean finalizedOnCreation) {
mRecorder = recorder;
mRecordingId = recordingId;
mOutputOptions = options;
if (finalizedOnCreation) {
mIsClosed.set(true);
} else {
mCloseGuard.open("stop");
}
}
/**
* Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID.
*
* <p>The recording ID is expected to be unique to the recorder that generated the pending
* recording.
*/
@NonNull
static SucklessRecording from(@NonNull SucklessPendingRecording pendingRecording, long recordingId) {
Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
return new SucklessRecording(pendingRecording.getRecorder(),
recordingId,
pendingRecording.getOutputOptions(),
/*finalizedOnCreation=*/false);
}
/**
* Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID in a
* finalized state.
*
* <p>This can be used if there was an error setting up the active recording and it would not
* be able to be started.
*
* <p>The recording ID is expected to be unique to the recorder that generated the pending
* recording.
*/
@NonNull
static SucklessRecording createFinalizedFrom(@NonNull SucklessPendingRecording pendingRecording,
long recordingId) {
Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
return new SucklessRecording(pendingRecording.getRecorder(),
recordingId,
pendingRecording.getOutputOptions(),
/*finalizedOnCreation=*/true);
}
@NonNull
OutputOptions getOutputOptions() {
return mOutputOptions;
}
/**
* Pauses the current recording if active.
*
* <p>Successful pausing of a recording will generate a {@link VideoRecordEvent.Pause} event
* which will be sent to the listener passed to
* {@link PendingRecording#start(Executor, Consumer)}.
*
* <p>If the recording has already been paused or has been finalized internally, this is a
* no-op.
*
* @throws IllegalStateException if the recording has been stopped with
* {@link #close()} or {@link #stop()}.
*/
public void pause() {
if (mIsClosed.get()) {
throw new IllegalStateException("The recording has been stopped.");
}
mRecorder.pause(this);
}
/**
* Resumes the current recording if paused.
*
* <p>Successful resuming of a recording will generate a {@link VideoRecordEvent.Resume} event
* which will be sent to the listener passed to
* {@link PendingRecording#start(Executor, Consumer)}.
*
* <p>If the recording is active or has been finalized internally, this is a no-op.
*
* @throws IllegalStateException if the recording has been stopped with
* {@link #close()} or {@link #stop()}.
*/
public void resume() {
if (mIsClosed.get()) {
throw new IllegalStateException("The recording has been stopped.");
}
mRecorder.resume(this);
}
/**
* Stops the recording, as if calling {@link #close()}.
*
* <p>This method is equivalent to calling {@link #close()}.
*/
public void stop() {
close();
}
/**
* Close this recording.
*
* <p>Once {@link #stop()} or {@code close()} called, all methods for controlling the state of
* this recording besides {@link #stop()} or {@code close()} will throw an
* {@link IllegalStateException}.
*
* <p>Once an active recording has been closed, the next recording can be started with
* {@link PendingRecording#start(Executor, Consumer)}.
*
* <p>This method is idempotent; if the recording has already been closed or has been
* finalized internally, calling {@link #stop()} or {@code close()} is a no-op.
*
* <p>This method is invoked automatically on active recording instances managed by the {@code
* try-with-resources} statement.
*/
@Override
public void close() {
mCloseGuard.close();
if (mIsClosed.getAndSet(true)) {
return;
}
mRecorder.stop(this);
}
@Override
@SuppressWarnings("GenericException") // super.finalize() throws Throwable
protected void finalize() throws Throwable {
try {
mCloseGuard.warnIfOpen();
stop();
} finally {
super.finalize();
}
}
/** Returns the recording ID which is unique to the recorder that generated this recording. */
long getRecordingId() {
return mRecordingId;
}
/**
* Returns whether the recording is closed.
*
* <p>The returned value does not reflect the state of the recording; it only reflects
* whether {@link #stop()} or {@link #close()} was called on this object.
*
* <p>The state of the recording should be checked from the listener passed to
* {@link PendingRecording#start(Executor, Consumer)}. Once the active recording is
* stopped, a {@link VideoRecordEvent.Finalize} event will be sent to the listener.
*
* @hide
*/
@RestrictTo(LIBRARY_GROUP)
public boolean isClosed() {
return mIsClosed.get();
}
}
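Because the class is AutoCloseable and close() is the same as stop(), Kotlin's `use` gives a convenient guarantee that a recording is finalized even if the code in between throws. A small hypothetical helper to illustrate, not part of this commit:
```kotlin
import androidx.camera.video.SucklessPendingRecording
import androidx.camera.video.SucklessRecording
import androidx.camera.video.VideoRecordEvent
import java.util.concurrent.Executor

// Hypothetical helper: run `block` while the recording is active, then stop it.
// close() is equivalent to stop() and is idempotent, so an extra stop is harmless.
fun <R> withRecording(
    pending: SucklessPendingRecording,
    executor: Executor,
    onEvent: (VideoRecordEvent) -> Unit,
    block: (SucklessRecording) -> R
): R = pending.start(executor) { event -> onEvent(event) }.use(block)
```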

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,179 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.video;
import android.Manifest;
import android.content.Context;
import androidx.annotation.CheckResult;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.camera.core.impl.utils.ContextUtil;
import androidx.core.content.PermissionChecker;
import androidx.core.util.Consumer;
import androidx.core.util.Preconditions;
import java.util.concurrent.Executor;
/**
* A recording that can be started at a future time.
*
* <p>A pending recording allows for configuration of a recording before it is started. Once a
* pending recording is started with {@link #start(Executor, Consumer)}, any changes to the pending
* recording will not affect the actual recording; any modifications to the recording will need
* to occur through the controls of the {@link SucklessRecording} class returned by
* {@link #start(Executor, Consumer)}.
*
* <p>A pending recording can be created using one of the {@link Recorder} methods for starting a
* recording such as {@link Recorder#prepareRecording(Context, MediaStoreOutputOptions)}.
* <p>There may be more settings that can only be changed per-recorder instead of per-recording,
* because it requires expensive operations like reconfiguring the camera. For those settings, use
* the {@link Recorder.Builder} methods to configure before creating the {@link Recorder}
* instance, then create the pending recording with it.
*/
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
public final class PendingRecording {
private final Context mContext;
private final Recorder mRecorder;
private final OutputOptions mOutputOptions;
private Consumer<VideoRecordEvent> mEventListener;
private Executor mListenerExecutor;
private boolean mAudioEnabled = false;
PendingRecording(@NonNull Context context, @NonNull Recorder recorder,
@NonNull OutputOptions options) {
// Application context is sufficient for all our needs, so store that to avoid leaking
// unused resources. For attribution, ContextUtil.getApplicationContext() will retain the
// attribution tag from the original context.
mContext = ContextUtil.getApplicationContext(context);
mRecorder = recorder;
mOutputOptions = options;
}
/**
* Returns an application context which was retrieved from the {@link Context} used to
* create this object.
*/
@NonNull
Context getApplicationContext() {
return mContext;
}
@NonNull
Recorder getRecorder() {
return mRecorder;
}
@NonNull
OutputOptions getOutputOptions() {
return mOutputOptions;
}
@Nullable
Executor getListenerExecutor() {
return mListenerExecutor;
}
@Nullable
Consumer<VideoRecordEvent> getEventListener() {
return mEventListener;
}
boolean isAudioEnabled() {
return mAudioEnabled;
}
/**
* Enables audio to be recorded for this recording.
*
* <p>This method must be called prior to {@link #start(Executor, Consumer)} to enable audio
* in the recording. If this method is not called, the {@link SucklessRecording} generated by
* {@link #start(Executor, Consumer)} will not contain audio, and
* {@link AudioStats#getAudioState()} will always return
* {@link AudioStats#AUDIO_STATE_DISABLED} for all {@link RecordingStats} send to the listener
* set passed to {@link #start(Executor, Consumer)}.
*
* <p>Recording with audio requires the {@link android.Manifest.permission#RECORD_AUDIO}
* permission; without it, recording will fail at {@link #start(Executor, Consumer)} with an
* {@link IllegalStateException}.
*
* @return this pending recording
* @throws IllegalStateException if the {@link Recorder} this recording is associated to
* doesn't support audio.
* @throws SecurityException if the {@link Manifest.permission#RECORD_AUDIO} permission
* is denied for the current application.
*/
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
public SucklessPendingRecording withAudioEnabled() {
// Check permissions and throw a security exception if RECORD_AUDIO is not granted.
if (PermissionChecker.checkSelfPermission(mContext, Manifest.permission.RECORD_AUDIO)
== PermissionChecker.PERMISSION_DENIED) {
throw new SecurityException("Attempted to enable audio for recording but application "
+ "does not have RECORD_AUDIO permission granted.");
}
Preconditions.checkState(mRecorder.isAudioSupported(), "The Recorder this recording is "
+ "associated to doesn't support audio.");
mAudioEnabled = true;
return this;
}
/**
* Starts the recording, making it an active recording.
*
* <p>Only a single recording can be active at a time, so if another recording is active,
* this will throw an {@link IllegalStateException}.
*
* <p>If there are no errors starting the recording, the returned {@link SucklessRecording}
* can be used to {@link SucklessRecording#pause() pause}, {@link SucklessRecording#resume() resume},
* or {@link SucklessRecording#stop() stop} the recording.
*
* <p>Upon successfully starting the recording, a {@link VideoRecordEvent.Start} event will
* be the first event sent to the provided event listener.
*
* <p>If errors occur while starting the recording, a {@link VideoRecordEvent.Finalize} event
* will be the first event sent to the provided listener, and information about the error can
* be found in that event's {@link VideoRecordEvent.Finalize#getError()} method. The returned
* {@link SucklessRecording} will be in a finalized state, and all controls will be no-ops.
*
* <p>If the returned {@link SucklessRecording} is garbage collected, the recording will be
* automatically stopped. A reference to the active recording must be maintained as long as
* the recording needs to be active.
*
* @throws IllegalStateException if the associated Recorder currently has an unfinished
* active recording.
* @param listenerExecutor the executor that the event listener will be run on.
* @param listener the event listener to handle video record events.
*/
@NonNull
@CheckResult
public SucklessRecording start(
@NonNull Executor listenerExecutor,
@NonNull Consumer<VideoRecordEvent> listener) {
Preconditions.checkNotNull(listenerExecutor, "Listener Executor can't be null.");
Preconditions.checkNotNull(listener, "Event listener can't be null");
mListenerExecutor = listenerExecutor;
mEventListener = listener;
return mRecorder.start(this);
}
}

File diff suppressed because it is too large


@@ -0,0 +1,217 @@
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.video;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.annotation.RestrictTo;
import androidx.camera.core.impl.utils.CloseGuardHelper;
import androidx.core.util.Consumer;
import androidx.core.util.Preconditions;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Provides controls for the currently active recording.
*
* <p>An active recording is created by starting a pending recording with
* {@link PendingRecording#start(Executor, Consumer)}. If there are no errors starting the
* recording, upon creation, an active recording will provide controls to pause, resume or stop a
* recording. If errors occur while starting the recording, the active recording will be
* instantiated in a {@link VideoRecordEvent.Finalize finalized} state, and all controls will be
* no-ops. The state of the recording can be observed by the video record event listener provided
* to {@link PendingRecording#start(Executor, Consumer)} when starting the recording.
*
* <p>Either {@link #stop()} or {@link #close()} can be called when it is desired to
* stop the recording. If {@link #stop()} or {@link #close()} are not called on this object
* before it is no longer referenced, it will be automatically stopped at a future point in time
* when the object is garbage collected, and no new recordings can be started from the same
* {@link Recorder} that generated the object until that occurs.
*/
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
public final class Recording implements AutoCloseable {
// Indicates the recording has been explicitly stopped by users.
private final AtomicBoolean mIsClosed = new AtomicBoolean(false);
private final Recorder mRecorder;
private final long mRecordingId;
private final OutputOptions mOutputOptions;
private final CloseGuardHelper mCloseGuard = CloseGuardHelper.create();
Recording(@NonNull Recorder recorder, long recordingId, @NonNull OutputOptions options,
boolean finalizedOnCreation) {
mRecorder = recorder;
mRecordingId = recordingId;
mOutputOptions = options;
if (finalizedOnCreation) {
mIsClosed.set(true);
} else {
mCloseGuard.open("stop");
}
}
/**
* Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID.
*
* <p>The recording ID is expected to be unique to the recorder that generated the pending
* recording.
*/
@NonNull
static SucklessRecording from(@NonNull SucklessPendingRecording pendingRecording, long recordingId) {
Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
return new SucklessRecording(pendingRecording.getRecorder(),
recordingId,
pendingRecording.getOutputOptions(),
/*finalizedOnCreation=*/false);
}
/**
* Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID in a
* finalized state.
*
* <p>This can be used if there was an error setting up the active recording and it would not
* be able to be started.
*
* <p>The recording ID is expected to be unique to the recorder that generated the pending
* recording.
*/
@NonNull
static SucklessRecording createFinalizedFrom(@NonNull SucklessPendingRecording pendingRecording,
long recordingId) {
Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
return new SucklessRecording(pendingRecording.getRecorder(),
recordingId,
pendingRecording.getOutputOptions(),
/*finalizedOnCreation=*/true);
}
@NonNull
OutputOptions getOutputOptions() {
return mOutputOptions;
}
/**
* Pauses the current recording if active.
*
* <p>Successful pausing of a recording will generate a {@link VideoRecordEvent.Pause} event
* which will be sent to the listener passed to
* {@link PendingRecording#start(Executor, Consumer)}.
*
* <p>If the recording has already been paused or has been finalized internally, this is a
* no-op.
*
* @throws IllegalStateException if the recording has been stopped with
* {@link #close()} or {@link #stop()}.
*/
public void pause() {
if (mIsClosed.get()) {
throw new IllegalStateException("The recording has been stopped.");
}
mRecorder.pause(this);
}
/**
* Resumes the current recording if paused.
*
* <p>Successful resuming of a recording will generate a {@link VideoRecordEvent.Resume} event
* which will be sent to the listener passed to
* {@link PendingRecording#start(Executor, Consumer)}.
*
* <p>If the recording is active or has been finalized internally, this is a no-op.
*
* @throws IllegalStateException if the recording has been stopped with
* {@link #close()} or {@link #stop()}.
*/
public void resume() {
if (mIsClosed.get()) {
throw new IllegalStateException("The recording has been stopped.");
}
mRecorder.resume(this);
}
/**
* Stops the recording, as if calling {@link #close()}.
*
* <p>This method is equivalent to calling {@link #close()}.
*/
public void stop() {
close();
}
/**
* Close this recording.
*
* <p>Once {@link #stop()} or {@code close()} called, all methods for controlling the state of
* this recording besides {@link #stop()} or {@code close()} will throw an
* {@link IllegalStateException}.
*
* <p>Once an active recording has been closed, the next recording can be started with
* {@link PendingRecording#start(Executor, Consumer)}.
*
* <p>This method is idempotent; if the recording has already been closed or has been
* finalized internally, calling {@link #stop()} or {@code close()} is a no-op.
*
* <p>This method is invoked automatically on active recording instances managed by the {@code
* try-with-resources} statement.
*/
@Override
public void close() {
mCloseGuard.close();
if (mIsClosed.getAndSet(true)) {
return;
}
mRecorder.stop(this);
}
@Override
@SuppressWarnings("GenericException") // super.finalize() throws Throwable
protected void finalize() throws Throwable {
try {
mCloseGuard.warnIfOpen();
stop();
} finally {
super.finalize();
}
}
/** Returns the recording ID which is unique to the recorder that generated this recording. */
long getRecordingId() {
return mRecordingId;
}
/**
* Returns whether the recording is closed.
*
* <p>The returned value does not reflect the state of the recording; it only reflects
* whether {@link #stop()} or {@link #close()} was called on this object.
*
* <p>The state of the recording should be checked from the listener passed to
* {@link PendingRecording#start(Executor, Consumer)}. Once the active recording is
* stopped, a {@link VideoRecordEvent.Finalize} event will be sent to the listener.
*
* @hide
*/
@RestrictTo(LIBRARY_GROUP)
public boolean isClosed() {
return mIsClosed.get();
}
}


@@ -19,10 +19,24 @@ import android.widget.RelativeLayout
 import android.widget.Toast
 import androidx.annotation.RequiresApi
 import androidx.camera.camera2.interop.Camera2CameraInfo
-import androidx.camera.core.*
+import androidx.camera.core.AspectRatio
+import androidx.camera.core.Camera
+import androidx.camera.core.CameraSelector
+import androidx.camera.core.FocusMeteringAction
+import androidx.camera.core.ImageCapture
+import androidx.camera.core.ImageCaptureException
+import androidx.camera.core.Preview
+import androidx.camera.core.UseCase
 import androidx.camera.extensions.ExtensionMode
 import androidx.camera.extensions.ExtensionsManager
 import androidx.camera.lifecycle.ProcessCameraProvider
+import androidx.camera.video.MuxerOutputOptions
+import androidx.camera.video.Quality
+import androidx.camera.video.QualitySelector
+import androidx.camera.video.SucklessRecorder
+import androidx.camera.video.SucklessRecording
+import androidx.camera.video.VideoCapture
+import androidx.camera.video.VideoRecordEvent
 import androidx.core.app.ActivityCompat
 import androidx.core.content.ContextCompat
 import androidx.lifecycle.lifecycleScope
@@ -32,8 +46,8 @@ import sushi.hardcore.droidfs.databinding.ActivityCameraBinding
 import sushi.hardcore.droidfs.filesystems.EncryptedVolume
 import sushi.hardcore.droidfs.util.IntentUtils
 import sushi.hardcore.droidfs.util.PathUtils
+import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
 import sushi.hardcore.droidfs.video_recording.SeekableWriter
-import sushi.hardcore.droidfs.video_recording.VideoCapture
 import sushi.hardcore.droidfs.widgets.CustomAlertDialogBuilder
 import sushi.hardcore.droidfs.widgets.EditTextDialog
 import java.io.ByteArrayInputStream
@@ -42,6 +56,7 @@ import java.text.SimpleDateFormat
 import java.util.*
 import java.util.concurrent.Executor
+@SuppressLint("RestrictedApi")
 class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
     companion object {
         private const val CAMERA_PERMISSION_REQUEST_CODE = 0
@@ -73,11 +88,17 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
     private lateinit var cameraSelector: CameraSelector
     private val cameraPreview = Preview.Builder().build()
     private var imageCapture: ImageCapture? = null
-    private var videoCapture: VideoCapture? = null
+    private var videoCapture: VideoCapture<SucklessRecorder>? = null
+    private var videoRecorder: SucklessRecorder? = null
+    private var videoRecording: SucklessRecording? = null
     private var camera: Camera? = null
    private var resolutions: List<Size>? = null
     private var currentResolutionIndex: Int = 0
     private var currentResolution: Size? = null
+    private val aspectRatios = arrayOf(AspectRatio.RATIO_16_9, AspectRatio.RATIO_4_3)
+    private var currentAspectRatioIndex = 0
+    private var qualities: List<Quality>? = null
+    private var currentQualityIndex = -1
     private var captureMode = ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
     private var isBackCamera = true
     private var isInVideoMode = false
@@ -118,50 +139,76 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
         }
         binding.imageCaptureMode.setOnClickListener {
-            val currentIndex = if (captureMode == ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY) {
-                0
-            } else {
-                1
-            }
-            CustomAlertDialogBuilder(this, theme)
-                .setTitle(R.string.camera_optimization)
-                .setSingleChoiceItems(arrayOf(getString(R.string.maximize_quality), getString(R.string.minimize_latency)), currentIndex) { dialog, which ->
-                    val resId: Int
-                    val newCaptureMode = if (which == 0) {
-                        resId = R.drawable.icon_high_quality
-                        ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
-                    } else {
-                        resId = R.drawable.icon_speed
-                        ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY
-                    }
-                    if (newCaptureMode != captureMode) {
-                        captureMode = newCaptureMode
-                        binding.imageCaptureMode.setImageResource(resId)
-                        if (!isInVideoMode) {
-                            cameraProvider.unbind(imageCapture)
-                            refreshImageCapture()
-                            cameraProvider.bindToLifecycle(this, cameraSelector, imageCapture)
-                        }
-                    }
-                    dialog.dismiss()
-                }
-                .setNegativeButton(R.string.cancel, null)
-                .show()
-        }
-        binding.imageRatio.setOnClickListener {
-            resolutions?.let {
-                CustomAlertDialogBuilder(this, theme)
-                    .setTitle(R.string.choose_resolution)
-                    .setSingleChoiceItems(it.map { size -> size.toString() }.toTypedArray(), currentResolutionIndex) { dialog, which ->
-                        currentResolution = resolutions!![which]
-                        currentResolutionIndex = which
-                        setupCamera()
-                        dialog.dismiss()
-                    }
-                    .setNegativeButton(R.string.cancel, null)
-                    .show()
-            }
-        }
+            if (isInVideoMode) {
+                qualities?.let { qualities ->
+                    val qualityNames = qualities.map {
+                        when (it) {
+                            Quality.UHD -> "UHD"
+                            Quality.FHD -> "FHD"
+                            Quality.HD -> "HD"
+                            Quality.SD -> "SD"
+                            else -> throw IllegalArgumentException("Invalid quality: $it")
+                        }
+                    }.toTypedArray()
+                    CustomAlertDialogBuilder(this, theme)
+                        .setTitle("Choose quality:")
+                        .setSingleChoiceItems(qualityNames, currentQualityIndex) { dialog, which ->
+                            currentQualityIndex = which
+                            rebindUseCases()
+                            dialog.dismiss()
+                        }
+                        .setNegativeButton(R.string.cancel, null)
+                        .show()
+                }
+            } else {
+                CustomAlertDialogBuilder(this, theme)
+                    .setTitle(R.string.camera_optimization)
+                    .setSingleChoiceItems(
+                        arrayOf(getString(R.string.maximize_quality), getString(R.string.minimize_latency)),
+                        if (captureMode == ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY) 0 else 1
+                    ) { dialog, which ->
+                        val newCaptureMode = if (which == 0) {
+                            ImageCapture.CAPTURE_MODE_MAXIMIZE_QUALITY
+                        } else {
+                            ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY
+                        }
+                        if (newCaptureMode != captureMode) {
+                            captureMode = newCaptureMode
+                            setCaptureModeIcon()
+                            rebindUseCases()
+                        }
+                        dialog.dismiss()
+                    }
+                    .setNegativeButton(R.string.cancel, null)
+                    .show()
+            }
+        }
+        binding.imageRatio.setOnClickListener {
+            if (isInVideoMode) {
+                CustomAlertDialogBuilder(this, theme)
+                    .setTitle("Aspect ratio:")
+                    .setSingleChoiceItems(arrayOf("16:9", "4:3"), currentAspectRatioIndex) { dialog, which ->
+                        currentAspectRatioIndex = which
+                        rebindUseCases()
+                        dialog.dismiss()
+                    }
+                    .setNegativeButton(R.string.cancel, null)
+                    .show()
+            } else {
+                resolutions?.let {
+                    CustomAlertDialogBuilder(this, theme)
+                        .setTitle(R.string.choose_resolution)
+                        .setSingleChoiceItems(it.map { size -> size.toString() }.toTypedArray(), currentResolutionIndex) { dialog, which ->
+                            currentResolution = resolutions!![which]
+                            currentResolutionIndex = which
+                            rebindUseCases()
+                            dialog.dismiss()
+                        }
+                        .setNegativeButton(R.string.cancel, null)
+                        .show()
+                }
+            }
+        }
         binding.imageTimer.setOnClickListener {
             with (EditTextDialog(this, R.string.enter_timer_duration) {
                 try {
@@ -207,7 +254,7 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
         }
         binding.imageModeSwitch.setOnClickListener {
             isInVideoMode = !isInVideoMode
-            setupCamera()
+            rebindUseCases()
             binding.imageFlash.setImageResource(if (isInVideoMode) {
                 binding.recordVideoButton.visibility = View.VISIBLE
                 binding.takePhotoButton.visibility = View.GONE
@@ -219,6 +266,7 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
                 binding.imageModeSwitch.setImageDrawable(ContextCompat.getDrawable(this, R.drawable.icon_photo)?.mutate()?.also {
                     it.setTint(ContextCompat.getColor(this, R.color.neutralIconTint))
                 })
+                setCaptureModeIcon()
                 imageCapture?.flashMode = ImageCapture.FLASH_MODE_OFF
                 R.drawable.icon_flash_off
             } else {
@@ -243,6 +291,7 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
                 true
             }
             resolutions = null
+            qualities = null
             setupCamera()
         }
         binding.takePhotoButton.onClick = ::onClickTakePhoto
@@ -299,6 +348,18 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
         }
     }
+    private fun setCaptureModeIcon() {
+        binding.imageCaptureMode.setImageResource(if (isInVideoMode) {
+            R.drawable.icon_high_quality
+        } else {
+            if (captureMode == ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY) {
+                R.drawable.icon_speed
+            } else {
+                R.drawable.icon_high_quality
+            }
+        })
+    }
     private fun adaptPreviewSize(resolution: Size) {
         val screenWidth = resources.displayMetrics.widthPixels
         val screenHeight = resources.displayMetrics.heightPixels
@@ -327,43 +388,49 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
     }
     private fun refreshVideoCapture() {
-        videoCapture = VideoCapture.Builder().apply {
-            currentResolution?.let {
-                setTargetResolution(it)
-            }
-        }.build()
+        val recorderBuilder = SucklessRecorder.Builder()
+            .setExecutor(executor)
+            .setAspectRatio(aspectRatios[currentAspectRatioIndex])
+        if (currentQualityIndex != -1) {
+            recorderBuilder.setQualitySelector(QualitySelector.from(qualities!![currentQualityIndex]))
+        }
+        videoRecorder = recorderBuilder.build()
+        videoCapture = VideoCapture.withOutput(videoRecorder!!)
+    }
+    private fun rebindUseCases(): UseCase {
+        cameraProvider.unbindAll()
+        val currentUseCase = (if (isInVideoMode) {
+            refreshVideoCapture()
+            camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, videoCapture)
+            if (qualities == null) {
+                qualities = QualitySelector.getSupportedQualities(camera!!.cameraInfo)
+            }
+            videoCapture
+        } else {
+            refreshImageCapture()
+            camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, imageCapture)
+            if (resolutions == null) {
+                val info = Camera2CameraInfo.from(camera!!.cameraInfo)
+                val cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
+                val characteristics = cameraManager.getCameraCharacteristics(info.cameraId)
+                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)?.let { streamConfigurationMap ->
+                    resolutions = streamConfigurationMap.getOutputSizes(imageCapture!!.imageFormat).map { it.swap() }
+                }
+            }
+            imageCapture
+        })!!
+        adaptPreviewSize(currentUseCase.attachedSurfaceResolution!!.swap())
+        return currentUseCase
     }
-    @SuppressLint("RestrictedApi")
     private fun setupCamera() {
         if (permissionsGranted && ::extensionsManager.isInitialized && ::cameraProvider.isInitialized) {
             cameraSelector = if (isBackCamera){ CameraSelector.DEFAULT_BACK_CAMERA } else { CameraSelector.DEFAULT_FRONT_CAMERA }
             if (extensionsManager.isExtensionAvailable(cameraSelector, ExtensionMode.AUTO)) {
                 cameraSelector = extensionsManager.getExtensionEnabledCameraSelector(cameraSelector, ExtensionMode.AUTO)
             }
-            cameraProvider.unbindAll()
-            val currentUseCase = (if (isInVideoMode) {
-                refreshVideoCapture()
-                camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, videoCapture)
-                videoCapture
-            } else {
-                refreshImageCapture()
-                camera = cameraProvider.bindToLifecycle(this, cameraSelector, cameraPreview, imageCapture)
-                imageCapture
-            })!!
-            adaptPreviewSize(currentResolution ?: currentUseCase.attachedSurfaceResolution!!.swap())
-            if (resolutions == null) {
-                val info = Camera2CameraInfo.from(camera!!.cameraInfo)
-                val cameraManager = getSystemService(Context.CAMERA_SERVICE) as CameraManager
-                val characteristics = cameraManager.getCameraCharacteristics(info.cameraId)
-                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)?.let { streamConfigurationMap ->
-                    resolutions = streamConfigurationMap.getOutputSizes(currentUseCase.imageFormat).map { it.swap() }
-                }
-            }
+            rebindUseCases()
         }
     }
@@ -431,36 +498,60 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
     @SuppressLint("MissingPermission")
     private fun onClickRecordVideo() {
         if (isRecording) {
-            videoCapture?.stopRecording()
-            isRecording = false
+            videoRecording?.stop()
         } else if (!isWaitingForTimer) {
             val path = getOutputPath(true)
             startTimerThen {
-                val fileHandle = encryptedVolume.openFile(path)
-                videoCapture?.startRecording(VideoCapture.OutputFileOptions(object : SeekableWriter {
-                    var offset = 0L
-                    override fun write(byteArray: ByteArray) {
-                        offset += encryptedVolume.write(fileHandle, offset, byteArray, 0, byteArray.size.toLong())
-                    }
-                    override fun seek(offset: Long) {
-                        this.offset = offset
-                    }
-                    override fun close() {
-                        encryptedVolume.closeFile(fileHandle)
-                    }
-                }), executor, object : VideoCapture.OnVideoSavedCallback {
-                    override fun onVideoSaved() {
-                        Toast.makeText(applicationContext, getString(R.string.video_save_success, path), Toast.LENGTH_SHORT).show()
-                        binding.recordVideoButton.setImageResource(R.drawable.record_video_button)
-                    }
-                    override fun onError(videoCaptureError: Int, message: String, cause: Throwable?) {
-                        Toast.makeText(applicationContext, message, Toast.LENGTH_SHORT).show()
-                        cause?.printStackTrace()
-                        binding.recordVideoButton.setImageResource(R.drawable.record_video_button)
-                    }
-                })
-                binding.recordVideoButton.setImageResource(R.drawable.stop_recording_video_button)
-                isRecording = true
+                var withAudio = true
+                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
+                        withAudio = false
+                    }
+                }
+                videoRecording = videoRecorder?.prepareRecording(
+                    this,
+                    MuxerOutputOptions(
+                        FFmpegMuxer(object : SeekableWriter {
+                            private val fileHandle = encryptedVolume.openFile(path)
+                            private var offset = 0L
+                            override fun close() {
+                                encryptedVolume.closeFile(fileHandle)
+                            }
+                            override fun seek(offset: Long) {
+                                this.offset = offset
+                            }
+                            override fun write(buffer: ByteArray) {
+                                offset += encryptedVolume.write(fileHandle, offset, buffer, 0, buffer.size.toLong())
+                            }
+                        })
+                    )
+                )?.apply {
+                    if (withAudio) {
+                        withAudioEnabled()
+                    }
+                }?.start(executor) {
+                    when (it) {
+                        is VideoRecordEvent.Start -> {
+                            binding.recordVideoButton.setImageResource(R.drawable.stop_recording_video_button)
+                            isRecording = true
+                        }
+                        is VideoRecordEvent.Finalize -> {
+                            if (it.hasError()) {
+                                it.cause?.printStackTrace()
+                                Toast.makeText(applicationContext, it.cause?.message, Toast.LENGTH_SHORT).show()
+                                videoRecording?.close()
+                                videoRecording = null
+                            } else {
+                                Toast.makeText(applicationContext, getString(R.string.video_save_success, path), Toast.LENGTH_SHORT).show()
+                            }
+                            binding.recordVideoButton.setImageResource(R.drawable.record_video_button)
+                            isRecording = false
+                        }
+                    }
+                }
             }
         }
     }


@@ -2,9 +2,10 @@ package sushi.hardcore.droidfs.video_recording
 import android.media.MediaCodec
 import android.media.MediaFormat
+import androidx.camera.video.MediaMuxer
 import java.nio.ByteBuffer
-class MediaMuxer(val writer: SeekableWriter) {
+class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
     external fun allocContext(): Long
     external fun addVideoTrack(formatContext: Long, bitrate: Int, width: Int, height: Int, orientationHint: Int): Int
     external fun addAudioTrack(formatContext: Long, bitrate: Int, sampleRate: Int, channelCount: Int): Int
@@ -13,75 +14,70 @@ class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
     external fun writeTrailer(formatContext: Long)
     external fun release(formatContext: Long)
-    companion object {
-        const val VIDEO_TRACK_INDEX = 0
-        const val AUDIO_TRACK_INDEX = 1
-    }
     var formatContext: Long?
-    var orientationHint = 0
-    var realVideoTrackIndex: Int? = null
-    var audioFrameSize: Int? = null
-    var firstPts: Long? = null
-    private var audioPts = 0L
+    var orientation = 0
+    private var videoTrackIndex: Int? = null
+    private var audioTrackIndex: Int? = null
+    private var firstPts: Long? = null
     init {
         System.loadLibrary("mux")
         formatContext = allocContext()
     }
-    fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
+    override fun writeSampleData(trackIndex: Int, buffer: ByteBuffer, bufferInfo: MediaCodec.BufferInfo) {
         val byteArray = ByteArray(bufferInfo.size)
         buffer.get(byteArray)
         if (firstPts == null) {
             firstPts = bufferInfo.presentationTimeUs
         }
-        if (trackIndex == AUDIO_TRACK_INDEX) {
-            writePacket(formatContext!!, byteArray, audioPts, -1, false)
-            audioPts += audioFrameSize!!
-        } else {
-            writePacket(
-                formatContext!!, byteArray, bufferInfo.presentationTimeUs - firstPts!!, realVideoTrackIndex!!,
-                bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME != 0
-            )
-        }
+        writePacket(
+            formatContext!!, byteArray, bufferInfo.presentationTimeUs - firstPts!!, trackIndex,
+            bufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME != 0
+        )
     }
-    fun addTrack(format: MediaFormat): Int {
-        val mime = format.getString("mime")!!.split('/')
-        val bitrate = format.getInteger("bitrate")
+    override fun addTrack(mediaFormat: MediaFormat): Int {
+        val mime = mediaFormat.getString("mime")!!.split('/')
+        val bitrate = mediaFormat.getInteger("bitrate")
         return if (mime[0] == "audio") {
-            audioFrameSize = addAudioTrack(
+            addAudioTrack(
                 formatContext!!,
                 bitrate,
-                format.getInteger("sample-rate"),
-                format.getInteger("channel-count")
-            )
-            AUDIO_TRACK_INDEX
+                mediaFormat.getInteger("sample-rate"),
+                mediaFormat.getInteger("channel-count")
+            ).also {
+                audioTrackIndex = it
+            }
         } else {
-            realVideoTrackIndex = addVideoTrack(
+            addVideoTrack(
                 formatContext!!,
                 bitrate,
-                format.getInteger("width"),
-                format.getInteger("height"),
-                orientationHint
-            )
-            VIDEO_TRACK_INDEX
+                mediaFormat.getInteger("width"),
+                mediaFormat.getInteger("height"),
+                orientation
+            ).also {
+                videoTrackIndex = it
+            }
         }
     }
-    fun start() {
+    override fun start() {
         writeHeaders(formatContext!!)
     }
-    fun stop() {
+    override fun stop() {
         writeTrailer(formatContext!!)
     }
-    fun release() {
+    override fun setOrientationHint(degree: Int) {
+        orientation = degree
+    }
+    override fun release() {
        writer.close()
         release(formatContext!!)
         firstPts = null
-        audioPts = 0
         formatContext = null
     }
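For reference, a hedged sketch of the call order the recorder drives against this muxer; the driver function is hypothetical, the `mux` native library must be loadable, and the encoded buffers would normally come from MediaCodec rather than a prebuilt list.
```kotlin
import android.media.MediaCodec
import android.media.MediaFormat
import java.nio.ByteBuffer
import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
import sushi.hardcore.droidfs.video_recording.SeekableWriter

// Hypothetical driver for a single video track, showing the expected call order.
fun muxVideoTrack(
    writer: SeekableWriter,
    format: MediaFormat,
    samples: List<Pair<ByteBuffer, MediaCodec.BufferInfo>>
) {
    val muxer = FFmpegMuxer(writer)
    muxer.setOrientationHint(90)       // set before addTrack(): the hint is passed to addVideoTrack()
    val track = muxer.addTrack(format) // returns the native track index used by libmux
    muxer.start()                      // writes the container header through the SeekableWriter
    for ((buffer, info) in samples) {
        muxer.writeSampleData(track, buffer, info)
    }
    muxer.stop()                       // writes the trailer; may seek back via SeekableWriter.seek()
    muxer.release()                    // closes the writer and frees the native context
}
```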


@@ -1,7 +1,7 @@
 package sushi.hardcore.droidfs.video_recording
 interface SeekableWriter {
-    fun write(byteArray: ByteArray)
+    fun write(buffer: ByteArray)
     fun seek(offset: Long)
     fun close()
 }
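CameraActivity implements this interface on top of EncryptedVolume. For context, a hedged sketch of a plain-file implementation (hypothetical, only useful for exercising the muxer outside an encrypted volume):
```kotlin
import java.io.RandomAccessFile
import sushi.hardcore.droidfs.video_recording.SeekableWriter

// Hypothetical plain-file writer: no encryption, intended only for tests.
class RandomAccessFileWriter(path: String) : SeekableWriter {
    private val file = RandomAccessFile(path, "rw")

    override fun write(buffer: ByteArray) = file.write(buffer)
    override fun seek(offset: Long) = file.seek(offset)
    override fun close() = file.close()
}
```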


@@ -1,227 +0,0 @@
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sushi.hardcore.droidfs.video_recording;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.camera.core.impl.Config;
import androidx.camera.core.impl.ImageFormatConstants;
import androidx.camera.core.impl.ImageOutputConfig;
import androidx.camera.core.impl.OptionsBundle;
import androidx.camera.core.impl.UseCaseConfig;
import androidx.camera.core.internal.ThreadConfig;
/**
* Config for a video capture use case.
*
* <p>In the earlier stage, the VideoCapture is deprioritized.
*/
@RequiresApi(21) // TODO(b/200306659): Remove and replace with annotation on package-info.java
public final class VideoCaptureConfig
implements UseCaseConfig<VideoCapture>,
ImageOutputConfig,
ThreadConfig {
// Option Declarations:
// *********************************************************************************************
public static final Option<Integer> OPTION_VIDEO_FRAME_RATE =
Option.create("camerax.core.videoCapture.recordingFrameRate", int.class);
public static final Option<Integer> OPTION_BIT_RATE =
Option.create("camerax.core.videoCapture.bitRate", int.class);
public static final Option<Integer> OPTION_INTRA_FRAME_INTERVAL =
Option.create("camerax.core.videoCapture.intraFrameInterval", int.class);
public static final Option<Integer> OPTION_AUDIO_BIT_RATE =
Option.create("camerax.core.videoCapture.audioBitRate", int.class);
public static final Option<Integer> OPTION_AUDIO_SAMPLE_RATE =
Option.create("camerax.core.videoCapture.audioSampleRate", int.class);
public static final Option<Integer> OPTION_AUDIO_CHANNEL_COUNT =
Option.create("camerax.core.videoCapture.audioChannelCount", int.class);
public static final Option<Integer> OPTION_AUDIO_MIN_BUFFER_SIZE =
Option.create("camerax.core.videoCapture.audioMinBufferSize", int.class);
// *********************************************************************************************
private final OptionsBundle mConfig;
public VideoCaptureConfig(@NonNull OptionsBundle config) {
mConfig = config;
}
/**
* Returns the recording frames per second.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getVideoFrameRate(int valueIfMissing) {
return retrieveOption(OPTION_VIDEO_FRAME_RATE, valueIfMissing);
}
/**
* Returns the recording frames per second.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getVideoFrameRate() {
return retrieveOption(OPTION_VIDEO_FRAME_RATE);
}
/**
* Returns the encoding bit rate.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getBitRate(int valueIfMissing) {
return retrieveOption(OPTION_BIT_RATE, valueIfMissing);
}
/**
* Returns the encoding bit rate.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getBitRate() {
return retrieveOption(OPTION_BIT_RATE);
}
/**
* Returns the number of seconds between each key frame.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getIFrameInterval(int valueIfMissing) {
return retrieveOption(OPTION_INTRA_FRAME_INTERVAL, valueIfMissing);
}
/**
* Returns the number of seconds between each key frame.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getIFrameInterval() {
return retrieveOption(OPTION_INTRA_FRAME_INTERVAL);
}
/**
* Returns the audio encoding bit rate.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getAudioBitRate(int valueIfMissing) {
return retrieveOption(OPTION_AUDIO_BIT_RATE, valueIfMissing);
}
/**
* Returns the audio encoding bit rate.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getAudioBitRate() {
return retrieveOption(OPTION_AUDIO_BIT_RATE);
}
/**
* Returns the audio sample rate.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getAudioSampleRate(int valueIfMissing) {
return retrieveOption(OPTION_AUDIO_SAMPLE_RATE, valueIfMissing);
}
/**
* Returns the audio sample rate.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getAudioSampleRate() {
return retrieveOption(OPTION_AUDIO_SAMPLE_RATE);
}
/**
* Returns the audio channel count.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getAudioChannelCount(int valueIfMissing) {
return retrieveOption(OPTION_AUDIO_CHANNEL_COUNT, valueIfMissing);
}
/**
* Returns the audio channel count.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getAudioChannelCount() {
return retrieveOption(OPTION_AUDIO_CHANNEL_COUNT);
}
/**
* Returns the audio minimum buffer size, in bytes.
*
* @param valueIfMissing The value to return if this configuration option has not been set.
* @return The stored value or <code>valueIfMissing</code> if the value does not exist in this
* configuration.
*/
public int getAudioMinBufferSize(int valueIfMissing) {
return retrieveOption(OPTION_AUDIO_MIN_BUFFER_SIZE, valueIfMissing);
}
/**
* Returns the audio minimum buffer size, in bytes.
*
* @return The stored value, if it exists in this configuration.
* @throws IllegalArgumentException if the option does not exist in this configuration.
*/
public int getAudioMinBufferSize() {
return retrieveOption(OPTION_AUDIO_MIN_BUFFER_SIZE);
}
/**
* Retrieves the format of the image that is fed as input.
*
* <p>This should always be PRIVATE for VideoCapture.
*/
@Override
public int getInputFormat() {
return ImageFormatConstants.INTERNAL_DEFINED_IMAGE_FORMAT_PRIVATE;
}
@NonNull
@Override
public Config getConfig() {
return mConfig;
}
}

@@ -31,7 +31,7 @@ int64_t seek(void* opaque, int64_t offset, int whence) {
     return offset;
 }

-jlong Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_allocContext(JNIEnv *env, jobject thiz) {
+jlong Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_allocContext(JNIEnv *env, jobject thiz) {
     const AVOutputFormat *format = av_guess_format("mp4", NULL, NULL);
     struct Muxer* muxer = malloc(sizeof(struct Muxer));
     (*env)->GetJavaVM(env, &muxer->jvm);
@@ -47,8 +47,8 @@ jlong Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_allocContext(JNIEn
 }

 JNIEXPORT jint JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addAudioTrack(JNIEnv *env, jobject thiz, jlong format_context, jint bitrate, jint sample_rate,
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addAudioTrack(JNIEnv *env, jobject thiz, jlong format_context, jint bitrate, jint sample_rate,
                                                                        jint channel_count) {
     const AVCodec* encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
     AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
     AVCodecContext* codec_context = avcodec_alloc_context3(encoder);
@@ -62,17 +62,16 @@ Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addAudioTrack(JNIEnv *en
     codec_context->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
     avcodec_open2(codec_context, encoder, NULL);
     avcodec_parameters_from_context(stream->codecpar, codec_context);
-    int frame_size = codec_context->frame_size;
     avcodec_free_context(&codec_context);
-    return frame_size;
+    return stream->index;
 }

 JNIEXPORT jint JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addVideoTrack(JNIEnv *env, jobject thiz,
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *env, jobject thiz,
                                                                       jlong format_context,
                                                                       jint bitrate, jint width,
                                                                       jint height,
                                                                       jint orientation_hint) {
     AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
     stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
     stream->codecpar->codec_id = AV_CODEC_ID_H264;
@@ -85,25 +84,22 @@ Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_addVideoTrack(JNIEnv *en
 }

 JNIEXPORT jint JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writeHeaders(JNIEnv *env, jobject thiz, jlong format_context) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeHeaders(JNIEnv *env, jobject thiz, jlong format_context) {
     return avformat_write_header((AVFormatContext *) format_context, NULL);
 }

 JNIEXPORT void JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writePacket(JNIEnv *env, jobject thiz, jlong format_context,
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writePacket(JNIEnv *env, jobject thiz, jlong format_context,
                                                                     jbyteArray buffer, jlong pts, jint stream_index,
                                                                     jboolean is_key_frame) {
     AVPacket* packet = av_packet_alloc();
     int size = (*env)->GetArrayLength(env, buffer);
     av_new_packet(packet, size);
-    packet->pts = pts;
-    if (stream_index >= 0) { //video
-        packet->stream_index = stream_index;
-        AVRational r;
-        r.num = 1;
-        r.den = 1000000;
-        av_packet_rescale_ts(packet, r, ((AVFormatContext *)format_context)->streams[stream_index]->time_base);
-    }
+    packet->stream_index = stream_index;
+    AVRational r;
+    r.num = 1;
+    r.den = 1000000;
+    packet->pts = av_rescale_q(pts, r, ((AVFormatContext*)format_context)->streams[stream_index]->time_base);
     uint8_t* buff = malloc(size);
     (*env)->GetByteArrayRegion(env, buffer, 0, size, (signed char*)buff);
     packet->data = buff;
@@ -116,12 +112,12 @@ Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writePacket(JNIEnv *env,
 }

 JNIEXPORT void JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_writeTrailer(JNIEnv *env, jobject thiz, jlong format_context) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeTrailer(JNIEnv *env, jobject thiz, jlong format_context) {
     av_write_trailer((AVFormatContext *) format_context);
 }

 JNIEXPORT void JNICALL
-Java_sushi_hardcore_droidfs_video_1recording_MediaMuxer_release(JNIEnv *env, jobject thiz, jlong format_context) {
+Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_release(JNIEnv *env, jobject thiz, jlong format_context) {
     AVFormatContext* fc = (AVFormatContext *) format_context;
     av_free(fc->pb->buffer);
     free(fc->pb->opaque);
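The important behavioural change in writePacket is that the old video-only branch is gone: every packet's presentation timestamp, delivered in microseconds, is now rescaled into the destination stream's time base with av_rescale_q before muxing, and the stream index always comes from the caller, matching the value that addAudioTrack/addVideoTrack now return. The rescaling itself is plain integer arithmetic; a small sketch of the equivalent computation (the 1/90000 and 1/44100 time bases are illustrative values, not taken from this commit, and av_rescale_q's exact tie-breaking is ignored):

```kotlin
// pts arrives in microseconds (time base 1/1_000_000) and must be expressed in
// the stream's time base 1/timeBaseDen, i.e. ptsOut = ptsMicros * timeBaseDen / 1_000_000.
fun rescalePts(ptsMicros: Long, timeBaseDen: Long): Long =
    (ptsMicros * timeBaseDen + 500_000) / 1_000_000  // rounded to the nearest tick

fun main() {
    // A sample at t = 1.5 s:
    println(rescalePts(1_500_000, 90_000))  // 135000 ticks in a 1/90000 video time base
    println(rescalePts(1_500_000, 44_100))  // 66150 ticks in a 1/44100 audio time base
}
```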

@@ -1,5 +1,5 @@
 buildscript {
-    ext.kotlin_version = "1.7.21"
+    ext.kotlin_version = "1.8.10"
     repositories {
         google()
         mavenCentral()