Update dependencies

Matéo Duparc 2023-04-17 17:06:51 +02:00
parent e918a2f94c
commit aea17aa7cb
Signed by untrusted user: hardcoresushi
GPG Key ID: AFE384344A45E13A
13 changed files with 152 additions and 161 deletions


@@ -94,27 +94,27 @@ android {
dependencies {
implementation project(":libpdfviewer:app")
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
- implementation 'androidx.core:core-ktx:1.9.0'
+ implementation 'androidx.core:core-ktx:1.10.0'
implementation "androidx.appcompat:appcompat:1.6.1"
implementation "androidx.constraintlayout:constraintlayout:2.1.4"
- def lifecycle_version = "2.6.0"
+ def lifecycle_version = "2.6.1"
implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:$lifecycle_version"
implementation "androidx.lifecycle:lifecycle-process:$lifecycle_version"
- implementation "androidx.sqlite:sqlite-ktx:2.3.0"
+ implementation "androidx.sqlite:sqlite-ktx:2.3.1"
implementation "androidx.preference:preference-ktx:1.2.0"
implementation "androidx.swiperefreshlayout:swiperefreshlayout:1.1.0"
implementation 'com.google.android.material:material:1.8.0'
implementation "com.github.bumptech.glide:glide:4.13.2"
implementation "androidx.biometric:biometric-ktx:1.2.0-alpha05"
- def exoplayer_version = "2.18.4"
+ def exoplayer_version = "2.18.5"
implementation "com.google.android.exoplayer:exoplayer-core:$exoplayer_version"
implementation "com.google.android.exoplayer:exoplayer-ui:$exoplayer_version"
implementation "androidx.concurrent:concurrent-futures:1.1.0"
- def camerax_version = "1.3.0-alpha04"
+ def camerax_version = "1.3.0-alpha05"
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
implementation "androidx.camera:camera-view:$camerax_version"
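To double-check what these bumps actually resolve to at build time, Gradle's built-in dependency report can be used. This is a generic sketch, not part of the commit; it assumes the project uses the Gradle wrapper and that the module shown above is named `app`:
```
# Print the resolved runtime dependency tree for the app module (module name assumed)
./gradlew :app:dependencies --configuration releaseRuntimeClasspath
```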

@@ -1 +1 @@
- Subproject commit 43de737624ceeb1c41012d2ea4f0d5dcba8a19e5
+ Subproject commit 445b26395bf94e3295d12aa46c8c15d5d63cab95

@@ -1 +1 @@
- Subproject commit 27232cbdb7257be13a12545b71fa32ee193cb11b
+ Subproject commit 79f9a10e35847e46f8563941345355f15f2dba7c


@@ -29,7 +29,7 @@ import static androidx.camera.video.VideoRecordEvent.Finalize.VideoRecordError;
import static androidx.camera.video.internal.DebugUtils.readableUs;
import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioEncoderConfig;
import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioMimeInfo;
- import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioSourceSettings;
+ import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioSettings;
import android.Manifest;
import android.annotation.SuppressLint;
@@ -56,7 +56,6 @@ import androidx.annotation.VisibleForTesting;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.Logger;
import androidx.camera.core.SurfaceRequest;
- import androidx.camera.core.impl.CamcorderProfileProxy;
import androidx.camera.core.impl.MutableStateObservable;
import androidx.camera.core.impl.Observable;
import androidx.camera.core.impl.StateObservable;
@@ -69,8 +68,11 @@ import androidx.camera.core.impl.utils.futures.Futures;
import androidx.camera.core.internal.utils.ArrayRingBuffer;
import androidx.camera.core.internal.utils.RingBuffer;
import androidx.camera.video.StreamInfo.StreamState;
- import androidx.camera.video.internal.AudioSource;
- import androidx.camera.video.internal.AudioSourceAccessException;
+ import androidx.camera.video.internal.VideoValidatedEncoderProfilesProxy;
+ import androidx.camera.video.internal.audio.AudioSettings;
+ import androidx.camera.video.internal.audio.AudioSource;
+ import androidx.camera.video.internal.audio.AudioSourceAccessException;
+ import androidx.camera.video.internal.compat.Api26Impl;
import androidx.camera.video.internal.compat.quirk.DeactivateEncoderSurfaceBeforeStopEncoderQuirk;
import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
import androidx.camera.video.internal.compat.quirk.EncoderNotUsePersistentInputSurfaceQuirk;
@@ -342,7 +344,7 @@ public final class SucklessRecorder implements VideoOutput {
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
boolean mInProgressRecordingStopping = false;
private SurfaceRequest.TransformationInfo mSurfaceTransformationInfo = null;
- private CamcorderProfileProxy mResolvedCamcorderProfile = null;
+ private VideoValidatedEncoderProfilesProxy mResolvedEncoderProfiles = null;
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
final List<ListenableFuture<Void>> mEncodingFutures = new ArrayList<>();
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
@@ -452,7 +454,6 @@ public final class SucklessRecorder implements VideoOutput {
onSurfaceRequested(request, Timebase.UPTIME);
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
public void onSurfaceRequested(@NonNull SurfaceRequest request, @NonNull Timebase timebase) {
@@ -466,7 +467,6 @@ public final class SucklessRecorder implements VideoOutput {
mSequentialExecutor.execute(() -> onSurfaceRequestedInternal(request, timebase));
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
@NonNull
@@ -474,7 +474,6 @@ public final class SucklessRecorder implements VideoOutput {
return mMediaSpec;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
@NonNull
@@ -482,7 +481,6 @@ public final class SucklessRecorder implements VideoOutput {
return mStreamInfo;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
public void onSourceStateChanged(@NonNull SourceState newState) {
@@ -950,17 +948,17 @@ public final class SucklessRecorder implements VideoOutput {
surfaceRequest.setTransformationInfoListener(mSequentialExecutor,
(transformationInfo) -> mSurfaceTransformationInfo = transformationInfo);
Size surfaceSize = surfaceRequest.getResolution();
- // Fetch and cache nearest camcorder profile, if one exists.
+ // Fetch and cache nearest encoder profiles, if one exists.
VideoCapabilities capabilities =
VideoCapabilities.from(surfaceRequest.getCamera().getCameraInfo());
Quality highestSupportedQuality = capabilities.findHighestSupportedQualityFor(surfaceSize);
Logger.d(TAG, "Using supported quality of " + highestSupportedQuality
+ " for surface size " + surfaceSize);
if (highestSupportedQuality != Quality.NONE) {
- mResolvedCamcorderProfile = capabilities.getProfile(highestSupportedQuality);
- if (mResolvedCamcorderProfile == null) {
+ mResolvedEncoderProfiles = capabilities.getProfiles(highestSupportedQuality);
+ if (mResolvedEncoderProfiles == null) {
throw new AssertionError("Camera advertised available quality but did not "
- + "produce CamcorderProfile for advertised quality.");
+ + "produce EncoderProfiles for advertised quality.");
}
}
setupVideo(surfaceRequest, videoSourceTimebase);
@@ -980,7 +978,7 @@ public final class SucklessRecorder implements VideoOutput {
MediaSpec mediaSpec = getObservableData(mMediaSpec);
ListenableFuture<Encoder> configureFuture =
videoEncoderSession.configure(request, timebase, mediaSpec,
- mResolvedCamcorderProfile);
+ mResolvedEncoderProfiles);
mVideoEncoderSession = videoEncoderSession;
Futures.addCallback(configureFuture, new FutureCallback<Encoder>() {
@Override
@@ -1146,23 +1144,23 @@ public final class SucklessRecorder implements VideoOutput {
throws AudioSourceAccessException, InvalidConfigException {
MediaSpec mediaSpec = getObservableData(mMediaSpec);
// Resolve the audio mime info
- MimeInfo audioMimeInfo = resolveAudioMimeInfo(mediaSpec, mResolvedCamcorderProfile);
+ MimeInfo audioMimeInfo = resolveAudioMimeInfo(mediaSpec, mResolvedEncoderProfiles);
Timebase audioSourceTimebase = Timebase.UPTIME;
// Select and create the audio source
- AudioSource.Settings audioSourceSettings =
- resolveAudioSourceSettings(audioMimeInfo, mediaSpec.getAudioSpec());
+ AudioSettings audioSettings =
+ resolveAudioSettings(audioMimeInfo, mediaSpec.getAudioSpec());
if (mAudioSource != null) {
releaseCurrentAudioSource();
}
// TODO: set audioSourceTimebase to AudioSource. Currently AudioSource hard code
// AudioTimestamp.TIMEBASE_MONOTONIC.
- mAudioSource = setupAudioSource(recordingToStart, audioSourceSettings);
+ mAudioSource = setupAudioSource(recordingToStart, audioSettings);
Logger.d(TAG, String.format("Set up new audio source: 0x%x", mAudioSource.hashCode()));
// Select and create the audio encoder
AudioEncoderConfig audioEncoderConfig = resolveAudioEncoderConfig(audioMimeInfo,
- audioSourceTimebase, audioSourceSettings, mediaSpec.getAudioSpec());
+ audioSourceTimebase, audioSettings, mediaSpec.getAudioSpec());
mAudioEncoder = mAudioEncoderFactory.createEncoder(mExecutor, audioEncoderConfig);
// Connect the audio source to the audio encoder
@@ -1176,10 +1174,9 @@ public final class SucklessRecorder implements VideoOutput {
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
private AudioSource setupAudioSource(@NonNull RecordingRecord recordingToStart,
- @NonNull AudioSource.Settings audioSourceSettings)
+ @NonNull AudioSettings audioSettings)
throws AudioSourceAccessException {
- return recordingToStart.performOneTimeAudioSourceCreation(audioSourceSettings,
- AUDIO_EXECUTOR);
+ return recordingToStart.performOneTimeAudioSourceCreation(audioSettings, AUDIO_EXECUTOR);
}
private void releaseCurrentAudioSource() {
@@ -1286,7 +1283,7 @@ public final class SucklessRecorder implements VideoOutput {
MediaSpec mediaSpec = getObservableData(mMediaSpec);
int muxerOutputFormat =
mediaSpec.getOutputFormat() == MediaSpec.OUTPUT_FORMAT_AUTO
- ? supportedMuxerFormatOrDefaultFrom(mResolvedCamcorderProfile,
+ ? supportedMuxerFormatOrDefaultFrom(mResolvedEncoderProfiles,
MediaSpec.outputFormatToMuxerFormat(
MEDIA_SPEC_DEFAULT.getOutputFormat()))
: MediaSpec.outputFormatToMuxerFormat(mediaSpec.getOutputFormat());
@@ -1535,7 +1532,7 @@ public final class SucklessRecorder implements VideoOutput {
mAudioSource.setAudioSourceCallback(mSequentialExecutor,
new AudioSource.AudioSourceCallback() {
@Override
- public void onSilenced(boolean silenced) {
+ public void onSilenceStateChanged(boolean silenced) {
if (mIsAudioSourceSilenced != silenced) {
mIsAudioSourceSilenced = silenced;
mAudioErrorCause = silenced ? new IllegalStateException(
@@ -2460,9 +2457,9 @@ public final class SucklessRecorder implements VideoOutput {
}
private static int supportedMuxerFormatOrDefaultFrom(
- @Nullable CamcorderProfileProxy profileProxy, int defaultMuxerFormat) {
- if (profileProxy != null) {
- switch (profileProxy.getFileFormat()) {
+ @Nullable VideoValidatedEncoderProfilesProxy profilesProxy, int defaultMuxerFormat) {
+ if (profilesProxy != null) {
+ switch (profilesProxy.getRecommendedFileFormat()) {
case MediaRecorder.OutputFormat.MPEG_4:
return android.media.MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
case MediaRecorder.OutputFormat.WEBM:
@@ -2576,7 +2573,7 @@ public final class SucklessRecorder implements VideoOutput {
@NonNull
@Override
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
- public AudioSource get(@NonNull AudioSource.Settings settings,
+ public AudioSource get(@NonNull AudioSettings settings,
@NonNull Executor executor)
throws AudioSourceAccessException {
// Context will only be held in local scope of the supplier so it will
@@ -2593,7 +2590,7 @@ public final class SucklessRecorder implements VideoOutput {
@NonNull
@Override
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
- public AudioSource get(@NonNull AudioSource.Settings settings,
+ public AudioSource get(@NonNull AudioSettings settings,
@NonNull Executor executor)
throws AudioSourceAccessException {
// Do not set (or retain) context on other API levels
@@ -2708,7 +2705,7 @@ public final class SucklessRecorder implements VideoOutput {
@NonNull
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
AudioSource performOneTimeAudioSourceCreation(
- @NonNull AudioSource.Settings settings, @NonNull Executor audioSourceExecutor)
+ @NonNull AudioSettings settings, @NonNull Executor audioSourceExecutor)
throws AudioSourceAccessException {
if (!hasAudioEnabled()) {
throw new AssertionError("Recording does not have audio enabled. Unable to create"
@@ -2822,7 +2819,7 @@ public final class SucklessRecorder implements VideoOutput {
private interface AudioSourceSupplier {
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
- AudioSource get(@NonNull AudioSource.Settings settings,
+ AudioSource get(@NonNull AudioSettings settings,
@NonNull Executor audioSourceExecutor) throws AudioSourceAccessException;
}
}
@@ -2972,7 +2969,6 @@ public final class SucklessRecorder implements VideoOutput {
return this;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@NonNull
Builder setVideoEncoderFactory(@NonNull EncoderFactory videoEncoderFactory) {
@@ -2980,7 +2976,6 @@ public final class SucklessRecorder implements VideoOutput {
return this;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@NonNull
Builder setAudioEncoderFactory(@NonNull EncoderFactory audioEncoderFactory) {


@@ -210,7 +210,6 @@ public final class SucklessRecording implements AutoCloseable {
* {@link PendingRecording#start(Executor, Consumer)}. Once the active recording is
* stopped, a {@link VideoRecordEvent.Finalize} event will be sent to the listener.
*
- * @hide
*/
@RestrictTo(LIBRARY_GROUP)
public boolean isClosed() {


@@ -25,6 +25,7 @@ import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.Interna
import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.RELEASED;
import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.STARTED;
import static androidx.camera.video.internal.encoder.SucklessEncoderImpl.InternalState.STOPPING;
+ import static androidx.core.util.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
@@ -241,10 +242,14 @@ public class SucklessEncoderImpl implements Encoder {
mMediaFormat = encoderConfig.toMediaFormat();
Logger.d(mTag, "mMediaFormat = " + mMediaFormat);
mMediaCodec = mEncoderFinder.findEncoder(mMediaFormat);
- clampVideoBitrateIfNotSupported(mMediaCodec.getCodecInfo(), mMediaFormat);
Logger.i(mTag, "Selected encoder: " + mMediaCodec.getName());
mEncoderInfo = createEncoderInfo(mIsVideoEncoder, mMediaCodec.getCodecInfo(),
encoderConfig.getMimeType());
+ if (mIsVideoEncoder) {
+ VideoEncoderInfo videoEncoderInfo = (VideoEncoderInfo) mEncoderInfo;
+ clampVideoBitrateIfNotSupported(videoEncoderInfo, mMediaFormat);
+ }
try {
reset();
} catch (MediaCodec.CodecException e) {
@@ -263,41 +268,22 @@ public class SucklessEncoderImpl implements Encoder {
}
/**
- * If video bitrate in MediaFormat is not supported by supplied MediaCodecInfo,
- * clamp bitrate in MediaFormat
+ * Clamps the video bitrate in MediaFormat if the video bitrate is not supported by the
+ * supplied VideoEncoderInfo.
*
- * @param mediaCodecInfo MediaCodecInfo object
+ * @param videoEncoderInfo VideoEncoderInfo object
* @param mediaFormat MediaFormat object
*/
- private void clampVideoBitrateIfNotSupported(@NonNull MediaCodecInfo mediaCodecInfo,
+ private void clampVideoBitrateIfNotSupported(@NonNull VideoEncoderInfo videoEncoderInfo,
@NonNull MediaFormat mediaFormat) {
- if (!mediaCodecInfo.isEncoder() || !mIsVideoEncoder) {
- return;
- }
- try {
- String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
- MediaCodecInfo.CodecCapabilities caps = mediaCodecInfo.getCapabilitiesForType(mime);
- Preconditions.checkArgument(caps != null,
- "MIME type is not supported");
- if (mediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
- // We only handle video bitrate issues at this moment.
- MediaCodecInfo.VideoCapabilities videoCaps = caps.getVideoCapabilities();
- Preconditions.checkArgument(videoCaps != null,
- "Not video codec");
- int origBitrate = mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
- int newBitrate = videoCaps.getBitrateRange().clamp(origBitrate);
- if (origBitrate != newBitrate) {
- mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, newBitrate);
- Logger.d(mTag, "updated bitrate from " + origBitrate
- + " to " + newBitrate);
- }
- }
- } catch (IllegalArgumentException e) {
- Logger.w(mTag, "Unexpected error while validating video bitrate", e);
- }
+ checkState(mIsVideoEncoder);
+ if (mediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
+ int origBitrate = mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
+ int newBitrate = videoEncoderInfo.getSupportedBitrateRange().clamp(origBitrate);
+ if (origBitrate != newBitrate) {
+ mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, newBitrate);
+ Logger.d(mTag, "updated bitrate from " + origBitrate + " to " + newBitrate);
+ }
+ }
}
@@ -402,7 +388,7 @@ public class SucklessEncoderImpl implements Encoder {
mLastDataStopTimestamp = null;
final Range<Long> pauseRange = mActivePauseResumeTimeRanges.removeLast();
- Preconditions.checkState(
+ checkState(
pauseRange != null && pauseRange.getUpper() == NO_LIMIT_LONG,
"There should be a \"pause\" before \"resume\"");
final long pauseTimeUs = pauseRange.getLower();
@@ -1205,7 +1191,7 @@ public class SucklessEncoderImpl implements Encoder {
// If adjusted time <= last sent time, the buffer should have been detected and
// dropped in checkBufferInfo().
- Preconditions.checkState(adjustedTimeUs > mLastSentAdjustedTimeUs);
+ checkState(adjustedTimeUs > mLastSentAdjustedTimeUs);
if (DEBUG) {
Logger.d(mTag, "Adjust bufferInfo.presentationTimeUs to "
+ DebugUtils.readableUs(adjustedTimeUs));
@@ -1614,7 +1600,7 @@ public class SucklessEncoderImpl implements Encoder {
private void cancelInputBuffer(@NonNull ListenableFuture<InputBuffer> inputBufferFuture) {
if (!inputBufferFuture.cancel(true)) {
// Not able to cancel the future, need to cancel the input buffer as possible.
- Preconditions.checkState(inputBufferFuture.isDone());
+ checkState(inputBufferFuture.isDone());
try {
inputBufferFuture.get().cancel();
} catch (ExecutionException | InterruptedException | CancellationException e) {
@@ -1688,3 +1674,5 @@ public class SucklessEncoderImpl implements Encoder {
}
}
}


@@ -0,0 +1,20 @@
# Update the modified CameraX files to a new upstream version:
Create the `new` folder if needed:
```
mkdir -p new
```
Put new CameraX files from upstream in the `new` folder.
Perform the 3-way merge:
```
./merge.sh
```
If new files are created in the current directory, they contain conflicts. Resolve them, then move them to the right location.
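When `./merge.sh` cannot merge a change cleanly, the conflicted file it leaves behind uses the standard `diff3 -m` markers, with the three sides labelled by the paths passed to `diff3` in the script. A rough illustration (the ellipsis lines are placeholders, not real content):
```
<<<<<<< ../SucklessRecorder.java
... DroidFS-modified lines ...
||||||| base/Recorder.java
... old upstream lines ...
=======
... new upstream lines ...
>>>>>>> new/Recorder.java
```
Resolving usually means keeping the DroidFS-specific edits while adopting the new upstream code, then moving the file back to its original location.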
Finally, update the base:
```
./update.sh
```


@@ -25,6 +25,7 @@ import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.P
import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.RELEASED;
import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.STARTED;
import static androidx.camera.video.internal.encoder.EncoderImpl.InternalState.STOPPING;
+ import static androidx.core.util.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
@@ -240,10 +241,14 @@ public class EncoderImpl implements Encoder {
mMediaFormat = encoderConfig.toMediaFormat();
Logger.d(mTag, "mMediaFormat = " + mMediaFormat);
mMediaCodec = mEncoderFinder.findEncoder(mMediaFormat);
- clampVideoBitrateIfNotSupported(mMediaCodec.getCodecInfo(), mMediaFormat);
Logger.i(mTag, "Selected encoder: " + mMediaCodec.getName());
mEncoderInfo = createEncoderInfo(mIsVideoEncoder, mMediaCodec.getCodecInfo(),
encoderConfig.getMimeType());
+ if (mIsVideoEncoder) {
+ VideoEncoderInfo videoEncoderInfo = (VideoEncoderInfo) mEncoderInfo;
+ clampVideoBitrateIfNotSupported(videoEncoderInfo, mMediaFormat);
+ }
try {
reset();
} catch (MediaCodec.CodecException e) {
@@ -262,41 +267,22 @@ public class EncoderImpl implements Encoder {
}
/**
- * If video bitrate in MediaFormat is not supported by supplied MediaCodecInfo,
- * clamp bitrate in MediaFormat
+ * Clamps the video bitrate in MediaFormat if the video bitrate is not supported by the
+ * supplied VideoEncoderInfo.
*
- * @param mediaCodecInfo MediaCodecInfo object
+ * @param videoEncoderInfo VideoEncoderInfo object
* @param mediaFormat MediaFormat object
*/
- private void clampVideoBitrateIfNotSupported(@NonNull MediaCodecInfo mediaCodecInfo,
+ private void clampVideoBitrateIfNotSupported(@NonNull VideoEncoderInfo videoEncoderInfo,
@NonNull MediaFormat mediaFormat) {
- if (!mediaCodecInfo.isEncoder() || !mIsVideoEncoder) {
- return;
- }
- try {
- String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
- MediaCodecInfo.CodecCapabilities caps = mediaCodecInfo.getCapabilitiesForType(mime);
- Preconditions.checkArgument(caps != null,
- "MIME type is not supported");
- if (mediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
- // We only handle video bitrate issues at this moment.
- MediaCodecInfo.VideoCapabilities videoCaps = caps.getVideoCapabilities();
- Preconditions.checkArgument(videoCaps != null,
- "Not video codec");
- int origBitrate = mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
- int newBitrate = videoCaps.getBitrateRange().clamp(origBitrate);
- if (origBitrate != newBitrate) {
- mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, newBitrate);
- Logger.d(mTag, "updated bitrate from " + origBitrate
- + " to " + newBitrate);
- }
- }
- } catch (IllegalArgumentException e) {
- Logger.w(mTag, "Unexpected error while validating video bitrate", e);
- }
+ checkState(mIsVideoEncoder);
+ if (mediaFormat.containsKey(MediaFormat.KEY_BIT_RATE)) {
+ int origBitrate = mediaFormat.getInteger(MediaFormat.KEY_BIT_RATE);
+ int newBitrate = videoEncoderInfo.getSupportedBitrateRange().clamp(origBitrate);
+ if (origBitrate != newBitrate) {
+ mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, newBitrate);
+ Logger.d(mTag, "updated bitrate from " + origBitrate + " to " + newBitrate);
+ }
+ }
}
@@ -401,7 +387,7 @@ public class EncoderImpl implements Encoder {
mLastDataStopTimestamp = null;
final Range<Long> pauseRange = mActivePauseResumeTimeRanges.removeLast();
- Preconditions.checkState(
+ checkState(
pauseRange != null && pauseRange.getUpper() == NO_LIMIT_LONG,
"There should be a \"pause\" before \"resume\"");
final long pauseTimeUs = pauseRange.getLower();
@@ -1204,7 +1190,7 @@ public class EncoderImpl implements Encoder {
// If adjusted time <= last sent time, the buffer should have been detected and
// dropped in checkBufferInfo().
- Preconditions.checkState(adjustedTimeUs > mLastSentAdjustedTimeUs);
+ checkState(adjustedTimeUs > mLastSentAdjustedTimeUs);
if (DEBUG) {
Logger.d(mTag, "Adjust bufferInfo.presentationTimeUs to "
+ DebugUtils.readableUs(adjustedTimeUs));
@@ -1622,7 +1608,7 @@ public class EncoderImpl implements Encoder {
private void cancelInputBuffer(@NonNull ListenableFuture<InputBuffer> inputBufferFuture) {
if (!inputBufferFuture.cancel(true)) {
// Not able to cancel the future, need to cancel the input buffer as possible.
- Preconditions.checkState(inputBufferFuture.isDone());
+ checkState(inputBufferFuture.isDone());
try {
inputBufferFuture.get().cancel();
} catch (ExecutionException | InterruptedException | CancellationException e) {
@@ -1697,3 +1683,4 @@ public class EncoderImpl implements Encoder {
}
}


@@ -37,7 +37,7 @@ import java.util.concurrent.Executor;
* <p>A pending recording allows for configuration of a recording before it is started. Once a
* pending recording is started with {@link #start(Executor, Consumer)}, any changes to the pending
* recording will not affect the actual recording; any modifications to the recording will need
- * to occur through the controls of the {@link SucklessRecording} class returned by
+ * to occur through the controls of the {@link Recording} class returned by
* {@link #start(Executor, Consumer)}.
*
* <p>A pending recording can be created using one of the {@link Recorder} methods for starting a
@@ -106,7 +106,7 @@ public final class PendingRecording {
* Enables audio to be recorded for this recording.
*
* <p>This method must be called prior to {@link #start(Executor, Consumer)} to enable audio
- * in the recording. If this method is not called, the {@link SucklessRecording} generated by
+ * in the recording. If this method is not called, the {@link Recording} generated by
* {@link #start(Executor, Consumer)} will not contain audio, and
* {@link AudioStats#getAudioState()} will always return
* {@link AudioStats#AUDIO_STATE_DISABLED} for all {@link RecordingStats} send to the listener
@@ -124,7 +124,7 @@
*/
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
- public SucklessPendingRecording withAudioEnabled() {
+ public PendingRecording withAudioEnabled() {
// Check permissions and throw a security exception if RECORD_AUDIO is not granted.
if (PermissionChecker.checkSelfPermission(mContext, Manifest.permission.RECORD_AUDIO)
== PermissionChecker.PERMISSION_DENIED) {
@@ -143,9 +143,9 @@
* <p>Only a single recording can be active at a time, so if another recording is active,
* this will throw an {@link IllegalStateException}.
*
- * <p>If there are no errors starting the recording, the returned {@link SucklessRecording}
- * can be used to {@link SucklessRecording#pause() pause}, {@link SucklessRecording#resume() resume},
- * or {@link SucklessRecording#stop() stop} the recording.
+ * <p>If there are no errors starting the recording, the returned {@link Recording}
+ * can be used to {@link Recording#pause() pause}, {@link Recording#resume() resume},
+ * or {@link Recording#stop() stop} the recording.
*
* <p>Upon successfully starting the recording, a {@link VideoRecordEvent.Start} event will
* be the first event sent to the provided event listener.
@@ -153,9 +153,9 @@
* <p>If errors occur while starting the recording, a {@link VideoRecordEvent.Finalize} event
* will be the first event sent to the provided listener, and information about the error can
* be found in that event's {@link VideoRecordEvent.Finalize#getError()} method. The returned
- * {@link SucklessRecording} will be in a finalized state, and all controls will be no-ops.
+ * {@link Recording} will be in a finalized state, and all controls will be no-ops.
*
- * <p>If the returned {@link SucklessRecording} is garbage collected, the recording will be
+ * <p>If the returned {@link Recording} is garbage collected, the recording will be
* automatically stopped. A reference to the active recording must be maintained as long as
* the recording needs to be active.
*
@@ -166,7 +166,7 @@
*/
@NonNull
@CheckResult
- public SucklessRecording start(
+ public Recording start(
@NonNull Executor listenerExecutor,
@NonNull Consumer<VideoRecordEvent> listener) {
Preconditions.checkNotNull(listenerExecutor, "Listener Executor can't be null.");


@@ -29,7 +29,7 @@ import static androidx.camera.video.VideoRecordEvent.Finalize.VideoRecordError;
import static androidx.camera.video.internal.DebugUtils.readableUs;
import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioEncoderConfig;
import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioMimeInfo;
- import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioSourceSettings;
+ import static androidx.camera.video.internal.config.AudioConfigUtil.resolveAudioSettings;
import android.Manifest;
import android.annotation.SuppressLint;
@@ -59,7 +59,6 @@ import androidx.annotation.VisibleForTesting;
import androidx.camera.core.AspectRatio;
import androidx.camera.core.Logger;
import androidx.camera.core.SurfaceRequest;
- import androidx.camera.core.impl.CamcorderProfileProxy;
import androidx.camera.core.impl.MutableStateObservable;
import androidx.camera.core.impl.Observable;
import androidx.camera.core.impl.StateObservable;
@@ -72,8 +71,10 @@ import androidx.camera.core.impl.utils.futures.Futures;
import androidx.camera.core.internal.utils.ArrayRingBuffer;
import androidx.camera.core.internal.utils.RingBuffer;
import androidx.camera.video.StreamInfo.StreamState;
- import androidx.camera.video.internal.AudioSource;
- import androidx.camera.video.internal.AudioSourceAccessException;
+ import androidx.camera.video.internal.VideoValidatedEncoderProfilesProxy;
+ import androidx.camera.video.internal.audio.AudioSettings;
+ import androidx.camera.video.internal.audio.AudioSource;
+ import androidx.camera.video.internal.audio.AudioSourceAccessException;
import androidx.camera.video.internal.compat.Api26Impl;
import androidx.camera.video.internal.compat.quirk.DeactivateEncoderSurfaceBeforeStopEncoderQuirk;
import androidx.camera.video.internal.compat.quirk.DeviceQuirks;
@@ -346,7 +347,7 @@ public final class Recorder implements VideoOutput {
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
boolean mInProgressRecordingStopping = false;
private SurfaceRequest.TransformationInfo mSurfaceTransformationInfo = null;
- private CamcorderProfileProxy mResolvedCamcorderProfile = null;
+ private VideoValidatedEncoderProfilesProxy mResolvedEncoderProfiles = null;
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
final List<ListenableFuture<Void>> mEncodingFutures = new ArrayList<>();
@SuppressWarnings("WeakerAccess") /* synthetic accessor */
@@ -456,7 +457,6 @@ public final class Recorder implements VideoOutput {
onSurfaceRequested(request, Timebase.UPTIME);
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
public void onSurfaceRequested(@NonNull SurfaceRequest request, @NonNull Timebase timebase) {
@@ -470,7 +470,6 @@ public final class Recorder implements VideoOutput {
mSequentialExecutor.execute(() -> onSurfaceRequestedInternal(request, timebase));
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
@NonNull
@@ -478,7 +477,6 @@ public final class Recorder implements VideoOutput {
return mMediaSpec;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
@NonNull
@@ -486,7 +484,6 @@ public final class Recorder implements VideoOutput {
return mStreamInfo;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@Override
public void onSourceStateChanged(@NonNull SourceState newState) {
@@ -1041,17 +1038,17 @@ public final class Recorder implements VideoOutput {
surfaceRequest.setTransformationInfoListener(mSequentialExecutor,
(transformationInfo) -> mSurfaceTransformationInfo = transformationInfo);
Size surfaceSize = surfaceRequest.getResolution();
- // Fetch and cache nearest camcorder profile, if one exists.
+ // Fetch and cache nearest encoder profiles, if one exists.
VideoCapabilities capabilities =
VideoCapabilities.from(surfaceRequest.getCamera().getCameraInfo());
Quality highestSupportedQuality = capabilities.findHighestSupportedQualityFor(surfaceSize);
Logger.d(TAG, "Using supported quality of " + highestSupportedQuality
+ " for surface size " + surfaceSize);
if (highestSupportedQuality != Quality.NONE) {
- mResolvedCamcorderProfile = capabilities.getProfile(highestSupportedQuality);
- if (mResolvedCamcorderProfile == null) {
+ mResolvedEncoderProfiles = capabilities.getProfiles(highestSupportedQuality);
+ if (mResolvedEncoderProfiles == null) {
throw new AssertionError("Camera advertised available quality but did not "
- + "produce CamcorderProfile for advertised quality.");
+ + "produce EncoderProfiles for advertised quality.");
}
}
setupVideo(surfaceRequest, videoSourceTimebase);
@@ -1071,7 +1068,7 @@ public final class Recorder implements VideoOutput {
MediaSpec mediaSpec = getObservableData(mMediaSpec);
ListenableFuture<Encoder> configureFuture =
videoEncoderSession.configure(request, timebase, mediaSpec,
- mResolvedCamcorderProfile);
+ mResolvedEncoderProfiles);
mVideoEncoderSession = videoEncoderSession;
Futures.addCallback(configureFuture, new FutureCallback<Encoder>() {
@Override
@@ -1237,23 +1234,23 @@ public final class Recorder implements VideoOutput {
throws AudioSourceAccessException, InvalidConfigException {
MediaSpec mediaSpec = getObservableData(mMediaSpec);
// Resolve the audio mime info
- MimeInfo audioMimeInfo = resolveAudioMimeInfo(mediaSpec, mResolvedCamcorderProfile);
+ MimeInfo audioMimeInfo = resolveAudioMimeInfo(mediaSpec, mResolvedEncoderProfiles);
Timebase audioSourceTimebase = Timebase.UPTIME;
// Select and create the audio source
- AudioSource.Settings audioSourceSettings =
- resolveAudioSourceSettings(audioMimeInfo, mediaSpec.getAudioSpec());
+ AudioSettings audioSettings =
+ resolveAudioSettings(audioMimeInfo, mediaSpec.getAudioSpec());
if (mAudioSource != null) {
releaseCurrentAudioSource();
}
// TODO: set audioSourceTimebase to AudioSource. Currently AudioSource hard code
// AudioTimestamp.TIMEBASE_MONOTONIC.
- mAudioSource = setupAudioSource(recordingToStart, audioSourceSettings);
+ mAudioSource = setupAudioSource(recordingToStart, audioSettings);
Logger.d(TAG, String.format("Set up new audio source: 0x%x", mAudioSource.hashCode()));
// Select and create the audio encoder
AudioEncoderConfig audioEncoderConfig = resolveAudioEncoderConfig(audioMimeInfo,
- audioSourceTimebase, audioSourceSettings, mediaSpec.getAudioSpec());
+ audioSourceTimebase, audioSettings, mediaSpec.getAudioSpec());
mAudioEncoder = mAudioEncoderFactory.createEncoder(mExecutor, audioEncoderConfig);
// Connect the audio source to the audio encoder
@@ -1267,10 +1264,9 @@ public final class Recorder implements VideoOutput {
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
private AudioSource setupAudioSource(@NonNull RecordingRecord recordingToStart,
- @NonNull AudioSource.Settings audioSourceSettings)
+ @NonNull AudioSettings audioSettings)
throws AudioSourceAccessException {
- return recordingToStart.performOneTimeAudioSourceCreation(audioSourceSettings,
- AUDIO_EXECUTOR);
+ return recordingToStart.performOneTimeAudioSourceCreation(audioSettings, AUDIO_EXECUTOR);
}
private void releaseCurrentAudioSource() {
@@ -1377,7 +1373,7 @@ public final class Recorder implements VideoOutput {
MediaSpec mediaSpec = getObservableData(mMediaSpec);
int muxerOutputFormat =
mediaSpec.getOutputFormat() == MediaSpec.OUTPUT_FORMAT_AUTO
- ? supportedMuxerFormatOrDefaultFrom(mResolvedCamcorderProfile,
+ ? supportedMuxerFormatOrDefaultFrom(mResolvedEncoderProfiles,
MediaSpec.outputFormatToMuxerFormat(
MEDIA_SPEC_DEFAULT.getOutputFormat()))
: MediaSpec.outputFormatToMuxerFormat(mediaSpec.getOutputFormat());
@@ -1641,7 +1637,7 @@ public final class Recorder implements VideoOutput {
mAudioSource.setAudioSourceCallback(mSequentialExecutor,
new AudioSource.AudioSourceCallback() {
@Override
- public void onSilenced(boolean silenced) {
+ public void onSilenceStateChanged(boolean silenced) {
if (mIsAudioSourceSilenced != silenced) {
mIsAudioSourceSilenced = silenced;
mAudioErrorCause = silenced ? new IllegalStateException(
@@ -2566,9 +2562,9 @@ public final class Recorder implements VideoOutput {
}
private static int supportedMuxerFormatOrDefaultFrom(
- @Nullable CamcorderProfileProxy profileProxy, int defaultMuxerFormat) {
- if (profileProxy != null) {
- switch (profileProxy.getFileFormat()) {
+ @Nullable VideoValidatedEncoderProfilesProxy profilesProxy, int defaultMuxerFormat) {
+ if (profilesProxy != null) {
+ switch (profilesProxy.getRecommendedFileFormat()) {
case MediaRecorder.OutputFormat.MPEG_4:
return MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
case MediaRecorder.OutputFormat.WEBM:
@@ -2738,7 +2734,7 @@ public final class Recorder implements VideoOutput {
@NonNull
@Override
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
- public AudioSource get(@NonNull AudioSource.Settings settings,
+ public AudioSource get(@NonNull AudioSettings settings,
@NonNull Executor executor)
throws AudioSourceAccessException {
// Context will only be held in local scope of the supplier so it will
@@ -2755,7 +2751,7 @@ public final class Recorder implements VideoOutput {
@NonNull
@Override
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
- public AudioSource get(@NonNull AudioSource.Settings settings,
+ public AudioSource get(@NonNull AudioSettings settings,
@NonNull Executor executor)
throws AudioSourceAccessException {
// Do not set (or retain) context on other API levels
@@ -2870,7 +2866,7 @@ public final class Recorder implements VideoOutput {
@NonNull
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
AudioSource performOneTimeAudioSourceCreation(
- @NonNull AudioSource.Settings settings, @NonNull Executor audioSourceExecutor)
+ @NonNull AudioSettings settings, @NonNull Executor audioSourceExecutor)
throws AudioSourceAccessException {
if (!hasAudioEnabled()) {
throw new AssertionError("Recording does not have audio enabled. Unable to create"
@@ -2984,7 +2980,7 @@ public final class Recorder implements VideoOutput {
private interface AudioSourceSupplier {
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@NonNull
- AudioSource get(@NonNull AudioSource.Settings settings,
+ AudioSource get(@NonNull AudioSettings settings,
@NonNull Executor audioSourceExecutor) throws AudioSourceAccessException;
}
}
@@ -3134,7 +3130,6 @@ public final class Recorder implements VideoOutput {
return this;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@NonNull
Builder setVideoEncoderFactory(@NonNull EncoderFactory videoEncoderFactory) {
@@ -3142,7 +3137,6 @@ public final class Recorder implements VideoOutput {
return this;
}
- /** @hide */
@RestrictTo(RestrictTo.Scope.LIBRARY)
@NonNull
Builder setAudioEncoderFactory(@NonNull EncoderFactory audioEncoderFactory) {


@@ -69,22 +69,22 @@ public final class Recording implements AutoCloseable {
}
/**
- * Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID.
+ * Creates an {@link Recording} from a {@link PendingRecording} and recording ID.
*
* <p>The recording ID is expected to be unique to the recorder that generated the pending
* recording.
*/
@NonNull
- static SucklessRecording from(@NonNull SucklessPendingRecording pendingRecording, long recordingId) {
+ static Recording from(@NonNull PendingRecording pendingRecording, long recordingId) {
Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
- return new SucklessRecording(pendingRecording.getRecorder(),
+ return new Recording(pendingRecording.getRecorder(),
recordingId,
pendingRecording.getOutputOptions(),
/*finalizedOnCreation=*/false);
}
/**
- * Creates an {@link SucklessRecording} from a {@link PendingRecording} and recording ID in a
+ * Creates an {@link Recording} from a {@link PendingRecording} and recording ID in a
* finalized state.
*
* <p>This can be used if there was an error setting up the active recording and it would not
@@ -94,10 +94,10 @@ public final class Recording implements AutoCloseable {
* recording.
*
*/
@NonNull
- static SucklessRecording createFinalizedFrom(@NonNull SucklessPendingRecording pendingRecording,
+ static Recording createFinalizedFrom(@NonNull PendingRecording pendingRecording,
long recordingId) {
Preconditions.checkNotNull(pendingRecording, "The given PendingRecording cannot be null.");
- return new SucklessRecording(pendingRecording.getRecorder(),
+ return new Recording(pendingRecording.getRecorder(),
recordingId,
pendingRecording.getOutputOptions(),
/*finalizedOnCreation=*/true);
@@ -207,7 +207,6 @@ public final class Recording implements AutoCloseable {
* {@link PendingRecording#start(Executor, Consumer)}. Once the active recording is
* stopped, a {@link VideoRecordEvent.Finalize} event will be sent to the listener.
*
- * @hide
*/
@RestrictTo(LIBRARY_GROUP)
public boolean isClosed() {

@@ -0,0 +1,6 @@
#!/bin/sh
for i in "PendingRecording" "Recording" "Recorder"; do
diff3 -m ../Suckless$i.java base/$i.java new/$i.java > Suckless$i.java && mv Suckless$i.java ..
done
diff3 -m ../internal/encoder/SucklessEncoderImpl.java base/EncoderImpl.java new/EncoderImpl.java > SucklessEncoderImpl.java && mv SucklessEncoderImpl.java ../internal/encoder/SucklessEncoderImpl.java


@@ -0,0 +1,3 @@
#!/bin/sh
rm -r base && mv new base