Improve video recording: fix freezes & ExoPlayer errors

Matéo Duparc 2023-05-06 23:40:37 +02:00
parent 4c412be7dc
commit b3a25e03e7
Signed by untrusted user: hardcoresushi
GPG Key ID: AFE384344A45E13A
7 changed files with 140 additions and 73 deletions

View File

@@ -72,4 +72,5 @@ target_link_libraries(
     avformat
     avcodec
     avutil
+    log
 )

View File

@@ -1254,46 +1254,13 @@ public class SucklessEncoderImpl implements Encoder {
             mVideoTimestampConverter.convertToUptimeUs(bufferInfo.presentationTimeUs);
         }
-        // MediaCodec may send out of order buffer
-        if (bufferInfo.presentationTimeUs <= mLastPresentationTimeUs) {
-            Logger.d(mTag, "Drop buffer by out of order buffer from MediaCodec.");
-            return false;
-        }
         mLastPresentationTimeUs = bufferInfo.presentationTimeUs;
-        // Ignore buffers are not in start/stop range. One situation is to ignore outdated
-        // frames when using the Surface of MediaCodec#createPersistentInputSurface. After
-        // the persistent Surface stops, it will keep a small number of old frames in its
-        // buffer, and send those old frames in the next startup.
-        if (!mStartStopTimeRangeUs.contains(bufferInfo.presentationTimeUs)) {
-            Logger.d(mTag, "Drop buffer by not in start-stop range.");
-            // If data hasn't reached the expected stop timestamp, set the stop timestamp.
-            if (mPendingCodecStop
-                    && bufferInfo.presentationTimeUs >= mStartStopTimeRangeUs.getUpper()) {
-                if (mStopTimeoutFuture != null) {
-                    mStopTimeoutFuture.cancel(true);
-                }
-                mLastDataStopTimestamp = bufferInfo.presentationTimeUs;
-                signalCodecStop();
-                mPendingCodecStop = false;
-            }
-            return false;
-        }
         if (updatePauseRangeStateAndCheckIfBufferPaused(bufferInfo)) {
             Logger.d(mTag, "Drop buffer by pause.");
             return false;
         }
-        // We should check if the adjusted time is valid. see b/189114207.
-        if (getAdjustedTimeUs(bufferInfo) <= mLastSentAdjustedTimeUs) {
-            Logger.d(mTag, "Drop buffer by adjusted time is less than the last sent time.");
-            if (mIsVideoEncoder && isKeyFrame(bufferInfo)) {
-                mIsKeyFrameRequired = true;
-            }
-            return false;
-        }
         if (!mHasFirstData && !mIsKeyFrameRequired && mIsVideoEncoder) {
             mIsKeyFrameRequired = true;
         }

View File

@@ -47,6 +47,7 @@ import sushi.hardcore.droidfs.databinding.ActivityCameraBinding
 import sushi.hardcore.droidfs.filesystems.EncryptedVolume
 import sushi.hardcore.droidfs.util.IntentUtils
 import sushi.hardcore.droidfs.util.PathUtils
+import sushi.hardcore.droidfs.video_recording.AsynchronousSeekableWriter
 import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
 import sushi.hardcore.droidfs.video_recording.SeekableWriter
 import sushi.hardcore.droidfs.widgets.CustomAlertDialogBuilder
@@ -510,37 +511,32 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
                 .show()
             return
         }
-        startTimerThen {
-            var withAudio = true
-            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
-                if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
-                    withAudio = false
-                }
-            }
-            videoRecording = videoRecorder?.prepareRecording(
-                this,
-                MuxerOutputOptions(
-                    FFmpegMuxer(object : SeekableWriter {
-                        private var offset = 0L
-                        override fun close() {
-                            encryptedVolume.closeFile(fileHandle)
-                        }
-                        override fun seek(offset: Long) {
-                            this.offset = offset
-                        }
-                        override fun write(buffer: ByteArray) {
-                            offset += encryptedVolume.write(fileHandle, offset, buffer, 0, buffer.size.toLong())
-                        }
-                    })
-                )
-            )?.apply {
-                if (withAudio) {
-                    withAudioEnabled()
-                }
-            }?.start(executor) {
+        val writer = AsynchronousSeekableWriter(object : SeekableWriter {
+            private var offset = 0L
+            override fun close() {
+                encryptedVolume.closeFile(fileHandle)
+            }
+            override fun seek(offset: Long) {
+                this.offset = offset
+            }
+            override fun write(buffer: ByteArray, size: Int) {
+                offset += encryptedVolume.write(fileHandle, offset, buffer, 0, size.toLong())
+            }
+        })
+        val pendingRecording = videoRecorder!!.prepareRecording(
+            this,
+            MuxerOutputOptions(FFmpegMuxer(writer))
+        ).also {
+            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M || ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
+                it.withAudioEnabled()
+            }
+        }
+        startTimerThen {
+            writer.start()
+            videoRecording = pendingRecording.start(executor) {
             val buttons = arrayOf(binding.imageCaptureMode, binding.imageRatio, binding.imageTimer, binding.imageModeSwitch, binding.imageCameraSwitch)
             when (it) {
                 is VideoRecordEvent.Start -> {

View File

@@ -0,0 +1,68 @@
package sushi.hardcore.droidfs.video_recording
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.launch
import sushi.hardcore.droidfs.Constants
import java.nio.ByteBuffer
class AsynchronousSeekableWriter(private val internalWriter: SeekableWriter): SeekableWriter {
internal enum class Operation { WRITE, SEEK, CLOSE }
internal class Task(
val operation: Operation,
val buffer: ByteArray? = null,
val offset: Long? = null,
)
private val channel = Channel<Task>(Channel.UNLIMITED)
private fun flush(buffer: ByteBuffer) {
internalWriter.write(buffer.array(), buffer.position())
buffer.position(0)
}
fun start() {
CoroutineScope(Dispatchers.IO).launch {
val buffer = ByteBuffer.allocate(Constants.IO_BUFF_SIZE)
while (true) {
val task = channel.receive()
when (task.operation) {
Operation.WRITE -> {
if (task.buffer!!.size > buffer.remaining()) {
flush(buffer)
}
buffer.put(task.buffer)
}
Operation.SEEK -> {
if (buffer.position() > 0) {
flush(buffer)
}
internalWriter.seek(task.offset!!)
}
Operation.CLOSE -> {
if (buffer.position() > 0) {
flush(buffer)
}
internalWriter.close()
break
}
}
}
}
}
override fun write(buffer: ByteArray, size: Int) {
channel.trySend(Task(Operation.WRITE, buffer)).exceptionOrNull()?.let { throw it }
}
override fun seek(offset: Long) {
channel.trySend(Task(Operation.SEEK, offset = offset)).exceptionOrNull()?.let { throw it }
}
override fun close() {
channel.trySend(Task(Operation.CLOSE)).exceptionOrNull()?.let { throw it }
}
}
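
A minimal usage sketch of the class above (the in-memory sink is a hypothetical stand-in for the encrypted-volume writer used in CameraActivity, not part of this commit): every call is queued on the unbounded channel and only touches the wrapped writer from the consumer coroutine, which batches writes into an IO_BUFF_SIZE buffer and flushes it before each seek and on close.

    val sink = object : SeekableWriter {
        var position = 0L
        override fun write(buffer: ByteArray, size: Int) { position += size }  // pretend to persist `size` bytes
        override fun seek(offset: Long) { position = offset }
        override fun close() { /* release the underlying resource */ }
    }
    val writer = AsynchronousSeekableWriter(sink)
    writer.start()                    // launch the consumer coroutine
    writer.write(ByteArray(16), 16)   // queued and buffered, not yet passed to sink
    writer.seek(0)                    // flushes the pending bytes, then seeks the sink
    writer.close()                    // flushes what remains and closes the sink

Note that write() drops the size argument when queuing and later copies the whole array, so callers are expected to pass exactly-sized buffers, as FFmpegMuxer.writePacket does.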

View File

@@ -7,7 +7,7 @@ import java.nio.ByteBuffer
 class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
     external fun allocContext(): Long
-    external fun addVideoTrack(formatContext: Long, bitrate: Int, width: Int, height: Int, orientationHint: Int): Int
+    external fun addVideoTrack(formatContext: Long, bitrate: Int, frameRate: Int, width: Int, height: Int, orientationHint: Int): Int
     external fun addAudioTrack(formatContext: Long, bitrate: Int, sampleRate: Int, channelCount: Int): Int
     external fun writeHeaders(formatContext: Long): Int
     external fun writePacket(formatContext: Long, buffer: ByteArray, pts: Long, streamIndex: Int, isKeyFrame: Boolean)
@@ -54,6 +54,7 @@ class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
         addVideoTrack(
             formatContext!!,
             bitrate,
+            mediaFormat.getInteger("frame-rate"),
             mediaFormat.getInteger("width"),
             mediaFormat.getInteger("height"),
             orientation
@@ -82,7 +83,7 @@ class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
     }
     fun writePacket(buff: ByteArray) {
-        writer.write(buff)
+        writer.write(buff, buff.size)
     }
     fun seek(offset: Long) {
         writer.seek(offset)

View File

@@ -1,7 +1,7 @@
 package sushi.hardcore.droidfs.video_recording
 interface SeekableWriter {
-    fun write(buffer: ByteArray)
+    fun write(buffer: ByteArray, size: Int)
     fun seek(offset: Long)
     fun close()
 }
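
The added size parameter means only the first size bytes of buffer are valid: AsynchronousSeekableWriter passes its internal ByteBuffer's backing array with the current position as size. A minimal sketch of that contract against a plain file (hypothetical example assuming java.io.RandomAccessFile; DroidFS writes to an EncryptedVolume instead):

    import java.io.RandomAccessFile

    // Hypothetical implementation of the updated interface, for illustration only.
    class FileSeekableWriter(path: String) : SeekableWriter {
        private val file = RandomAccessFile(path, "rw")
        override fun write(buffer: ByteArray, size: Int) = file.write(buffer, 0, size)  // honor `size`, ignore the rest
        override fun seek(offset: Long) = file.seek(offset)
        override fun close() = file.close()
    }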

View File

@@ -3,9 +3,36 @@
 #include <libavutil/channel_layout.h>
 #include <libavutil/display.h>
 #include <jni.h>
+#include <android/log.h>
+const char* LOG_TAG = "LIBMUX";
 const size_t BUFF_SIZE = 4096;
+
+int to_android_log_level(int level) {
+    switch (level) {
+        case AV_LOG_PANIC:
+        case AV_LOG_FATAL: return ANDROID_LOG_FATAL;
+        case AV_LOG_ERROR: return ANDROID_LOG_ERROR;
+        case AV_LOG_WARNING: return ANDROID_LOG_WARN;
+        case AV_LOG_INFO: return ANDROID_LOG_INFO;
+        default: return ANDROID_LOG_UNKNOWN;
+    }
+}
+
+void log_callback(void *ptr, int level, const char *fmt, va_list vl)
+{
+    char line[1024];
+    static int print_prefix = 1;
+    av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
+    __android_log_print(to_android_log_level(level), LOG_TAG, "%s", line);
+}
+
+void log_err(int result, const char* name) {
+    if (result < 0) {
+        __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "%s: %d", name, result);
+    }
+}
+
 struct Muxer {
     JavaVM* jvm;
     jobject thiz;
@@ -32,6 +59,8 @@ int64_t seek(void* opaque, int64_t offset, int whence) {
 }
 jlong Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_allocContext(JNIEnv *env, jobject thiz) {
+    av_log_set_callback(log_callback);
+    av_log_set_level(AV_LOG_INFO);
     const AVOutputFormat *format = av_guess_format("mp4", NULL, NULL);
     struct Muxer* muxer = malloc(sizeof(struct Muxer));
     (*env)->GetJavaVM(env, &muxer->jvm);
@@ -50,17 +79,14 @@ JNIEXPORT jint JNICALL
 Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addAudioTrack(JNIEnv *env, jobject thiz, jlong format_context, jint bitrate, jint sample_rate,
                                                                        jint channel_count) {
     const AVCodec* encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
-    AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
     AVCodecContext* codec_context = avcodec_alloc_context3(encoder);
     av_channel_layout_default(&codec_context->ch_layout, channel_count);
     codec_context->sample_rate = sample_rate;
     codec_context->sample_fmt = encoder->sample_fmts[0];
     codec_context->bit_rate = bitrate;
-    codec_context->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
-    stream->time_base.den = sample_rate;
-    stream->time_base.num = 1;
-    codec_context->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
+    codec_context->time_base = (AVRational) {1, sample_rate};
     avcodec_open2(codec_context, encoder, NULL);
+    AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
     avcodec_parameters_from_context(stream->codecpar, codec_context);
     avcodec_free_context(&codec_context);
     return stream->index;
@@ -69,7 +95,9 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addAudioTrack(JNIEnv *e
 JNIEXPORT jint JNICALL
 Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *env, jobject thiz,
                                                                        jlong format_context,
-                                                                       jint bitrate, jint width,
+                                                                       jint bitrate,
+                                                                       jint frame_rate,
+                                                                       jint width,
                                                                        jint height,
                                                                        jint orientation_hint) {
     AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
@@ -78,6 +106,8 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *e
     stream->codecpar->bit_rate = bitrate;
     stream->codecpar->width = width;
     stream->codecpar->height = height;
+    stream->codecpar->format = AV_PIX_FMT_YUVJ420P;
+    stream->time_base = (AVRational) {1, frame_rate};
     uint8_t* matrix = av_stream_new_side_data(stream, AV_PKT_DATA_DISPLAYMATRIX, sizeof(int32_t) * 9);
     av_display_rotation_set((int32_t *) matrix, orientation_hint);
     return stream->index;
@@ -85,7 +115,10 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *e
 JNIEXPORT jint JNICALL
 Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeHeaders(JNIEnv *env, jobject thiz, jlong format_context) {
-    return avformat_write_header((AVFormatContext *) format_context, NULL);
+    av_dump_format((AVFormatContext *) format_context, 0, NULL, 1);
+    int result = avformat_write_header((AVFormatContext *) format_context, NULL);
+    log_err(result, "avformat_write_header");
+    return result;
 }
 JNIEXPORT void JNICALL
@@ -100,20 +133,21 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writePacket(JNIEnv *env
     r.num = 1;
     r.den = 1000000;
     packet->pts = av_rescale_q(pts, r, ((AVFormatContext*)format_context)->streams[stream_index]->time_base);
+    packet->dts = packet->pts;
     uint8_t* buff = malloc(size);
     (*env)->GetByteArrayRegion(env, buffer, 0, size, (signed char*)buff);
     packet->data = buff;
     if (is_key_frame) {
         packet->flags = AV_PKT_FLAG_KEY;
     }
-    av_write_frame((AVFormatContext *)format_context, packet);
+    log_err(av_write_frame((AVFormatContext *)format_context, packet), "av_write_frame");
     free(buff);
     av_packet_free(&packet);
 }
 JNIEXPORT void JNICALL
 Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeTrailer(JNIEnv *env, jobject thiz, jlong format_context) {
-    av_write_trailer((AVFormatContext *) format_context);
+    log_err(av_write_trailer((AVFormatContext *) format_context), "av_write_trailer");
 }
JNIEXPORT void JNICALL JNIEXPORT void JNICALL