forked from hardcoresushi/DroidFS
Improve video recording: fix freezes & ExoPlayer errors
This commit is contained in:
parent 4c412be7dc
commit b3a25e03e7
@@ -72,4 +72,5 @@ target_link_libraries(
avformat
avcodec
avutil
log
)
@@ -1254,46 +1254,13 @@ public class SucklessEncoderImpl implements Encoder {
mVideoTimestampConverter.convertToUptimeUs(bufferInfo.presentationTimeUs);
}

// MediaCodec may send out of order buffer
if (bufferInfo.presentationTimeUs <= mLastPresentationTimeUs) {
Logger.d(mTag, "Drop buffer by out of order buffer from MediaCodec.");
return false;
}
mLastPresentationTimeUs = bufferInfo.presentationTimeUs;

// Ignore buffers are not in start/stop range. One situation is to ignore outdated
// frames when using the Surface of MediaCodec#createPersistentInputSurface. After
// the persistent Surface stops, it will keep a small number of old frames in its
// buffer, and send those old frames in the next startup.
if (!mStartStopTimeRangeUs.contains(bufferInfo.presentationTimeUs)) {
Logger.d(mTag, "Drop buffer by not in start-stop range.");
// If data hasn't reached the expected stop timestamp, set the stop timestamp.
if (mPendingCodecStop
&& bufferInfo.presentationTimeUs >= mStartStopTimeRangeUs.getUpper()) {
if (mStopTimeoutFuture != null) {
mStopTimeoutFuture.cancel(true);
}
mLastDataStopTimestamp = bufferInfo.presentationTimeUs;
signalCodecStop();
mPendingCodecStop = false;
}
return false;
}

if (updatePauseRangeStateAndCheckIfBufferPaused(bufferInfo)) {
Logger.d(mTag, "Drop buffer by pause.");
return false;
}

// We should check if the adjusted time is valid. see b/189114207.
if (getAdjustedTimeUs(bufferInfo) <= mLastSentAdjustedTimeUs) {
Logger.d(mTag, "Drop buffer by adjusted time is less than the last sent time.");
if (mIsVideoEncoder && isKeyFrame(bufferInfo)) {
mIsKeyFrameRequired = true;
}
return false;
}

if (!mHasFirstData && !mIsKeyFrameRequired && mIsVideoEncoder) {
mIsKeyFrameRequired = true;
}
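The out-of-order check above is what keeps non-monotonic MediaCodec timestamps out of the muxed stream; buffers whose presentation time does not strictly increase can corrupt the mp4 timeline and trigger playback errors in players such as ExoPlayer. A minimal standalone sketch of the same guard (illustrative Kotlin, not part of the patch):

    // Drops encoder output whose presentation timestamp does not strictly increase.
    class MonotonicPtsFilter {
        private var lastPtsUs = Long.MIN_VALUE

        // Returns true if the buffer may be forwarded to the muxer.
        fun accept(ptsUs: Long): Boolean {
            if (ptsUs <= lastPtsUs) return false // out-of-order or duplicate buffer from MediaCodec
            lastPtsUs = ptsUs
            return true
        }
    }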
@@ -47,6 +47,7 @@ import sushi.hardcore.droidfs.databinding.ActivityCameraBinding
import sushi.hardcore.droidfs.filesystems.EncryptedVolume
import sushi.hardcore.droidfs.util.IntentUtils
import sushi.hardcore.droidfs.util.PathUtils
import sushi.hardcore.droidfs.video_recording.AsynchronousSeekableWriter
import sushi.hardcore.droidfs.video_recording.FFmpegMuxer
import sushi.hardcore.droidfs.video_recording.SeekableWriter
import sushi.hardcore.droidfs.widgets.CustomAlertDialogBuilder
@@ -510,37 +511,32 @@ class CameraActivity : BaseActivity(), SensorOrientationListener.Listener {
.show()
return
}
startTimerThen {
var withAudio = true
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
withAudio = false
}
val writer = AsynchronousSeekableWriter(object : SeekableWriter {
private var offset = 0L

override fun close() {
encryptedVolume.closeFile(fileHandle)
}
videoRecording = videoRecorder?.prepareRecording(
this,
MuxerOutputOptions(
FFmpegMuxer(object : SeekableWriter {
private var offset = 0L

override fun close() {
encryptedVolume.closeFile(fileHandle)
}
override fun seek(offset: Long) {
this.offset = offset
}

override fun seek(offset: Long) {
this.offset = offset
}

override fun write(buffer: ByteArray) {
offset += encryptedVolume.write(fileHandle, offset, buffer, 0, buffer.size.toLong())
}
})
)
)?.apply {
if (withAudio) {
withAudioEnabled()
}
}?.start(executor) {
override fun write(buffer: ByteArray, size: Int) {
offset += encryptedVolume.write(fileHandle, offset, buffer, 0, size.toLong())
}
})
val pendingRecording = videoRecorder!!.prepareRecording(
this,
MuxerOutputOptions(FFmpegMuxer(writer))
).also {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M || ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
it.withAudioEnabled()
}
}
startTimerThen {
writer.start()
videoRecording = pendingRecording.start(executor) {
val buttons = arrayOf(binding.imageCaptureMode, binding.imageRatio, binding.imageTimer, binding.imageModeSwitch, binding.imageCameraSwitch)
when (it) {
is VideoRecordEvent.Start -> {
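The recording now writes through an AsynchronousSeekableWriter wrapped around an offset-tracking SeekableWriter backed by the encrypted volume, and writer.start() is only called once the timer has elapsed and the recording is about to begin. A standalone restatement of that inner writer, with a hypothetical Volume interface shaped after the calls above (not the real EncryptedVolume API):

    // Hypothetical stand-in for EncryptedVolume, shaped after the calls in this diff.
    interface Volume {
        fun write(fileHandle: Long, offset: Long, buffer: ByteArray, srcOffset: Int, length: Long): Long
        fun closeFile(fileHandle: Long): Boolean
    }

    // Sketch of the inline writer object above: seek just moves the logical offset,
    // write advances it by however many bytes the volume reports as written.
    class VolumeBackedWriter(private val volume: Volume, private val fileHandle: Long) : SeekableWriter {
        private var offset = 0L

        override fun write(buffer: ByteArray, size: Int) {
            offset += volume.write(fileHandle, offset, buffer, 0, size.toLong())
        }

        override fun seek(offset: Long) {
            this.offset = offset // the muxer may seek back while finalizing the mp4
        }

        override fun close() {
            volume.closeFile(fileHandle)
        }
    }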
@@ -0,0 +1,68 @@
package sushi.hardcore.droidfs.video_recording

import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.launch
import sushi.hardcore.droidfs.Constants
import java.nio.ByteBuffer

class AsynchronousSeekableWriter(private val internalWriter: SeekableWriter): SeekableWriter {

internal enum class Operation { WRITE, SEEK, CLOSE }

internal class Task(
val operation: Operation,
val buffer: ByteArray? = null,
val offset: Long? = null,
)

private val channel = Channel<Task>(Channel.UNLIMITED)

private fun flush(buffer: ByteBuffer) {
internalWriter.write(buffer.array(), buffer.position())
buffer.position(0)
}

fun start() {
CoroutineScope(Dispatchers.IO).launch {
val buffer = ByteBuffer.allocate(Constants.IO_BUFF_SIZE)
while (true) {
val task = channel.receive()
when (task.operation) {
Operation.WRITE -> {
if (task.buffer!!.size > buffer.remaining()) {
flush(buffer)
}
buffer.put(task.buffer)
}
Operation.SEEK -> {
if (buffer.position() > 0) {
flush(buffer)
}
internalWriter.seek(task.offset!!)
}
Operation.CLOSE -> {
if (buffer.position() > 0) {
flush(buffer)
}
internalWriter.close()
break
}
}
}
}
}

override fun write(buffer: ByteArray, size: Int) {
channel.trySend(Task(Operation.WRITE, buffer)).exceptionOrNull()?.let { throw it }
}

override fun seek(offset: Long) {
channel.trySend(Task(Operation.SEEK, offset = offset)).exceptionOrNull()?.let { throw it }
}

override fun close() {
channel.trySend(Task(Operation.CLOSE)).exceptionOrNull()?.let { throw it }
}
}
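The new AsynchronousSeekableWriter queues write/seek/close operations on an unbounded channel and replays them from a coroutine on Dispatchers.IO, coalescing small writes into a Constants.IO_BUFF_SIZE buffer, so the caller never blocks on encrypted-volume I/O. A minimal usage sketch (the backing writer is a stand-in, not code from the patch):

    // Minimal sketch: wrap any SeekableWriter and drain it asynchronously.
    fun demo(backing: SeekableWriter) {
        val writer = AsynchronousSeekableWriter(backing)
        writer.start()                        // launch the draining coroutine before data arrives
        writer.write(ByteArray(4096), 4096)   // enqueued; returns immediately
        writer.seek(0)                        // consumer flushes buffered bytes, then seeks the backing writer
        writer.close()                        // consumer flushes, closes the backing writer, and exits
    }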
@@ -7,7 +7,7 @@ import java.nio.ByteBuffer

class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
external fun allocContext(): Long
external fun addVideoTrack(formatContext: Long, bitrate: Int, width: Int, height: Int, orientationHint: Int): Int
external fun addVideoTrack(formatContext: Long, bitrate: Int, frameRate: Int, width: Int, height: Int, orientationHint: Int): Int
external fun addAudioTrack(formatContext: Long, bitrate: Int, sampleRate: Int, channelCount: Int): Int
external fun writeHeaders(formatContext: Long): Int
external fun writePacket(formatContext: Long, buffer: ByteArray, pts: Long, streamIndex: Int, isKeyFrame: Boolean)
@@ -54,6 +54,7 @@ class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
addVideoTrack(
formatContext!!,
bitrate,
mediaFormat.getInteger("frame-rate"),
mediaFormat.getInteger("width"),
mediaFormat.getInteger("height"),
orientation
@@ -82,7 +83,7 @@ class FFmpegMuxer(val writer: SeekableWriter): MediaMuxer {
}

fun writePacket(buff: ByteArray) {
writer.write(buff)
writer.write(buff, buff.size)
}
fun seek(offset: Long) {
writer.seek(offset)
@@ -1,7 +1,7 @@
package sushi.hardcore.droidfs.video_recording

interface SeekableWriter {
fun write(buffer: ByteArray)
fun write(buffer: ByteArray, size: Int)
fun seek(offset: Long)
fun close()
}
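write() now takes an explicit size so callers can pass a reused or only partially filled buffer and persist just its first size bytes, which is how AsynchronousSeekableWriter flushes its internal ByteBuffer. An illustrative implementation backed by a plain RandomAccessFile (for reference only; DroidFS writes through EncryptedVolume instead):

    import java.io.RandomAccessFile

    // Illustrative SeekableWriter writing to a regular file.
    class FileSeekableWriter(path: String) : SeekableWriter {
        private val file = RandomAccessFile(path, "rw")

        override fun write(buffer: ByteArray, size: Int) {
            file.write(buffer, 0, size) // only the first size bytes of a possibly reused buffer
        }

        override fun seek(offset: Long) {
            file.seek(offset)
        }

        override fun close() {
            file.close()
        }
    }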
@@ -3,9 +3,36 @@
#include <libavutil/channel_layout.h>
#include <libavutil/display.h>
#include <jni.h>
#include <android/log.h>

const char* LOG_TAG = "LIBMUX";
const size_t BUFF_SIZE = 4096;

int to_android_log_level(int level) {
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL: return ANDROID_LOG_FATAL;
case AV_LOG_ERROR: return ANDROID_LOG_ERROR;
case AV_LOG_WARNING: return ANDROID_LOG_WARN;
case AV_LOG_INFO: return ANDROID_LOG_INFO;
default: return ANDROID_LOG_UNKNOWN;
}
}

void log_callback(void *ptr, int level, const char *fmt, va_list vl)
{
char line[1024];
static int print_prefix = 1;
av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
__android_log_print(to_android_log_level(level), LOG_TAG, "%s", line);
}

void log_err(int result, const char* name) {
if (result < 0) {
__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "%s: %d", name, result);
}
}

struct Muxer {
JavaVM* jvm;
jobject thiz;
@@ -32,6 +59,8 @@ int64_t seek(void* opaque, int64_t offset, int whence) {
}

jlong Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_allocContext(JNIEnv *env, jobject thiz) {
av_log_set_callback(log_callback);
av_log_set_level(AV_LOG_INFO);
const AVOutputFormat *format = av_guess_format("mp4", NULL, NULL);
struct Muxer* muxer = malloc(sizeof(struct Muxer));
(*env)->GetJavaVM(env, &muxer->jvm);
@@ -50,17 +79,14 @@ JNIEXPORT jint JNICALL
Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addAudioTrack(JNIEnv *env, jobject thiz, jlong format_context, jint bitrate, jint sample_rate,
jint channel_count) {
const AVCodec* encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
AVCodecContext* codec_context = avcodec_alloc_context3(encoder);
av_channel_layout_default(&codec_context->ch_layout, channel_count);
codec_context->sample_rate = sample_rate;
codec_context->sample_fmt = encoder->sample_fmts[0];
codec_context->bit_rate = bitrate;
codec_context->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
stream->time_base.den = sample_rate;
stream->time_base.num = 1;
codec_context->flags = AV_CODEC_FLAG_GLOBAL_HEADER;
codec_context->time_base = (AVRational) {1, sample_rate};
avcodec_open2(codec_context, encoder, NULL);
AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
avcodec_parameters_from_context(stream->codecpar, codec_context);
avcodec_free_context(&codec_context);
return stream->index;
@@ -69,7 +95,9 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addAudioTrack(JNIEnv *e
JNIEXPORT jint JNICALL
Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *env, jobject thiz,
jlong format_context,
jint bitrate, jint width,
jint bitrate,
jint frame_rate,
jint width,
jint height,
jint orientation_hint) {
AVStream* stream = avformat_new_stream((AVFormatContext *) format_context, NULL);
@@ -78,6 +106,8 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *e
stream->codecpar->bit_rate = bitrate;
stream->codecpar->width = width;
stream->codecpar->height = height;
stream->codecpar->format = AV_PIX_FMT_YUVJ420P;
stream->time_base = (AVRational) {1, frame_rate};
uint8_t* matrix = av_stream_new_side_data(stream, AV_PKT_DATA_DISPLAYMATRIX, sizeof(int32_t) * 9);
av_display_rotation_set((int32_t *) matrix, orientation_hint);
return stream->index;
@@ -85,7 +115,10 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_addVideoTrack(JNIEnv *e

JNIEXPORT jint JNICALL
Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeHeaders(JNIEnv *env, jobject thiz, jlong format_context) {
return avformat_write_header((AVFormatContext *) format_context, NULL);
av_dump_format((AVFormatContext *) format_context, 0, NULL, 1);
int result = avformat_write_header((AVFormatContext *) format_context, NULL);
log_err(result, "avformat_write_header");
return result;
}

JNIEXPORT void JNICALL
@@ -100,20 +133,21 @@ Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writePacket(JNIEnv *env
r.num = 1;
r.den = 1000000;
packet->pts = av_rescale_q(pts, r, ((AVFormatContext*)format_context)->streams[stream_index]->time_base);
packet->dts = packet->pts;
uint8_t* buff = malloc(size);
(*env)->GetByteArrayRegion(env, buffer, 0, size, (signed char*)buff);
packet->data = buff;
if (is_key_frame) {
packet->flags = AV_PKT_FLAG_KEY;
}
av_write_frame((AVFormatContext *)format_context, packet);
log_err(av_write_frame((AVFormatContext *)format_context, packet), "av_write_frame");
free(buff);
av_packet_free(&packet);
}

JNIEXPORT void JNICALL
Java_sushi_hardcore_droidfs_video_1recording_FFmpegMuxer_writeTrailer(JNIEnv *env, jobject thiz, jlong format_context) {
av_write_trailer((AVFormatContext *) format_context);
log_err(av_write_trailer((AVFormatContext *) format_context), "av_write_trailer");
}

JNIEXPORT void JNICALL
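writePacket() receives timestamps in microseconds (time_base 1/1000000) and av_rescale_q converts them to the stream time_base, which addVideoTrack now sets to 1/frame_rate for the video stream. Ignoring av_rescale_q's overflow-safe rounding, the conversion reduces to pts * den / 1000000; a small worked sketch, assuming an audio time base of 1/sample_rate:

    // Rescale a microsecond timestamp to a stream time base of 1/den.
    fun rescalePts(ptsUs: Long, den: Int): Long = ptsUs * den / 1_000_000L

    fun main() {
        println(rescalePts(1_000_000L, 30))    // 1 s of video at time_base 1/30      -> 30
        println(rescalePts(500_000L, 44100))   // 0.5 s of audio at time_base 1/44100 -> 22050
    }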