import android.graphics.Bitmap
import android.media.MediaCodec
import android.media.MediaCodecInfo
import android.media.MediaCodecInfo.CodecCapabilities
import android.media.MediaCodecList
import android.media.MediaFormat
import android.media.MediaMuxer
import android.util.Log
import kotlinx.coroutines.DelicateCoroutinesApi
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.launch
import java.io.File
import java.io.IOException
import java.util.Queue
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.CountDownLatch

class BitmapToVideoEncoder(private val mCallback: IBitmapToVideoEncoderCallback) {
private var mOutputFile: File? = null
private var mEncodeQueue: Queue<Bitmap?> = ConcurrentLinkedQueue<Bitmap?>()
private var mediaCodec: MediaCodec? = null
private var mediaMuxer: MediaMuxer? = null
private val mFrameSync = Any()
private var mNewFrameLatch: CountDownLatch? = null
private var mGenerateIndex = 0
private var mTrackIndex = 0
private var mNoMoreFrames = false
private var mAbort = false
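// Configures an H.264 MediaCodec encoder and an MP4 MediaMuxer for the given size,
// then launches the encode loop on a GlobalScope coroutine. Frames are supplied
// afterwards via queueFrame().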
@OptIn(DelicateCoroutinesApi::class)
fun startEncoding(width: Int, height: Int, outputFile: File) {
println("DEBUG: Start Encoding")
mWidth = width
mHeight = height
mOutputFile = outputFile
val outputFileString: String = try {
outputFile.canonicalPath
} catch (e: IOException) {
Log.e(
TAG,
"Unable to get path for $outputFile"
)
return
}
val codecInfo = selectCodec()
if (codecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for $MIME_TYPE")
return
}
Log.d(TAG, "found codec: " + codecInfo.name)
val colorFormat: Int = try {
selectColorFormat(codecInfo)
} catch (e: java.lang.Exception) {
CodecCapabilities.COLOR_FormatYUV420Flexible
}
mediaCodec = try {
MediaCodec.createByCodecName(codecInfo.name)
} catch (e: IOException) {
Log.e(TAG, "Unable to create MediaCodec " + e.message)
return
}
val mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight)
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE)
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE)
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat)
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL)
mediaCodec!!.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
mediaCodec!!.start()
mediaMuxer = try {
MediaMuxer(outputFileString, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
} catch (e: IOException) {
Log.e(TAG, "MediaMuxer creation failed. " + e.message)
return
}
Log.d(TAG, "Initialization complete. Starting encoder...")
GlobalScope.launch {
try {
encode()
} catch(e: Exception) {
e.printStackTrace()
println("DEBUG: ${e.localizedMessage}")
}
}
}
fun stopEncoding() {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to stop encoding since it never started")
return
}
Log.d(TAG, "Stopping encoding")
// Let the encode loop drain what is left in the queue, and release the latch
// in case it is parked waiting for a new frame.
mNoMoreFrames = true
synchronized(mFrameSync) {
if (mNewFrameLatch != null && mNewFrameLatch!!.count > 0) {
mNewFrameLatch!!.countDown()
}
}
}
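// Called for every analysed frame; enqueues the bitmap and wakes the encode loop
// if it is currently waiting for a new frame.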
fun queueFrame(bitmap: Bitmap?) {
if (mediaCodec == null || mediaMuxer == null) {
Log.d(TAG, "Failed to queue frame. Encoding not started")
return
}
Log.d(TAG, "Queueing frame")
mEncodeQueue.add(bitmap)
synchronized(mFrameSync) {
if (mNewFrameLatch != null && mNewFrameLatch!!.count > 0) {
mNewFrameLatch!!.countDown()
}
}
}
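// Runs on the background coroutine: pulls bitmaps off the queue, converts each to a
// YUV buffer, feeds it to the encoder, and writes the encoded samples to the muxer.
// When the queue is empty it parks on a latch until queueFrame()/stopEncoding() wakes it.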
private fun encode() {
Log.d(TAG, "Encoder started")
while (true) {
if (mNoMoreFrames && mEncodeQueue.size == 0) break
var bitmap = mEncodeQueue.poll()
if (bitmap == null) {
synchronized(mFrameSync) {
mNewFrameLatch = CountDownLatch(1)
}
try {
mNewFrameLatch!!.await()
} catch (e: InterruptedException) {
e.printStackTrace()
println("DEBUG: (Encode) ${e.localizedMessage}")
}
bitmap = mEncodeQueue.poll()
}
if (bitmap == null) continue
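// Assumes the bitmap is exactly mWidth x mHeight; the YUV buffer handed to the
// encoder must match the configured frame size.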
val byteConvertFrame = getNV21(bitmap.width, bitmap.height, bitmap)
val TIMEOUT_USEC: Long = 500000
val inputBufIndex = mediaCodec!!.dequeueInputBuffer(TIMEOUT_USEC)
if (inputBufIndex >= 0) {
val inputBuffer = mediaCodec!!.getInputBuffer(inputBufIndex)
inputBuffer!!.clear()
inputBuffer.put(byteConvertFrame)
val presentationTimeUs = computePresentationTime(mGenerateIndex.toLong())
mediaCodec!!.queueInputBuffer(inputBufIndex, 0, byteConvertFrame.size, presentationTimeUs, 0)
mGenerateIndex++
}
val mBufferInfo = MediaCodec.BufferInfo()
val encoderStatus = mediaCodec!!.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC)
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
Log.e(TAG, "No output from encoder available")
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// happens exactly once, before the first encoded buffer: register the track and start the muxer
val newFormat = mediaCodec!!.outputFormat
mTrackIndex = mediaMuxer!!.addTrack(newFormat)
mediaMuxer!!.start()
} else if (encoderStatus < 0) {
Log.e(
TAG,
"unexpected result from encoder.dequeueOutputBuffer: $encoderStatus"
)
} else if (mBufferInfo.size != 0) {
val encodedData = mediaCodec!!.getOutputBuffer(encoderStatus)
if (encodedData == null) {
Log.e(
TAG,
"encoderOutputBuffer $encoderStatus was null"
)
} else {
encodedData.position(mBufferInfo.offset)
encodedData.limit(mBufferInfo.offset + mBufferInfo.size)
mediaMuxer!!.writeSampleData(mTrackIndex, encodedData, mBufferInfo)
mediaCodec!!.releaseOutputBuffer(encoderStatus, false)
}
}
}
release()
if (mAbort) {
mOutputFile!!.delete()
} else {
mCallback.onEncodingComplete(mOutputFile)
}
}
private fun release() {
if (mediaCodec != null) {
mediaCodec!!.stop()
mediaCodec!!.release()
mediaCodec = null
Log.d(TAG, "RELEASE CODEC")
}
if (mediaMuxer != null) {
mediaMuxer!!.stop()
mediaMuxer!!.release()
mediaMuxer = null
Log.d(TAG, "RELEASE MUXER")
}
}
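// Converts ARGB pixels to a 4:2:0 semi-planar YUV buffer: a full-resolution Y plane
// followed by one interleaved chroma pair per 2x2 block of pixels.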
private fun getNV21(inputWidth: Int, inputHeight: Int, bitmap: Bitmap): ByteArray {
val argb = IntArray(inputWidth * inputHeight)
bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight)
val yuv = ByteArray(inputWidth * inputHeight * 3 / 2)
var yIndex = 0
var uvIndex = inputWidth * inputHeight
var index = 0
for (j in 0 until inputHeight) {
for (i in 0 until inputWidth) {
val r = (argb[index] and 0xff0000) shr 16
val g = (argb[index] and 0xff00) shr 8
val b = argb[index] and 0xff
// Y channel. Note the parentheses: Kotlin's infix shr binds more loosely than +,
// so the shift must be grouped before adding the offset.
yuv[yIndex++] = (((66 * r + 129 * g + 25 * b + 128) shr 8) + 16).toByte()
// UV channel
if (j % 2 == 0 && i % 2 == 0) {
yuv[uvIndex++] = (((-38 * r - 74 * g + 112 * b + 128) shr 8) + 128).toByte()
yuv[uvIndex++] = (((112 * r - 94 * g - 18 * b + 128) shr 8) + 128).toByte()
}
index++
}
}
return yuv
}
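// Maps a frame index to a presentation timestamp in microseconds at FRAME_RATE fps,
// with a small constant offset so the first timestamp is non-zero.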
private fun computePresentationTime(frameIndex: Long): Long {
return 132 + frameIndex * 1000000 / FRAME_RATE
}
companion object {
private val TAG = BitmapToVideoEncoder::class.java.simpleName
private const val MIME_TYPE = "video/avc" // H.264 Advanced Video Coding
private var mWidth = 0
private var mHeight = 0
private const val BIT_RATE = 16000000
private const val FRAME_RATE = 30 // Frames per second
private const val I_FRAME_INTERVAL = 1
private fun selectCodec(): MediaCodecInfo? {
return MediaCodecList(MediaCodecList.ALL_CODECS).codecInfos.firstOrNull { codecInfo ->
codecInfo.isEncoder && codecInfo.supportedTypes.any { it.equals(MIME_TYPE, ignoreCase = true) }
}
}
private fun selectColorFormat(codecInfo: MediaCodecInfo): Int {
codecInfo.getCapabilitiesForType(MIME_TYPE).colorFormats.forEach { colorFormat ->
if (isRecognizedFormat(colorFormat)) {
return colorFormat
}
}
throw IllegalArgumentException("Unsupported color format")
}
private fun isRecognizedFormat(colorFormat: Int): Boolean {
return colorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible
}
}
}
I'm using CameraX: I analyse each frame, convert it to a Bitmap, and want to encode those bitmaps into a video the user can watch at the end. The file I get back looks corrupted. It is sort of working, though: when I wave my hand in front of the camera while recording, a distorted hand is visible in the output.
I've tried changing the size of the video, but I expect the file to come back playable and undistorted.
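For reference, this is roughly how I drive the encoder from the ImageAnalysis use case. It's a simplified sketch rather than my exact code: the resolution, file name, the assumption that IBitmapToVideoEncoderCallback only declares onEncodingComplete(File?), and the imageProxy.toBitmap() call are stand-ins for what I actually do.

import android.content.Context
import androidx.camera.core.ImageAnalysis
import java.io.File
import java.util.concurrent.Executors

// Returns the encoder so the caller can invoke stopEncoding() when the user stops recording.
fun startRecording(context: Context, imageAnalysis: ImageAnalysis): BitmapToVideoEncoder {
    // Assumes IBitmapToVideoEncoderCallback only declares onEncodingComplete(File?).
    val encoder = BitmapToVideoEncoder(object : IBitmapToVideoEncoderCallback {
        override fun onEncodingComplete(outputFile: File?) {
            // hand the finished file to the playback screen
        }
    })
    // The queued bitmaps must have exactly this width and height.
    encoder.startEncoding(1280, 720, File(context.filesDir, "analysis.mp4"))

    imageAnalysis.setAnalyzer(Executors.newSingleThreadExecutor()) { imageProxy ->
        // toBitmap() stands in for the real frame-to-Bitmap conversion
        encoder.queueFrame(imageProxy.toBitmap())
        imageProxy.close()
    }
    return encoder
}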