mediaExtractor.setDataSource(dataSource)
// Make sure the data source actually contains a video track.
val trackIndex = mediaExtractor.selectVideoTrack()
if (trackIndex < 0) {
    throw RuntimeException("this data source not video")
}
mediaExtractor.selectTrack(trackIndex)

/**
 * Scans every track of this extractor and returns the index of the first
 * video track (MIME type starting with "video/"), or -1 when none exists.
 */
fun MediaExtractor.selectVideoTrack(): Int {
    for (i in 0 until trackCount) {
        val format = getTrackFormat(i)
        // KEY_MIME may be missing, in which case getString returns null;
        // the original called startsWith on it directly and could NPE on a
        // malformed track. Guard with a safe call instead.
        if (format.getString(MediaFormat.KEY_MIME)?.startsWith("video/") == true) {
            return i
        }
    }
    return -1
}
// Allocate an OutputSurface only when decoding to a Surface; in buffer mode
// we decode to accessible Images, so no surface is needed.
outputSurface = when {
    isSurface -> OutputSurface(mediaFormat.width, mediaFormat.height)
    else -> null
}
// Request COLOR_FormatYUV420Flexible as the output frame format; nearly every
// hardware decoder supports it.
if (decoder.codecInfo.getCapabilitiesForType(mediaFormat.mime).isSupportColorFormat(defDecoderColorFormat)) {
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, defDecoderColorFormat)
    // outputSurface?.surface is null in buffer mode, so decoded frames arrive
    // as CPU-accessible Images instead of being rendered to a Surface.
    decoder.configure(mediaFormat, outputSurface?.surface, null, 0)
} else {
    throw RuntimeException("this mobile not support YUV 420 Color Format")
}
val startTime = System.currentTimeMillis()
Log.d(TAG, "start decode frames")
isStart = true
val bufferInfo = MediaCodec.BufferInfo()
// Set once the extractor has no more samples to feed the decoder.
var inputEnd = false
// Set once the decoder has drained its last output buffer.
var outputEnd = false
decoder.start()
var outputFrameCount = 0
// Input half of the decode loop: feed encoded samples from the extractor into
// the codec's input buffers until the stream is exhausted. (The matching
// output-drain half of this while loop continues beyond this excerpt.)
while (!outputEnd && isStart) {
    if (!inputEnd) {
        val inputBufferId = decoder.dequeueInputBuffer(DEF_TIME_OUT)
        if (inputBufferId >= 0) {
            // Obtain a writable input buffer from the codec.
            val inputBuffer = decoder.getInputBuffer(inputBufferId)
            // Read one encoded sample via MediaExtractor.
            val sampleSize = videoAnalyze.mediaExtractor.readSampleData(inputBuffer, 0)
            if (sampleSize < 0) {
                // No more samples: signal end-of-stream to the decoder.
                decoder.queueInputBuffer(inputBufferId, 0, 0, 0L,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM)
                inputEnd = true
            } else {
                // Queue the sample with its presentation timestamp, then
                // advance the extractor to the next sample.
                val presentationTimeUs = videoAnalyze.mediaExtractor.sampleTime
                decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0)
                videoAnalyze.mediaExtractor.advance()
            }
        }
    }
YuvImage的封装,官方文档有这样一段描述:Currently only ImageFormat.NV21 and ImageFormat.YUY2 are supported。 YuvImage只支持NV21或者YUY2格式,所以还可能需要对Image的原始数据作进一步处理,将其转换为NV21的Byte数组
/**
 * Converts this [Image] (flexible YUV_420_888) into an NV21-ordered byte
 * array: all Y bytes first, then interleaved V/U chroma — the layout
 * [android.graphics.YuvImage] requires. Only pixels inside [Image.getCropRect]
 * are copied.
 *
 * @throws RuntimeException when the image format is not supported.
 */
fun Image.getDataByte(): ByteArray {
    val format = format
    if (!isSupportFormat()) {
        throw RuntimeException("image can not support format is $format")
    }
    // Only the pixels inside cropRect are valid.
    val rect = cropRect
    val width = rect.width()
    val height = rect.height()
    val planes = planes
    val data = ByteArray(width * height * ImageFormat.getBitsPerPixel(format) / 8)
    val rowData = ByteArray(planes[0].rowStride)

    var channelOffset = 0
    var outputStride = 1
    for (i in planes.indices) {
        when (i) {
            0 -> { // Y plane: densely packed at the start of the output.
                channelOffset = 0
                outputStride = 1
            }
            1 -> { // U plane: NV21 stores V first, so U starts one byte later.
                channelOffset = width * height + 1
                outputStride = 2
            }
            2 -> { // V plane: begins immediately after the Y data.
                channelOffset = width * height
                outputStride = 2
            }
        }

        // NOTE: the plane's ByteBuffer position initially points at its end.
        val buffer = planes[i].buffer
        // Byte distance between the starts of two consecutive rows.
        val rowStride = planes[i].rowStride
        // Byte distance between two consecutive samples within a row.
        val pixelStride = planes[i].pixelStride

        // Chroma planes are subsampled by 2 in both dimensions.
        val shift = if (i == 0) 0 else 1
        val w = width.shr(shift)
        val h = height.shr(shift)
        // BUG FIX: the horizontal crop offset must be MULTIPLIED by pixelStride.
        // The original computed `... + pixelStride + (rect.left shr shift)`,
        // which mis-positions every plane whenever pixelStride > 1 or the crop
        // rect does not start at the left edge.
        buffer.position(rowStride * (rect.top.shr(shift)) + pixelStride * (rect.left.shr(shift)))
        for (row in 0 until h) {
            var length: Int
            if (pixelStride == 1 && outputStride == 1) {
                // Fast path: the row is packed, copy it in one bulk get.
                length = w
                // The buffer may hold fewer than `length` remaining bytes on
                // the last row, so use the non-throwing bulk get helper.
                buffer.getNoException(data, channelOffset, length)
                channelOffset += length
            } else {
                // Strided path: copy the raw row, then pick every
                // pixelStride-th byte into the interleaved output.
                length = (w - 1) * pixelStride + 1
                buffer.getNoException(rowData, 0, length)
                for (col in 0 until w) {
                    data[channelOffset] = rowData[col * pixelStride]
                    channelOffset += outputStride
                }
            }

            // Skip the row padding — except after the final row, where the
            // padding may not exist in the buffer at all.
            if (row < h - 1) {
                buffer.position(buffer.position() + rowStride - length)
            }
        }
    }
    return data
}
最后封装 YuvImage 并压缩为文件:
// Wrap the NV21 bytes in a YuvImage and compress the crop region to a JPEG file.
val rect = image.cropRect
val yuvImage = YuvImage(image.getDataByte(), ImageFormat.NV21, rect.width(), rect.height(), null)
// use {} guarantees the stream is closed even when compressToJpeg throws;
// the original closed it unconditionally and leaked the stream on failure.
fileOutputStream.use {
    yuvImage.compressToJpeg(rect, 100, it)
}