Seeking through a video with MediaCodec and GLSurfaceView


I'm trying to implement seeking through a video file with a MediaCodec decoder that renders to a GLSurfaceView. The solution is based on the Bigflake examples and fadden's comments. It works fine with a SurfaceView, but with a GLSurfaceView I'm running into trouble: the rendered frame is always black.

The view implementation:

class GLVideoView @JvmOverloads constructor(
    context: Context,
    attrs: AttributeSet? = null
) : GLSurfaceView(context, attrs),
    SurfaceTexture.OnFrameAvailableListener {

    private var outputSurface: OutputSurface? = null
    private var videoPlayer: VideoPlayer? = null

    private var videoFilePath: String? = null
    private var videoDuration: Int = 0
    private var videoWidth = 0
    private var videoHeight = 0

    private val renderer: Renderer

    init {
        setEGLContextClientVersion(2)
        renderer = VideoRender()
        setRenderer(renderer)
        renderMode = RENDERMODE_WHEN_DIRTY
    }

    // region Public API

    fun setVideoSource(videoFilePath: String) {
        this.videoFilePath = videoFilePath

        val metadataRetriever = MediaMetadataRetriever().apply { setDataSource(videoFilePath) }
        videoDuration = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION).toInt()
        videoWidth = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH).toInt()
        videoHeight = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT).toInt()
        try {
            val rotation = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION).toInt()
            if (rotation == 90 || rotation == 270) {
                val temp = videoWidth
                videoWidth = videoHeight
                videoHeight = temp
            }
        } catch (e: Exception) {
            // ignore
        }
    }

    override fun onMeasure(widthMeasureSpec: Int, heightMeasureSpec: Int) {
        ...
    }

    override fun onFrameAvailable(st: SurfaceTexture) {
        L.debug { "onFrameAvailable()" }
        outputSurface?.updateTextureImage()
        requestRender()
    }

    // endregion

    // region Private API

    private fun initVideoPlayer() {
        val filePath  = videoFilePath ?: throw IllegalStateException("No video source!")

        outputSurface = OutputSurface(this)
        val surface = outputSurface?.surface ?: throw IllegalStateException("No surface created!")

        videoPlayer = VideoPlayer(filePath, outputSurface!!).apply { initialize(surface) }
    }

    // endregion

    companion object {
        private val L = Logger()
    }

    private inner class VideoRender : Renderer {
        override fun onDrawFrame(gl: GL10?) {
            L.debug { "onDrawFrame()" }
            outputSurface?.drawImage()
        }

        override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
            GLES20.glViewport(0, 0, width, height)
        }

        override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
            if (videoPlayer == null) {
                initVideoPlayer()
            }
        }
    }
}

OutputSurface comes from Bigflake, as does TextureRenderer link

Here is the basic decoder implementation:

internal class GLSyncVideoDecoder(
    private val mediaExtractor: VideoExtractor,
    private val outputSurface: OutputSurface
) : VideoFrameDecoder {

    private lateinit var mediaCodec: MediaCodec

    private lateinit var taskHandler: Handler
    private val uiHandler: Handler = Handler(Looper.getMainLooper())

    @Volatile
    private var isRunning = false

    @Throws(IOException::class)
    override fun initCodec(
        outSurface: Surface,
        inputFormat: MediaFormat,
        handlerThread: HandlerThread
    ): Boolean {
        taskHandler = Handler(handlerThread.looper)

        val mime = inputFormat.getString(MediaFormat.KEY_MIME) ?: return false

        mediaCodec = MediaCodec.createDecoderByType(mime).apply {
            configure(inputFormat, outSurface, null, 0)
            start()
        }

        return true
    }

    override fun decodeFrameAt(timeUs: Long) {
        if (isRunning) {
            L.debug { "!@# Skip 'seekTo()' at time: $timeUs" }
            return
        }

        isRunning = true
        taskHandler.post {
            mediaCodec.flush()

            seekTo(timeUs, mediaCodec)

            isRunning = false
        }
    }

    private fun seekTo(timeUs: Long, decoder: MediaCodec) {
        var outputDone = false
        var inputDone = false

        mediaExtractor.seekTo(timeUs, MediaExtractor.SEEK_TO_PREVIOUS_SYNC)

        val bufferInfo = MediaCodec.BufferInfo()

        outerloop@ while (true) {
            var ptUs = 0L
            // Feed more data to the decoder.
            if (!inputDone) {
                val inputBufIndex = decoder.dequeueInputBuffer(1000)
                if (inputBufIndex >= 0) {
                    val inputBuf = decoder.getInputBuffer(inputBufIndex)
                    val chunkSize =  mediaExtractor.readSampleData(inputBuf!!, 0)

                    if (chunkSize < 0) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(
                            inputBufIndex,
                            0,
                            0,
                            0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM
                        )
                        inputDone = true
                    } else {
                        val presentationTimeUs = mediaExtractor.sampleTime
                        val flags = mediaExtractor.sampleFlags
                        ptUs = presentationTimeUs
                        decoder.queueInputBuffer(
                            inputBufIndex,
                            0,
                            chunkSize,
                            presentationTimeUs,
                            flags
                        )
                        mediaExtractor.advance()
                    }
                }
            }

            if (!outputDone) {
                val decoderStatus = decoder.dequeueOutputBuffer(bufferInfo, 1000)
                when {
                    decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER -> { }
                    decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED -> { }
                    decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> { }
                    decoderStatus < 0 -> throw RuntimeException("unexpected result from decoder.dequeueOutputBuffer: $decoderStatus")
                    else -> { // decoderStatus >= 0
                        if ((bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            outputDone = true
                            break@outerloop
                        }

                        val presentationTimeUs = bufferInfo.presentationTimeUs
                        val validFrame = presentationTimeUs >= timeUs
                        val doRender = (bufferInfo.size != 0) && validFrame

                        decoder.releaseOutputBuffer(decoderStatus, doRender)
                        if (doRender) {
                            break@outerloop
                        }
                    }
                }
            }
        }
    }

    ...
}

How can I make TextureRenderer draw correctly onto the GLSurfaceView? What am I doing wrong? Is the OpenGL drawing incorrect, or is the GLSurfaceView simply not linked to the MediaCodec output surface?

android surfaceview mediacodec glsurfaceview grafika
1 Answer
0 votes

In the end I found the answer to my problem. I followed the code in VideoSurfaceView.java (create the OutputSurface on the Renderer thread, and call updateTexImage() on the renderer's SurfaceTexture from onDrawFrame()).
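For reference, a rough sketch of how the Renderer inside GLVideoView could look after that change. It assumes a TextureRenderer helper in the spirit of the Bigflake one, exposing surfaceCreated(), textureId and drawFrame(SurfaceTexture), and an initVideoPlayer(Surface) variant of the method shown in the question; those names are placeholders, not an exact API.

```kotlin
private inner class VideoRender : Renderer, SurfaceTexture.OnFrameAvailableListener {

    // Assumed helper in the spirit of Bigflake's TextureRenderer.
    private lateinit var textureRenderer: TextureRenderer
    private lateinit var surfaceTexture: SurfaceTexture

    @Volatile
    private var frameAvailable = false

    override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
        // Create the external texture and SurfaceTexture on the GL thread,
        // so they belong to the GLSurfaceView's EGL context.
        textureRenderer = TextureRenderer().apply { surfaceCreated() }
        surfaceTexture = SurfaceTexture(textureRenderer.textureId).apply {
            setOnFrameAvailableListener(this@VideoRender)
        }
        // Hand the Surface backed by this SurfaceTexture to the decoder
        // (hypothetical variant of initVideoPlayer() taking the Surface).
        initVideoPlayer(Surface(surfaceTexture))
    }

    override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
        GLES20.glViewport(0, 0, width, height)
    }

    override fun onDrawFrame(gl: GL10?) {
        // Latch the latest decoded frame on the GL thread, then draw it.
        synchronized(this) {
            if (frameAvailable) {
                surfaceTexture.updateTexImage()
                frameAvailable = false
            }
        }
        textureRenderer.drawFrame(surfaceTexture)
    }

    override fun onFrameAvailable(st: SurfaceTexture) {
        // Called on an arbitrary thread; only flag the frame and wake the renderer.
        synchronized(this) { frameAvailable = true }
        requestRender()
    }
}
```

The key point is that the SurfaceTexture, its texture and every updateTexImage() call all live on the GL thread; onFrameAvailable() only raises a flag and triggers requestRender().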

Hopefully this helps someone in the future. Thanks for your attention :)
