Android 獲取視頻縮略圖(獲取視頻每幀數(shù)據(jù))的優(yōu)化方案

代碼位置 MediaMetadataRetriever Wrapper

速度對(duì)比

左邊的圖片是通過(guò)方式1
右邊的圖片是通過(guò)方式2

speed.gif

速度優(yōu)化,效果拔群。

  • 在縮小2倍的Bitmap輸出情況下
    使用MediaMetadataRetriever 抽幀的速度,每幀穩(wěn)定在 300ms左右。
    使用MediaCodec+ImageReader 第一次抽幀。大概是200ms ,后續(xù)每幀則是50ms左右。

注意:如果不縮小圖片的話,建議還是使用MediaMetadataRetriever。
使用當(dāng)前庫(kù)的話,調(diào)用metadataRetriever.forceFallBack(true);

方案

1. 通過(guò)MediaMetadataRetriever來(lái)進(jìn)行獲取

代碼較為簡(jiǎn)單,就是一個(gè)循環(huán)

 MediaMetadataRetriever metadataRetriever = new MediaMetadataRetriever();
                metadataRetriever.setDataSource(fileName);

                String duration = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
                Log.d(TAG, "duration = " + duration);
                int durationMs = Integer.parseInt(duration);

                // Grab one frame per second of video (i is in milliseconds).
                for (int i = 0; i < durationMs; i += 1000) {
                    long start = System.nanoTime();
                    Log.d(TAG, "getFrameAtTime time = " + i);
                    // getFrameAtTime() expects MICROseconds, so convert ms -> us.
                    // Use a long multiply (1000L): an int multiply overflows for
                    // videos longer than ~35.8 minutes.
                    Bitmap frameAtIndex = metadataRetriever.getFrameAtTime(i * 1000L);
                    if (frameAtIndex == null) {
                        // getFrameAtTime() may return null (no frame at/near this
                        // position); skip instead of crashing on getWidth().
                        continue;
                    }
                    // Downscale 8x for the thumbnail strip, then free the full-size frame.
                    Bitmap frame = Bitmap.createScaledBitmap(frameAtIndex, frameAtIndex.getWidth() / 8, frameAtIndex.getHeight() / 8, false);
                    frameAtIndex.recycle();
                    long end = System.nanoTime();
                    long cost = end - start;
                    Log.d(TAG, "cost time in millis = " + (cost * 1f / 1000000));

                    if (callBack != null) {
                        callBack.onComplete(frame);
                    }
                }
                metadataRetriever.release();

2. 通過(guò)MediaCodec和ImageReader進(jìn)行獲取

就是通過(guò)Surface,用MediaExtractor讀取樣本數(shù)據(jù),交給MediaCodec解碼,再將解碼后的數(shù)據(jù)傳遞給ImageReader來(lái)進(jìn)行顯示。

 MediaExtractor extractor = null;
                MediaCodec codec = null;
                try {
                    extractor = new MediaExtractor();
                    extractor.setDataSource(fileName);
                    int trackCount = extractor.getTrackCount();
                    MediaFormat videoFormat = null;
                    // Find and select the first video track.
                    for (int i = 0; i < trackCount; i++) {
                        MediaFormat trackFormat = extractor.getTrackFormat(i);
                        if (trackFormat.getString(MediaFormat.KEY_MIME).contains("video")) {
                            videoFormat = trackFormat;
                            extractor.selectTrack(i);
                            break;
                        }
                    }
                    if (videoFormat == null) {
                        Log.d(TAG, "Can not get video format");
                        return;
                    }

                    // Ask the decoder for flexible YUV420 output and a 1/4-size frame.
                    // NOTE(review): whether the decoder honors the reduced
                    // KEY_WIDTH/KEY_HEIGHT is device-dependent — confirm on targets.
                    int imageFormat = ImageFormat.YUV_420_888;
                    int colorFormat = COLOR_FormatYUV420Flexible;
                    videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
                    videoFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getInteger(MediaFormat.KEY_WIDTH) / 4);
                    videoFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getInteger(MediaFormat.KEY_HEIGHT) / 4);

                    long duration = videoFormat.getLong(MediaFormat.KEY_DURATION);

                    // Decode into the ImageReader's Surface; frames are delivered
                    // to MyOnImageAvailableListener on the reader's own thread.
                    codec = MediaCodec.createDecoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
                    ImageReader imageReader = ImageReader
                            .newInstance(
                                    videoFormat.getInteger(MediaFormat.KEY_WIDTH),
                                    videoFormat.getInteger(MediaFormat.KEY_HEIGHT),
                                    imageFormat,
                                    3);
                    final ImageReaderHandlerThread imageReaderHandlerThread = new ImageReaderHandlerThread();

                    imageReader.setOnImageAvailableListener(new MyOnImageAvailableListener(callBack), imageReaderHandlerThread.getHandler());
                    codec.configure(videoFormat, imageReader.getSurface(), null, 0);
                    codec.start();
                    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                    long timeOut = 5 * 1000;// dequeue timeout is in MICROseconds: 5,000 us = 5 ms
                    boolean inputDone = false;
                    boolean outputDone = false;
                    ByteBuffer[] inputBuffers = null;
                    // Pre-Lollipop has no getInputBuffer(int); cache the buffer array.
                    if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
                        inputBuffers = codec.getInputBuffers();
                    }
                    // Main decode loop: feed compressed samples in, release decoded
                    // buffers to the Surface until EOS is seen on the output side.
                    int count = 1;
                    while (!outputDone) {
                        if (requestStop) {
                            return;
                        }
                        if (!inputDone) {
                            //feed data
                            int inputBufferIndex = codec.dequeueInputBuffer(timeOut);
                            if (inputBufferIndex >= 0) {
                                ByteBuffer inputBuffer;
                                if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                                    inputBuffer = codec.getInputBuffer(inputBufferIndex);
                                } else {
                                    inputBuffer = inputBuffers[inputBufferIndex];
                                }
                                int sampleData = extractor.readSampleData(inputBuffer, 0);
                                if (sampleData > 0) {
                                    long sampleTime = extractor.getSampleTime();
                                    codec.queueInputBuffer(inputBufferIndex, 0, sampleData, sampleTime, 0);
                                    // Advance the input position: either step to the next
                                    // sample, or seek forward by `interval` ms per frame
                                    // (landing on the previous sync/key frame).
                                    if (interval == 0) {
                                        extractor.advance();
                                    } else {
                                        extractor.seekTo(count * interval * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
                                        count++;
// 									   extractor.advance();
                                    }
                                } else {
                                    // size <= 0 means the extractor is exhausted: signal EOS.
                                    codec.queueInputBuffer(inputBufferIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                                    inputDone = true;
                                    Log.d(TAG, "end of stream");
                                }
                            }
                        }
                        if (!outputDone) {
                            //get data
                            int status = codec.dequeueOutputBuffer(bufferInfo, timeOut);
                            if (status ==
                                    MediaCodec.INFO_TRY_AGAIN_LATER) {
                                // No output available yet; loop again.
                            } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                                // Output format is now known; nothing to do for Surface output.
                            } else if (status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                                // Deprecated signal; nothing to do either.
                            } else {
                                // A real output buffer. Check decoder state first.
                                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                                    Log.d(TAG, "output EOS");
                                    outputDone = true;
                                }
                                boolean doRender = (bufferInfo.size != 0);
                                long presentationTimeUs = bufferInfo.presentationTimeUs;
                                // Throttle rendering: only render frames whose PTS is at
                                // least `interval` ms after the last rendered frame.
                                if (lastPresentationTimeUs == 0) {
                                    lastPresentationTimeUs = presentationTimeUs;
                                } else {
                                    long diff = presentationTimeUs - lastPresentationTimeUs;
                                    if (interval != 0) {
                                        if (diff < interval * 1000) {
                                            doRender = false;
                                        } else {
                                            lastPresentationTimeUs = presentationTimeUs;
                                        }
                                        Log.d(TAG,
                                                "diff time in ms =" + diff / 1000);
                                    }
                                }
                                // Data goes straight to the Surface, so no copying here.
                                Log.d(TAG, "surface decoder given buffer " + status +
                                        " (size=" + bufferInfo.size + ")" + ",doRender = " + doRender + ", presentationTimeUs=" + presentationTimeUs);
                                // Releasing with doRender=true pushes the frame to the Surface.
                                codec.releaseOutputBuffer(status, doRender);
                            }
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (codec != null) {
                        codec.stop();
                        codec.release();
                    }
                    if (extractor != null) {
                        extractor.release();
                    }
                }
            }

通過(guò)libyuv進(jìn)行數(shù)據(jù)的轉(zhuǎn)換

private static class MyOnImageAvailableListener implements ImageReader.OnImageAvailableListener {
        // Receives decoded YUV frames from the ImageReader, converts each one to a
        // downscaled ARGB Bitmap via libyuv, and hands it to the callback.
        private final BitmapCallBack callBack;

        private MyOnImageAvailableListener(BitmapCallBack callBack) {
            this.callBack = callBack;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            Log.i(TAG, "in OnImageAvailable");
            Image img = null;
            try {
                // acquireLatestImage() drops stale frames, keeping only the newest.
                img = reader.acquireLatestImage();
                if (img != null) {
                    // The Image holds YUV planes; convert them into a Bitmap.
                    Image.Plane[] planes = img.getPlanes();
                    if (planes[0].getBuffer() == null) {
                        return;
                    }

// 					Bitmap bitmap = getBitmap(img);
                    Bitmap bitmap = getBitmapScale(img, 8);
// 					Bitmap bitmap = getBitmapFromNv21(img);
                    if (callBack != null && bitmap != null) {
                        Log.d(TAG, "onComplete bitmap ");
                        callBack.onComplete(bitmap);
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Always close the Image, or the ImageReader queue fills and stalls.
                if (img != null) {
                    img.close();
                }
            }

        }
        
        /**
         * Converts the YUV Image to an ARGB_8888 Bitmap downscaled by {@code scale}.
         * NOTE(review): width/height are deliberately swapped in createBitmap —
         * this appears to match the 90-degree rotation done in the native
         * conversion (libyuv::I420Rotate with kRotate90); confirm against the
         * JNI implementation before changing.
         */
        @NonNull
        private Bitmap getBitmapScale(Image img, int scale) {
            int width = img.getWidth() / scale;
            int height = img.getHeight() / scale;
            final byte[] bytesImage = getDataFromYUV420Scale(img, scale);
            Bitmap bitmap = null;
            bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(ByteBuffer.wrap(bytesImage));
            return bitmap;
        }

        /**
         * Runs the native libyuv pipeline (scale + rotate + I420→ABGR) and returns
         * the packed pixel bytes, sized (width/scale) * (height/scale) * 4.
         */
        private byte[] getDataFromYUV420Scale(Image image, int scale) {
            int width = image.getWidth();
            int height = image.getHeight();
            // Read image data
            Image.Plane[] planes = image.getPlanes();

            byte[] argb = new byte[width / scale * height / scale * 4];

            // Note: when the Java side expects bytes in RGBA order, libyuv names
            // that layout ABGR — libyuv's naming is byte-reversed relative to
            // Bitmap's RGBA naming. So libyuv::I420ToABGR is the correct call
            // to produce pixels a Bitmap reads as RGBA.
            YuvUtils.yuvI420ToABGRWithScale(
                    argb,
                    planes[0].getBuffer(), planes[0].getRowStride(),
                    planes[1].getBuffer(), planes[1].getRowStride(),
                    planes[2].getBuffer(), planes[2].getRowStride(),
                    width, height,
                    scale
            );
            return argb;
        }
}

libyuv


extern "C"
JNIEXPORT void JNICALL
// Scales an I420 frame down by `scale`, rotates it 90 degrees, and converts it
// to libyuv ABGR (i.e. RGBA byte order, as a Bitmap reads it) into `argb_`.
// The Y/U/V jobjects must be direct ByteBuffers (Image.Plane.getBuffer()).
// `argb_` must hold (width/scale) * (height/scale) * 4 bytes.
// NOTE(review): plane pixelStride is ignored; assumes tightly packed planar
// U/V (pixelStride == 1) — verify for devices that return semi-planar data.
Java_com_example_yuv_YuvUtils_yuvI420ToABGRWithScale(JNIEnv *env, jclass type, jbyteArray argb_,
                                                     jobject y_buffer, jint y_rowStride,
                                                     jobject u_buffer, jint u_rowStride,
                                                     jobject v_buffer, jint v_rowStride,
                                                     jint width, jint height,
                                                     jint scale) {
    jbyte *argb = env->GetByteArrayElements(argb_, NULL);

    uint8_t *srcYPtr = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(y_buffer));
    uint8_t *srcUPtr = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(u_buffer));
    uint8_t *srcVPtr = reinterpret_cast<uint8_t *>(env->GetDirectBufferAddress(v_buffer));

    // Scratch buffer for the downscaled I420 frame (Y plane followed by U, V;
    // chroma planes are each 1/4 of the luma plane).
    int scaleW = width / scale;
    int scaleH = height / scale;
    int scaleSize = scaleW * scaleH;
    jbyte *temp_y_scale = new jbyte[scaleSize * 3 / 2];
    jbyte *temp_u_scale = temp_y_scale + scaleSize;
    jbyte *temp_v_scale = temp_y_scale + scaleSize + scaleSize / 4;

    libyuv::I420Scale(
            srcYPtr, y_rowStride,
            srcUPtr, u_rowStride,
            srcVPtr, v_rowStride,
            width, height,
            (uint8_t *) temp_y_scale, scaleW,
            (uint8_t *) temp_u_scale, scaleW >> 1,
            (uint8_t *) temp_v_scale, scaleW >> 1,
            scaleW, scaleH,
            libyuv::kFilterNone
    );

    // Second scratch buffer for the rotated frame. After kRotate90 the image is
    // `height` pixels wide and `width` pixels tall, hence the swapped strides
    // and dimensions below.
    width = scaleW;
    height = scaleH;
    jbyte *temp_y = new jbyte[width * height * 3 / 2];
    jbyte *temp_u = temp_y + width * height;
    jbyte *temp_v = temp_y + width * height + width * height / 4;

    libyuv::I420Rotate(
            (uint8_t *) temp_y_scale, scaleW,
            (uint8_t *) temp_u_scale, scaleW >> 1,
            (uint8_t *) temp_v_scale, scaleW >> 1,

            (uint8_t *) temp_y, height,
            (uint8_t *) temp_u, height >> 1,
            (uint8_t *) temp_v, height >> 1,

            width, height,
            libyuv::kRotate90
    );

    // Rotated frame: width = `height`, height = `width`; row stride = height * 4.
    libyuv::I420ToABGR(
            (uint8_t *) temp_y, height,
            (uint8_t *) temp_u, height >> 1,
            (uint8_t *) temp_v, height >> 1,

            (uint8_t *) argb, height * 4,
            height, width
    );

    // FIX: both scratch buffers were previously leaked on every frame.
    delete[] temp_y_scale;
    delete[] temp_y;

    // Mode 0 copies the pixel data back into the Java array and frees the pin.
    env->ReleaseByteArrayElements(argb_, argb, 0);
}

后續(xù)

將文件通過(guò)MediaCodec解碼。 輸出到ImageReader當(dāng)中。來(lái)獲取截圖。

使用MediaMetadataRetriever的方式速度較慢,因?yàn)闊o(wú)法配置輸出圖片的大小。

但當(dāng)我們只需要生成小圖預(yù)覽的時(shí)候,如果我們事先做了縮放的處理,就能得到很快的速度。

不足

需要對(duì)原來(lái)MediaMetadataRetriever的原理探究

最后編輯于
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請(qǐng)聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時(shí)請(qǐng)結(jié)合常識(shí)與多方信息審慎甄別。
平臺(tái)聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點(diǎn),簡(jiǎn)書系信息發(fā)布平臺(tái),僅提供信息存儲(chǔ)服務(wù)。

相關(guān)閱讀更多精彩內(nèi)容

友情鏈接更多精彩內(nèi)容