5 FFmpeg4Android:視頻播放
視頻文件的播放過程,就是將視頻中的壓縮數(shù)據(jù)解碼成一幀幀的RGB數(shù)據(jù),繪制到Native或Android的控件上。因此,視頻的播放方式可分為兩種:
1)jni層播放:即將視頻文件通過ffmpeg解碼,同時(shí)將java層的顯示控件surface傳入到j(luò)ni層, 在jni層進(jìn)行渲染;
2)java層播放:即將視頻文件通過ffmpeg解碼,然后將解碼一幀一幀數(shù)據(jù)返回到j(luò)ava層,并轉(zhuǎn)化成Bitmap顯示到相應(yīng)控件中(如:ImageView、SurfaceView等)。
可以看出方式1)效率會(huì)更高,只會(huì)顯示的話用1)方法即可;但是有些時(shí)候出于某種原因需要對顯示的Bitmap圖片在java層進(jìn)行處理,就需要用到方式2)。
5.1 視頻的jni層播放
視頻的jni層播放,是在jni中的通過ANativeWindow進(jìn)行渲染顯示。
先看C端的代碼(ffmpeg_video_player.c文件):
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#include "libyuv.h"
// 封裝格式
#include "libavformat/avformat.h"
// 解碼
#include "libavcodec/avcodec.h"
// 縮放
#include "libswscale/swscale.h"
#include "queue.h"
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg", FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "ffmpeg", FORMAT, ##__VA_ARGS__);
static int s_mwidth;
static int s_mheight;
JNIEXPORT void JNICALL Java_com_lzp_decoder_Player_render
(JNIEnv *env, jobject jobj, jstring input_jstr, jobject surface)
{
const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
// 1.注冊組件
av_register_all();
// 封裝格式上下文
AVFormatContext *pFormatCtx = avformat_alloc_context();
// 2.打開輸入視頻文件
if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
LOGE("%s", "打開輸入視頻文件失敗");
return;
}
// 3.獲取視頻信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGE("%s", "獲取視頻信息失敗");
return;
}
// 視頻解碼,需要找到視頻對應(yīng)的AVStream所在pFormatCtx->streams的索引位置
int video_stream_idx = -1;
int i = 0;
for (; i < pFormatCtx->nb_streams; i++) {
// 根據(jù)類型判斷,是否是視頻流
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
video_stream_idx = i;
break;
}
}
// 4.獲取視頻解碼器
AVCodecContext *pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
if (pCodec == NULL) {
LOGE("%s", "無法解碼");
return;
}
// 5.打開解碼器
if (avcodec_open2(pCodeCtx, pCodec, NULL) < 0) {
LOGE("%s", "解碼器無法打開");
return;
}
// 編碼數(shù)據(jù)
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
// 像素?cái)?shù)據(jù)(解碼數(shù)據(jù))
AVFrame *yuv_frame = av_frame_alloc();
AVFrame *rgb_frame = av_frame_alloc();
// native繪制
// 窗體
ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
// 繪制時(shí)的緩沖區(qū)
ANativeWindow_Buffer outBuffer;
int len, got_frame, framecount = 0;
// 6.一陣一陣讀取壓縮的視頻數(shù)據(jù)AVPacket
while (av_read_frame(pFormatCtx, packet) >= 0) {
// 解碼AVPacket->AVFrame
len = avcodec_decode_video2(pCodeCtx, yuv_frame, &got_frame, packet);
// Zero if no frame could be decompressed
// 非零,正在解碼
if (got_frame) {
LOGI("解碼%d幀", framecount++);
// lock
// 設(shè)置緩沖區(qū)的屬性(寬、高、像素格式)
ANativeWindow_setBuffersGeometry(nativeWindow, pCodeCtx->width, pCodeCtx->height, WINDOW_FORMAT_RGBA_8888);
ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
// 設(shè)置rgb_frame的屬性(像素格式、寬高)和緩沖區(qū)
// rgb_frame緩沖區(qū)與outBuffer.bits是同一塊內(nèi)存
avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, AV_PIX_FMT_RGBA, pCodeCtx->width, pCodeCtx->height);
// YUV->RGBA_8888
I420ToARGB(
yuv_frame->data[0], yuv_frame->linesize[0],
yuv_frame->data[2], yuv_frame->linesize[2],
yuv_frame->data[1], yuv_frame->linesize[1],
rgb_frame->data[0], rgb_frame->linesize[0],
pCodeCtx->width, pCodeCtx->height);
// unlock
ANativeWindow_unlockAndPost(nativeWindow);
usleep(1000 * 16);
}
av_free_packet(packet);
}
ANativeWindow_release(nativeWindow);
av_frame_free(&yuv_frame);
avcodec_close(pCodeCtx);
avformat_free_context(pFormatCtx);
(*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}
可以看到函數(shù)Java_com_lzp_decoder_Player_render
(JNIEnv *env, jobject jobj, jstring input_jstr, jobject surface)中的第3個(gè)參數(shù)傳入的是jobject surface,對應(yīng)java層中的Surface。
將繪制窗體ANativeWindow與繪制緩沖區(qū)ANativeWindow_Buffer相關(guān)聯(lián),通過操作緩沖區(qū)的方式來達(dá)到渲染的效果。
JAVA端代碼如下:
package com.lzp.decoder;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.util.Log;
import android.view.Surface;
/**
* 視頻播放的控制器
*/
public class Player {
// 解碼視頻
public native void render(String input, Surface surface);
static {
System.loadLibrary("avutil-54");
System.loadLibrary("swresample-1");
System.loadLibrary("avcodec-56");
System.loadLibrary("avformat-56");
System.loadLibrary("swscale-3");
System.loadLibrary("postproc-53");
System.loadLibrary("avfilter-5");
System.loadLibrary("avdevice-56");
System.loadLibrary("myffmpeg");
}
}
5.2 視頻的java層播放
視頻的java層播放,是將jni層解碼后的rgb數(shù)據(jù)返回,然后在java層中轉(zhuǎn)化成Bitmap進(jìn)行顯示。
C端的代碼(ffmpeg_videofile_decoder.c文件):
//
// Created by lizhiping03 on 2018/2/1.
//
#include "com_lzp_decoder_VideoDecoder.h"
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#include "libyuv.h"
// 封裝格式
#include "libavformat/avformat.h"
// 解碼
#include "libavcodec/avcodec.h"
// 縮放
#include "libswscale/swscale.h"
#include "queue.h"
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg", FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "ffmpeg", FORMAT, ##__VA_ARGS__);
static int s_mwidth;
static int s_mheight;
int cnt = 0;
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
/*
* Class: com_lzp_decoder_VideoDecoder
* Method: init
* Signature: (Ljava/lang/String;)V
*/
JNIEXPORT void JNICALL Java_com_lzp_decoder_VideoDecoder_init
(JNIEnv *env, jobject jobj, jstring input_jstr)
{
cnt = 0;
const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
// 1.注冊組件
av_register_all();
// 封裝格式上下文
pFormatCtx = avformat_alloc_context();
// 2.打開輸入視頻文件
if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
LOGE("%s", "打開輸入視頻文件失敗");
return;
}
// 3.獲取視頻信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGE("%s", "獲取視頻信息失敗");
return;
}
// 視頻解碼,需要找到視頻對應(yīng)的AVStream所在pFormatCtx->streams的索引位置
int video_stream_idx = -1;
int i = 0;
for (; i < pFormatCtx->nb_streams; i++) {
// 根據(jù)類型判斷,是否是視頻流
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
video_stream_idx = i;
break;
}
}
// 4.獲取視頻解碼器
pCodecCtx = pFormatCtx->streams[video_stream_idx]->codec;
AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
LOGE("%s", "無法解碼");
return;
}
// 5.打開解碼器
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE("%s", "解碼器無法打開");
return;
}
// 編碼數(shù)據(jù)
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
// 像素?cái)?shù)據(jù)(解碼數(shù)據(jù))
AVFrame *yuv_frame = av_frame_alloc();
int len, got_frame, framecount = 0;
create_queue();
s_mwidth = pCodecCtx->width;
s_mheight = pCodecCtx->height;
// 6.一陣一陣讀取壓縮的視頻數(shù)據(jù)AVPacket
while (av_read_frame(pFormatCtx, packet) >= 0) {
// 解碼AVPacket->AVFrame
len = avcodec_decode_video2(pCodecCtx, yuv_frame, &got_frame, packet);
// Zero if no frame could be decompressed
// 非零,正在解碼
if (got_frame) {
LOGI("解碼%d幀", framecount++);
queue_append_last(yuv_frame);
usleep(1000 * 16);
}
av_free_packet(packet);
}
av_frame_free(&yuv_frame);
(*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}
/*
* Class: com_lzp_decoder_VideoDecoder
* Method: readFrame
* Signature: ([BII)I
*/
JNIEXPORT jint JNICALL Java_com_lzp_decoder_VideoDecoder_readFrame
(JNIEnv *env, jobject jobj, jbyteArray rgbdata, const jint width, const jint height)
{
jbyte *rgb_data = (jbyte *)(*env)->GetByteArrayElements(env, rgbdata, 0);
AVFrame *yuv_frame = queue_get_first();
AVFrame *rgb_frame = NULL;
rgb_frame = avcodec_alloc_frame();
struct SwsContext *swsctx = NULL;
if (yuv_frame) {
LOGE("got frame: %d", cnt++);
queue_delete_first();
avpicture_fill((AVPicture *) rgb_frame, (uint8_t *)rgb_data, PIX_FMT_RGB565, width, height);
swsctx = sws_getContext(s_mwidth, s_mheight, pCodecCtx->pix_fmt, width, height, PIX_FMT_RGB565, SWS_BICUBIC, NULL, NULL, NULL);
sws_scale(swsctx, (const uint8_t * const *)yuv_frame->data, yuv_frame->linesize, 0, s_mheight, rgb_frame->data, rgb_frame->linesize);
//av_free(yuv_frame);
av_free(rgb_frame);
(*env)->ReleaseByteArrayElements(env, rgbdata, rgb_data, 0);
return 1;
}
av_free(yuv_frame);
av_free(rgb_frame);
(*env)->ReleaseByteArrayElements(env, rgbdata, rgb_data, 0);
return -1;
}
這里的init(JNIEnv *env, jobject jobj, jstring input_jstr)負(fù)責(zé)將視頻文件解碼,并將yuv數(shù)據(jù)放入隊(duì)列中,供讀幀函數(shù)readFrame取。每讀一次會(huì)從隊(duì)列頭取到一幀。
JAVA端代碼:
package com.lzp.decoder;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.nfc.Tag;
import android.nfc.TagLostException;
import android.os.Environment;
import android.os.Handler;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import com.lzp.decoder.utils.FileUtils;
import java.io.File;
import java.nio.ByteBuffer;
public class BitmapShowActivity extends Activity implements SurfaceHolder.Callback {
public final String TAG = BitmapShowActivity.class.getSimpleName();
private int width = 320;
private int height = 640;
private VideoDecoder mDecoder;
private boolean isRunning = false;
private byte[] rgbdata;
private ByteBuffer imagBuf;
private SurfaceView mSurfaceView;
private SurfaceHolder mSurfaceHolder;
private Rect mPreviewWindow = new Rect(0, 0, width, height);
private Bitmap mBitmap;
private Handler mHandler;
private String mFileUrl;
private Button mPlayerBtn;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_file);
mPlayerBtn = findViewById(R.id.player);
mSurfaceView = findViewById(R.id.sufaceView);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
mDecoder = new VideoDecoder();
mHandler = new Handler();
mFileUrl = new File(Environment.getExternalStorageDirectory(), "sintel.mp4").getAbsolutePath();
rgbdata = new byte[width * height * 2];
imagBuf = ByteBuffer.wrap(rgbdata);
mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
mPlayerBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if ("開始播放".equals(mPlayerBtn.getText().toString())) {
isRunning = true;
mPlayerBtn.setText("停止播放");
new DecoderThread().start();
// new RenderThread().start();
} else {
isRunning = false;
mPlayerBtn.setText("開始播放");
}
}
});
}
...
private class RenderThread extends Thread {
@Override
public void run() {
while (isRunning) {
try {
sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
if (mDecoder.readFrame(rgbdata, width, height) > 0) {
mHandler.post(mPreviewTask);
}
}
}
}
private class DecoderThread extends Thread {
@Override
public void run() {
mDecoder.init(mFileUrl);
}
}
final Runnable mPreviewTask = new Runnable() {
@Override
public void run() {
mBitmap.copyPixelsFromBuffer(imagBuf);
if (mBitmap != null) {
FileUtils.saveBitmap(BitmapShowActivity.this, mBitmap);
Canvas canvas = mSurfaceHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(mBitmap, null, mPreviewWindow, null);
}
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
imagBuf.clear();
}
};
}
2018/2/11更新-----------------------
上面的代碼在java層開了兩個(gè)線程分別做解碼與播放:線程1將解碼后的yuv數(shù)據(jù)放入隊(duì)列,線程2從隊(duì)列中取出數(shù)據(jù)實(shí)現(xiàn)播放。存在很大的問題,兩個(gè)線程都在java層開啟,沒有辦法做解碼與播放的同步,也沒有對隊(duì)列進(jìn)行上鎖,丟幀是必然的。因此,接下來用回調(diào)的方式來進(jìn)行視頻播放。
回調(diào),即從c/c++通過反射的方式調(diào)java層代碼,c/c++調(diào)java的方式,可參考:FFmpeg4Android:jni中c/c++調(diào)用java。
改進(jìn)后C端代碼(ffmpeg_videofile_decoder.c文件):
/*
 * Class: com_lzp_decoder_VideoDecoder
 * Method: decodeVideo
 * Signature: (Ljava/lang/String;II)V
 */
JNIEXPORT void JNICALL Java_com_lzp_decoder_VideoDecoder_decodeVideo
(JNIEnv *env, jobject jobj, jstring input_jstr, const jint width, const jint height)
{
cnt = 0;
const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
// 1.注冊組件
av_register_all();
// 封裝格式上下文
pFormatCtx = avformat_alloc_context();
// 2.打開輸入視頻文件
if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
LOGE("%s", "打開輸入視頻文件失敗");
return;
}
// 3.獲取視頻信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
LOGE("%s", "獲取視頻信息失敗");
return;
}
// 視頻解碼,需要找到視頻對應(yīng)的AVStream所在pFormatCtx->streams的索引位置
int video_stream_idx = -1;
int i = 0;
for (; i < pFormatCtx->nb_streams; i++) {
// 根據(jù)類型判斷,是否是視頻流
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
video_stream_idx = i;
break;
}
}
// 4.獲取視頻解碼器
pCodecCtx = pFormatCtx->streams[video_stream_idx]->codec;
AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
LOGE("%s", "無法解碼");
return;
}
// 5.打開解碼器
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
LOGE("%s", "解碼器無法打開");
return;
}
// 編碼數(shù)據(jù)
AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
// 像素?cái)?shù)據(jù)(解碼數(shù)據(jù))
AVFrame *yuv_frame = av_frame_alloc();
int len, got_frame, framecount = 0;
create_queue();
// jclass clazz = (*env)->FindClass(env, "com/lzp/decoder/VideoDecoder");
jclass clazz = (*env)->GetObjectClass(env, jobj);
jmethodID jmID = (*env)->GetMethodID(env, clazz, "frameCallback", "([BII)V");
struct SwsContext *swsctx = NULL;
// 6.一陣一陣讀取壓縮的視頻數(shù)據(jù)AVPacket
while (av_read_frame(pFormatCtx, packet) >= 0) {
// 解碼AVPacket->AVFrame
len = avcodec_decode_video2(pCodecCtx, yuv_frame, &got_frame, packet);
// Zero if no frame could be decompressed
// 非零,正在解碼
if (got_frame) {
LOGI("解碼%d幀", framecount++);
// queue_append_last(yuv_frame);
//jstring newStr = (*env)->NewStringUTF(env, "yuv_frame");
AVFrame *rgb_frame = av_frame_alloc();
jbyteArray rgbdata = (*env)->NewByteArray(env, 2 * width * height);
jbyte *rgb_data = (jbyte *)(*env)->GetByteArrayElements(env, rgbdata, NULL);
avpicture_fill((AVPicture *) rgb_frame, (uint8_t *)rgb_data, PIX_FMT_RGB565, width, height);
swsctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, width, height, PIX_FMT_RGB565, SWS_BICUBIC, NULL, NULL, NULL);
sws_scale(swsctx, (const uint8_t * const *)yuv_frame->data, yuv_frame->linesize, 0, pCodecCtx->height, rgb_frame->data, rgb_frame->linesize);
(*env)->ReleaseByteArrayElements(env, rgbdata, rgb_data, 0);
// 回調(diào)java中的frameCallback方法
(*env)->CallVoidMethod(env, jobj, jmID, rgbdata, width, height);
(*env)->DeleteLocalRef(env, rgbdata);
usleep(1000 * 16);
av_free(rgb_frame);
}
av_free_packet(packet);
}
av_frame_free(&yuv_frame);
(*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}
java端代碼:VideoDecoder.java
VideoDecoder變成了抽象類,并添加native decodeVideo()方法與frameCallback()抽象方法。調(diào)用decodeVideo方法后開始解碼視頻,每解碼一幀會(huì)回調(diào)frameCallback()一次。
package com.lzp.decoder;
/**
* Created by lizhiping03 on 2018/2/1.
*/
public abstract class VideoDecoder {
/**
* 打開視頻文件并開始解碼
* @param file_url
*/
public native void init(String file_url);
/**
* 取一幀rgb數(shù)據(jù)
*/
public native int readFrame(byte[] rgbdata, int width, int height);
/**
* 打開視頻文件并開始解碼
* @param file_url
*/
public native void decodeVideo(String file_url, int width, int height);
public abstract void frameCallback(byte[] rgbdata, int width, int heigth);
static {
System.loadLibrary("myffmpeg");
}
}
BitmapShowActivity.java代碼:
package com.lzp.decoder;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.nfc.Tag;
import android.nfc.TagLostException;
import android.os.Environment;
import android.os.Handler;
import android.os.Bundle;
import android.os.Message;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import com.lzp.decoder.utils.FileUtils;
import java.io.File;
import java.nio.ByteBuffer;
public class BitmapShowActivity extends Activity implements SurfaceHolder.Callback {
public final String TAG = BitmapShowActivity.class.getSimpleName();
private int width = 320;
private int height = 640;
private VideoDecoder mDecoder;
private boolean isRunning = false;
private byte[] rgbdata;
private ByteBuffer imagBuf;
private ByteBuffer imagBuf2;
private SurfaceView mSurfaceView;
private SurfaceHolder mSurfaceHolder;
private Rect mPreviewWindow = new Rect(0, 0, width, height);
private Bitmap mBitmap;
private Handler mHandler;
private String mFileUrl;
private Button mPlayerBtn;
private Button mPlayerBtn2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_video_file);
mPlayerBtn = findViewById(R.id.player);
mPlayerBtn2 = findViewById(R.id.player2);
mSurfaceView = findViewById(R.id.sufaceView);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(this);
rgbdata = new byte[width * height * 2];
imagBuf = ByteBuffer.wrap(rgbdata);
imagBuf2 = ByteBuffer.wrap(rgbdata);
mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what) {
case 1:
imagBuf2 = ByteBuffer.wrap((byte[])msg.obj);
Log.e(TAG, "len: " + ((byte[])msg.obj).length);
mBitmap.copyPixelsFromBuffer(imagBuf2);
if (mBitmap != null) {
Log.e(TAG, "mBitmap");
//FileUtils.saveBitmap(BitmapShowActivity.this, mBitmap);
Canvas canvas = mSurfaceHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(mBitmap, null, mPreviewWindow, null);
}
mSurfaceHolder.unlockCanvasAndPost(canvas);
}
imagBuf2.clear();
break;
default:
break;
}
}
};
mFileUrl = new File(Environment.getExternalStorageDirectory(), "sintel.mp4").getAbsolutePath();
...
mPlayerBtn2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if ("回調(diào)播放".equals(mPlayerBtn2.getText().toString())) {
isRunning = true;
mPlayerBtn2.setText("停止播放");
new DecoderThread2().start();
} else {
isRunning = false;
mPlayerBtn2.setText("回調(diào)播放");
}
}
});
mDecoder = new VideoDecoder() {
@Override
public void frameCallback(byte[] rgb_data, int width, int heigth) {
String str = String.format("rgbata size: %d, width: %d, height: %d", rgb_data.length, width, heigth);
Log.e("BitmapShowActivity", str);
Message msg = mHandler.obtainMessage();
msg.what = 1;
msg.obj = rgb_data;
mHandler.sendMessage(msg);
}
};
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
mPreviewWindow = new Rect(0, 0, width, height);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
}
...
private class DecoderThread2 extends Thread {
@Override
public void run() {
mDecoder.decodeVideo(mFileUrl, width, height);
}
}
...
}
這時(shí)候會(huì)發(fā)現(xiàn)播放正常了,不會(huì)有丟幀的情況。
代碼+視頻文件下載:http://download.csdn.net/download/itismelzp/10249569