FFmpeg4Android: The Video Playback Process
5 FFmpeg4Android: Video Playback
Playing a video file means decoding its compressed data into frame after frame of RGB data and drawing those frames onto a native or Android widget. Video playback can therefore be done in two ways:
1) JNI-layer playback: the video file is decoded with FFmpeg, the Java-layer display widget's Surface is passed down into the JNI layer, and rendering happens in JNI;
2) Java-layer playback: the video file is decoded with FFmpeg, the decoded data is returned frame by frame to the Java layer, and there it is converted into a Bitmap and displayed on a suitable widget (e.g. ImageView or SurfaceView).
Clearly approach 1) is more efficient, and if all you need is display, it is the one to use; but when the displayed Bitmap has to be processed for some reason, approach 2) is required.
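Both approaches share the same FFmpeg decode pipeline and differ only in the final rendering step. The skeleton below sketches that common pipeline using the same FFmpeg 2.x-era API as the listings in this section (the function name and the omission of error handling are mine, not from the project):

#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"

// Skeleton of the decode pipeline both playback approaches share.
static void decode_pipeline_skeleton(const char *path) {
    av_register_all();                                    // 1. register components
    AVFormatContext *fmt = avformat_alloc_context();
    avformat_open_input(&fmt, path, NULL, NULL);          // 2. open the input file
    avformat_find_stream_info(fmt, NULL);                 // 3. read stream info
    int vidx = -1, i;
    for (i = 0; i < fmt->nb_streams; i++) {               // locate the video stream
        if (fmt->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { vidx = i; break; }
    }
    AVCodecContext *cctx = fmt->streams[vidx]->codec;     // 4. find the decoder
    AVCodec *codec = avcodec_find_decoder(cctx->codec_id);
    avcodec_open2(cctx, codec, NULL);                     // 5. open the decoder
    AVPacket packet;
    AVFrame *frame = av_frame_alloc();
    int got_frame;
    while (av_read_frame(fmt, &packet) >= 0) {            // 6. read packets frame by frame
        if (packet.stream_index == vidx) {                // only decode video packets
            avcodec_decode_video2(cctx, frame, &got_frame, &packet);
            if (got_frame) {
                // `frame` now holds one decoded YUV picture:
                // approach 1) converts and draws it in JNI,
                // approach 2) queues it for the Java layer to fetch.
            }
        }
        av_free_packet(&packet);
    }
    av_frame_free(&frame);
    avcodec_close(cctx);
    avformat_close_input(&fmt);
}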
5.1 JNI-layer video playback
In JNI-layer playback, the video is rendered and displayed inside JNI through an ANativeWindow.
First, the C-side code (file ffmpeg_video_player.c):
#include <jni.h>
#include <stdlib.h>
#include <unistd.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "libyuv.h"
// Demuxing (container format)
#include "libavformat/avformat.h"
// Decoding
#include "libavcodec/avcodec.h"
// Scaling
#include "libswscale/swscale.h"
#include "queue.h"

#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg", FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "ffmpeg", FORMAT, ##__VA_ARGS__);

JNIEXPORT void JNICALL Java_com_lzp_decoder_Player_render
        (JNIEnv *env, jobject jobj, jstring input_jstr, jobject surface)
{
    const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    // 1. Register all components
    av_register_all();
    // Container format context
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    // 2. Open the input video file
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "Failed to open the input video file");
        return;
    }
    // 3. Retrieve the stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "Failed to retrieve the stream information");
        return;
    }
    // To decode the video we need the index of the video AVStream in pFormatCtx->streams
    int video_stream_idx = -1;
    int i = 0;
    for (; i < pFormatCtx->nb_streams; i++) {
        // Check the codec type to find the video stream
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_idx = i;
            break;
        }
    }
    // 4. Find the video decoder
    AVCodecContext *pCodeCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodeCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("%s", "No decoder found");
        return;
    }
    // 5. Open the decoder
    if (avcodec_open2(pCodeCtx, pCodec, NULL) < 0) {
        LOGE("%s", "Failed to open the decoder");
        return;
    }
    // Encoded (compressed) data
    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    // Pixel data (decoded data)
    AVFrame *yuv_frame = av_frame_alloc();
    AVFrame *rgb_frame = av_frame_alloc();
    // Native drawing: the window
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
    // The buffer used while drawing
    ANativeWindow_Buffer outBuffer;
    int len, got_frame, framecount = 0;
    // 6. Read the compressed video data (AVPacket) frame by frame
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        // Only decode packets belonging to the video stream
        if (packet->stream_index == video_stream_idx) {
            // Decode AVPacket -> AVFrame
            len = avcodec_decode_video2(pCodeCtx, yuv_frame, &got_frame, packet);
            // got_frame is zero if no frame could be decompressed, non-zero otherwise
            if (got_frame) {
                LOGI("decoded frame %d", framecount++);
                // lock: set the buffer geometry (width, height, pixel format)
                ANativeWindow_setBuffersGeometry(nativeWindow, pCodeCtx->width, pCodeCtx->height,
                        WINDOW_FORMAT_RGBA_8888);
                ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
                // Point rgb_frame at the window buffer: rgb_frame's data and
                // outBuffer.bits are the same block of memory
                avpicture_fill((AVPicture *)rgb_frame, outBuffer.bits, AV_PIX_FMT_RGBA,
                        pCodeCtx->width, pCodeCtx->height);
                // YUV -> RGBA_8888 (U and V are swapped because libyuv's "ARGB"
                // is byte order B,G,R,A; swapping yields R,G,B,A, which matches
                // WINDOW_FORMAT_RGBA_8888)
                I420ToARGB(yuv_frame->data[0], yuv_frame->linesize[0],
                           yuv_frame->data[2], yuv_frame->linesize[2],
                           yuv_frame->data[1], yuv_frame->linesize[1],
                           rgb_frame->data[0], rgb_frame->linesize[0],
                           pCodeCtx->width, pCodeCtx->height);
                // unlock and post the buffer to the screen
                ANativeWindow_unlockAndPost(nativeWindow);
                usleep(1000 * 16);
            }
        }
        av_free_packet(packet);
    }
    av_free(packet);
    ANativeWindow_release(nativeWindow);
    av_frame_free(&yuv_frame);
    av_frame_free(&rgb_frame);
    avcodec_close(pCodeCtx);
    avformat_close_input(&pFormatCtx);
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}
Note that the last parameter of Java_com_lzp_decoder_Player_render(JNIEnv *env, jobject jobj, jstring input_jstr, jobject surface) is a jobject surface, which corresponds to the Surface passed down from the Java layer.
The drawing window ANativeWindow is associated with the drawing buffer ANativeWindow_Buffer, and rendering is achieved by writing pixels into that buffer.
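To make the lock/write/post cycle concrete, here is a minimal, self-contained sketch that fills the window with a single solid color (the helper name draw_solid_color is hypothetical; in the player above, the "write" step is the libyuv conversion instead). The stride handling is the part that matters: the locked buffer's rows may be wider than the video.

#include <stdint.h>
#include <android/native_window.h>

// Minimal ANativeWindow render cycle: lock the buffer, write pixels, post.
static void draw_solid_color(ANativeWindow *window, int width, int height, uint32_t rgba) {
    ANativeWindow_Buffer outBuffer;
    // Set the buffer geometry (width, height, pixel format)
    ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888);
    if (ANativeWindow_lock(window, &outBuffer, NULL) < 0) {
        return; // the window is no longer valid
    }
    uint32_t *pixels = (uint32_t *) outBuffer.bits;
    int x, y;
    for (y = 0; y < outBuffer.height; y++) {
        for (x = 0; x < outBuffer.width; x++) {
            // Rows are outBuffer.stride pixels long, which may exceed the width
            pixels[y * outBuffer.stride + x] = rgba;
        }
    }
    ANativeWindow_unlockAndPost(window); // hand the buffer back for display
}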
The Java-side code is as follows:
package com.lzp.decoder;

import android.view.Surface;

/**
 * The video playback controller.
 */
public class Player {

    // Decode the video and render it onto the given Surface
    public native void render(String input, Surface surface);

    static {
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("myffmpeg");
    }
}
5.2 Java-layer video playback
In Java-layer playback, the RGB data decoded at the JNI layer is returned to the Java layer and converted there into a Bitmap for display.
The C-side code (file ffmpeg_videofile_decoder.c):
//
// Created by lizhiping03 on 2018/2/1.
//
#include "com_lzp_decoder_VideoDecoder.h"
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <pthread.h>
#include <android/log.h>
#include "libyuv.h"
// Demuxing (container format)
#include "libavformat/avformat.h"
// Decoding
#include "libavcodec/avcodec.h"
// Scaling
#include "libswscale/swscale.h"
#include "queue.h"

#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg", FORMAT, ##__VA_ARGS__);
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "ffmpeg", FORMAT, ##__VA_ARGS__);

static int s_mwidth;
static int s_mheight;
int cnt = 0;
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;

/*
 * Class:     com_lzp_decoder_VideoDecoder
 * Method:    init
 * Signature: (Ljava/lang/String;)V
 */
JNIEXPORT void JNICALL Java_com_lzp_decoder_VideoDecoder_init
        (JNIEnv *env, jobject jobj, jstring input_jstr)
{
    cnt = 0;
    const char* input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    // 1. Register all components
    av_register_all();
    // Container format context
    pFormatCtx = avformat_alloc_context();
    // 2. Open the input video file
    if (avformat_open_input(&pFormatCtx, input_cstr, NULL, NULL) != 0) {
        LOGE("%s", "Failed to open the input video file");
        return;
    }
    // 3. Retrieve the stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "Failed to retrieve the stream information");
        return;
    }
    // To decode the video we need the index of the video AVStream in pFormatCtx->streams
    int video_stream_idx = -1;
    int i = 0;
    for (; i < pFormatCtx->nb_streams; i++) {
        // Check the codec type to find the video stream
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_idx = i;
            break;
        }
    }
    // 4. Find the video decoder
    pCodecCtx = pFormatCtx->streams[video_stream_idx]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        LOGE("%s", "No decoder found");
        return;
    }
    // 5. Open the decoder
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("%s", "Failed to open the decoder");
        return;
    }
    // Encoded (compressed) data
    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    // Pixel data (decoded data)
    AVFrame *yuv_frame = av_frame_alloc();
    int len, got_frame, framecount = 0;
    create_queue();
    s_mwidth = pCodecCtx->width;
    s_mheight = pCodecCtx->height;
    // 6. Read the compressed video data (AVPacket) frame by frame
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        // Only decode packets belonging to the video stream
        if (packet->stream_index == video_stream_idx) {
            // Decode AVPacket -> AVFrame
            len = avcodec_decode_video2(pCodecCtx, yuv_frame, &got_frame, packet);
            // got_frame is zero if no frame could be decompressed, non-zero otherwise
            if (got_frame) {
                LOGI("decoded frame %d", framecount++);
                queue_append_last(yuv_frame);
                usleep(1000 * 16);
            }
        }
        av_free_packet(packet);
    }
    av_free(packet);
    av_frame_free(&yuv_frame);
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}

/*
 * Class:     com_lzp_decoder_VideoDecoder
 * Method:    readFrame
 * Signature: ([BII)I
 */
JNIEXPORT jint JNICALL Java_com_lzp_decoder_VideoDecoder_readFrame
        (JNIEnv *env, jobject jobj, jbyteArray rgbdata, const jint width, const jint height)
{
    jbyte *rgb_data = (jbyte *)(*env)->GetByteArrayElements(env, rgbdata, 0);
    AVFrame *yuv_frame = queue_get_first();
    AVFrame *rgb_frame = av_frame_alloc();
    struct SwsContext *swsctx = NULL;
    if (yuv_frame) {
        LOGE("got frame: %d", cnt++);
        queue_delete_first();
        // Point rgb_frame at the Java byte array
        avpicture_fill((AVPicture *)rgb_frame, (uint8_t *)rgb_data, AV_PIX_FMT_RGB565,
                width, height);
        // Convert YUV -> RGB565, scaling to the requested size
        swsctx = sws_getContext(s_mwidth, s_mheight, pCodecCtx->pix_fmt,
                width, height, AV_PIX_FMT_RGB565, SWS_BICUBIC, NULL, NULL, NULL);
        sws_scale(swsctx, (const uint8_t * const *)yuv_frame->data, yuv_frame->linesize,
                0, s_mheight, rgb_frame->data, rgb_frame->linesize);
        sws_freeContext(swsctx);
        //av_free(yuv_frame);
        av_frame_free(&rgb_frame);
        (*env)->ReleaseByteArrayElements(env, rgbdata, rgb_data, 0);
        return 1;
    }
    av_frame_free(&rgb_frame);
    (*env)->ReleaseByteArrayElements(env, rgbdata, rgb_data, 0);
    return -1;
}
Here init(JNIEnv *env, jobject jobj, jstring input_jstr) decodes the video file and pushes the YUV data into a queue for the frame-reading function readFrame to consume; each call to readFrame takes one frame from the head of the queue.
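The article never shows queue.h, so the following is only a guess at a minimal linked-list version of the four functions the decoder calls; a real implementation would also need a mutex, since DecoderThread pushes while RenderThread pops (all names except the four queue functions are hypothetical):

// Hypothetical sketch of queue.h -- the original is not shown.
#include <stdlib.h>
#include "libavcodec/avcodec.h"

typedef struct QNode {
    AVFrame *frame;
    struct QNode *next;
} QNode;

static QNode *s_head = NULL;
static QNode *s_tail = NULL;

void create_queue() {
    s_head = s_tail = NULL;
}

// Push one decoded frame at the tail
void queue_append_last(AVFrame *frame) {
    QNode *node = (QNode *) malloc(sizeof(QNode));
    node->frame = frame;
    node->next = NULL;
    if (s_tail) s_tail->next = node; else s_head = node;
    s_tail = node;
}

// Peek at the head frame; NULL when the queue is empty
AVFrame *queue_get_first() {
    return s_head ? s_head->frame : NULL;
}

// Pop and free the head node (not the frame it points to)
void queue_delete_first() {
    if (!s_head) return;
    QNode *old = s_head;
    s_head = s_head->next;
    if (!s_head) s_tail = NULL;
    free(old);
}

Note also that the init loop above appends the same yuv_frame pointer on every iteration, so by the time readFrame consumes an entry its contents may already have been overwritten by a newer frame; cloning each frame (for example with av_frame_clone) before queuing would avoid that.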
The Java-side code:
package com.lzp.decoder;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.os.Environment;
import android.os.Handler;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;

import com.lzp.decoder.utils.FileUtils;

import java.io.File;
import java.nio.ByteBuffer;

public class BitmapShowActivity extends Activity implements SurfaceHolder.Callback {

    public final String TAG = BitmapShowActivity.class.getSimpleName();

    private int width = 320;
    private int height = 640;
    private VideoDecoder mDecoder;
    private boolean isRunning = false;
    private byte[] rgbdata;
    private ByteBuffer imagBuf;
    private SurfaceView mSurfaceView;
    private SurfaceHolder mSurfaceHolder;
    private Rect mPreviewWindow = new Rect(0, 0, width, height);
    private Bitmap mBitmap;
    private Handler mHandler;
    private String mFileUrl;
    private Button mPlayerBtn;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video_file);
        mPlayerBtn = findViewById(R.id.player);
        mSurfaceView = findViewById(R.id.sufaceView);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        mDecoder = new VideoDecoder();
        mHandler = new Handler();
        mFileUrl = new File(Environment.getExternalStorageDirectory(), "sintel.mp4").getAbsolutePath();
        // RGB565 uses 2 bytes per pixel
        rgbdata = new byte[width * height * 2];
        imagBuf = ByteBuffer.wrap(rgbdata);
        mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
        mPlayerBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if ("Play".equals(mPlayerBtn.getText().toString())) {
                    isRunning = true;
                    mPlayerBtn.setText("Stop");
                    new DecoderThread().start();
                    new RenderThread().start();
                } else {
                    isRunning = false;
                    mPlayerBtn.setText("Play");
                }
            }
        });
    }

    ...

    private class RenderThread extends Thread {
        @Override
        public void run() {
            while (isRunning) {
                try {
                    sleep(100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                if (mDecoder.readFrame(rgbdata, width, height) > 0) {
                    mHandler.post(mPreviewTask);
                }
            }
        }
    }

    private class DecoderThread extends Thread {
        @Override
        public void run() {
            mDecoder.init(mFileUrl);
        }
    }

    final Runnable mPreviewTask = new Runnable() {
        @Override
        public void run() {
            mBitmap.copyPixelsFromBuffer(imagBuf);
            FileUtils.saveBitmap(BitmapShowActivity.this, mBitmap);
            Canvas canvas = mSurfaceHolder.lockCanvas();
            if (canvas != null) {
                canvas.drawBitmap(mBitmap, null, mPreviewWindow, null);
                mSurfaceHolder.unlockCanvasAndPost(canvas);
            }
            imagBuf.clear();
        }
    };
}