Integrating the FFmpeg Audio/Video Framework into Android Studio via JNI (CMake)
FFmpeg is written mostly in C/C++. To use the library functions it provides, you need to package them into shared libraries: .so files on Linux, .dll files on Windows. Since Android is based on the Linux kernel, using the FFmpeg framework on the Android platform means building it into .so files. To compile and package it under Linux, you place a shell script (the Linux counterpart of a Windows .bat file) in the FFmpeg source directory and run it. This post shows how to integrate FFmpeg via JNI; if you are not yet familiar with CMake-based JNI setups, see the earlier posts on integrating a JNI development environment with CMake in Android Studio, and on having Java and C/C++ call each other via JNI, including generating and using .so libraries (calling the Meitu Xiuxiu .so via JNI). This post proceeds in the following steps:
1. Compile the FFmpeg source on Windows to obtain the .so library files
2. Integrate the .so libraries into an Android project via JNI
3. Call FFmpeg library functions from C++ files through native methods

Let's walk through each step in detail.
1. Compiling FFmpeg to obtain the .so library files
To compile FFmpeg in a Windows environment, download the following tools and source code:
- Download and install MinGW
- Download Yasm
- Download the FFmpeg source code

Once the FFmpeg source and the MinGW toolchain are in place, you can start compiling.
First, the configure file in the source tree needs a small modification. By default the version number of each built shared library comes after the .so suffix (for example libavcodec.so.5.100.1), and Android cannot load libraries named this way, so the naming scheme must be changed. Open the configure file in the FFmpeg folder and find these lines:
```
SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'
```

Change them to:

```
SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)-$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'
```
Then create a new build_android.sh file. Note: the first four variables must be adjusted to your own environment, and no line may end with a trailing space:
```bash
#!/bin/bash
export TMPDIR="C:/Users/jacket/Desktop/ff"
NDK=C:/Users/jacket/AppData/Local/Android/Sdk/ndk-bundle
SYSROOT=$NDK/platforms/android-21/arch-arm/
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/windows-x86_64

function build_one
{
./configure \
--prefix=$PREFIX \
--enable-shared \
--disable-static \
--disable-doc \
--disable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-ffserver \
--disable-avdevice \
--disable-symver \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--target-os=linux \
--arch=arm \
--enable-cross-compile \
--sysroot=$SYSROOT \
--extra-cflags="-Os -fpic $ADDI_CFLAGS" \
--extra-ldflags="$ADDI_LDFLAGS" \
$ADDITIONAL_CONFIGURE_FLAG
make clean
make
make install
}

CPU=arm
PREFIX=$(pwd)/android/$CPU
ADDI_CFLAGS="-marm"
build_one
```
Launch MinGW, change into the FFmpeg source directory, and run the script:
```bash
chmod +x ./build_android.sh
./build_android.sh
```

The build then starts.
2. Porting the libraries into Android Studio
Once you have the .so files, create armeabi and include folders under the module's libs directory. Copy the .so files from the armeabi output inside the android folder produced by the build into libs/armeabi, and copy the C header files from its include folder into libs/include, as shown below:
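The original screenshot is not reproduced here; based on the library names referenced by the CMakeLists.txt below, the resulting layout should look roughly like this:

```
app/libs/
├── armeabi/
│   ├── libavcodec-57.so
│   ├── libavfilter-6.so
│   ├── libavformat-57.so
│   ├── libavutil-55.so
│   ├── libswresample-2.so
│   └── libswscale-4.so
└── include/
    ├── libavcodec/
    ├── libavfilter/
    ├── libavformat/
    ├── libavutil/
    ├── libswresample/
    └── libswscale/
```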
Next, write the CMakeLists.txt file, declaring the locations of the FFmpeg library files:
```cmake
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Creates and names a library, sets it as either STATIC or SHARED,
# and provides the relative paths to its source code. You can define
# multiple libraries, and CMake builds them for you. Gradle
# automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
             native-lib
             # Sets the library as a shared library.
             SHARED
             # Provides a relative path to your source file(s).
             src/main/cpp/native-lib.cpp )

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path
# by default, you only need to specify the name of the public NDK
# library you want to add.
find_library( # Sets the name of the path variable.
              log-lib
              # Specifies the name of the NDK library that you want CMake to locate.
              log )

# Directory holding the prebuilt FFmpeg libraries copied in above.
set(distribution_DIR ../../../../libs)

# Declare each FFmpeg .so as an imported prebuilt shared library.
add_library( avcodec-57 SHARED IMPORTED )
set_target_properties( avcodec-57 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/armeabi/libavcodec-57.so )

add_library( avfilter-6 SHARED IMPORTED )
set_target_properties( avfilter-6 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/armeabi/libavfilter-6.so )

add_library( avformat-57 SHARED IMPORTED )
set_target_properties( avformat-57 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/armeabi/libavformat-57.so )

add_library( avutil-55 SHARED IMPORTED )
set_target_properties( avutil-55 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/armeabi/libavutil-55.so )

add_library( swresample-2 SHARED IMPORTED )
set_target_properties( swresample-2 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/armeabi/libswresample-2.so )

add_library( swscale-4 SHARED IMPORTED )
set_target_properties( swscale-4 PROPERTIES IMPORTED_LOCATION
                       ${distribution_DIR}/armeabi/libswscale-4.so )

# Make the FFmpeg headers visible to native-lib.cpp.
include_directories(libs/include)

# Specifies libraries CMake should link to your target library.
target_link_libraries( # Specifies the target library.
                       native-lib
                       avcodec-57
                       avfilter-6
                       avformat-57
                       avutil-55
                       swresample-2
                       swscale-4
                       # Links the target library to the log library included in the NDK.
                       ${log-lib} )
```
3. Writing native methods and calling FFmpeg library functions from C/C++ files
```cpp
#include <jni.h>
#include <string>
#include <android/log.h>

extern "C" {
// encoding/decoding
#include "libavcodec/avcodec.h"
// container format handling
#include "libavformat/avformat.h"
// pixel processing
#include "libswscale/swscale.h"
// video filters
#include "libavfilter/avfilter.h"
}

#define FFLOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO, "FFMPEG", FORMAT, ##__VA_ARGS__);
#define FFLOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "FFMPEG", FORMAT, ##__VA_ARGS__);

extern "C"
JNIEXPORT jstring JNICALL
Java_com_jacket_ffmpeg_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
//    std::string hello = "Hello from C++";
//    return env->NewStringUTF(hello.c_str());
//    return env->NewStringUTF(av_version_info());
    return env->NewStringUTF(avcodec_configuration());
}

extern "C"
JNIEXPORT void JNICALL
Java_com_jacket_ffmpeg_MainActivity_decode(JNIEnv *env, jclass type,
                                           jstring input_, jstring output_) {
    // Fetch the input and output file names
    const char *input = env->GetStringUTFChars(input_, 0);
    const char *output = env->GetStringUTFChars(output_, 0);

    // 1. Register all components
    av_register_all();
    // The format context is the global structure holding the container-level
    // information of the video file
    AVFormatContext *pFormatCtx = avformat_alloc_context();

    // 2. Open the input video file
    if (avformat_open_input(&pFormatCtx, input, NULL, NULL) != 0) {
        FFLOGE("%s", "Couldn't open the input video file");
        return;
    }

    // 3. Retrieve the stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        FFLOGE("%s", "Couldn't retrieve the stream information");
        return;
    }

    // Find the index of the video stream by iterating over all streams
    // (audio, video, subtitle) and checking each stream's codec type
    int v_stream_idx = -1;
    int i = 0;
    for (; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            v_stream_idx = i;
            break;
        }
    }
    if (v_stream_idx == -1) {
        FFLOGE("%s", "Couldn't find a video stream\n");
        return;
    }

    // The decoder can only be located once the stream's encoding is known,
    // so get the codec context of the video stream
    AVCodecContext *pCodecCtx = pFormatCtx->streams[v_stream_idx]->codec;

    // 4. Find the decoder matching the codec id in the codec context
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL) {
        FFLOGE("%s", "Couldn't find the decoder\n");
        return;
    }

    // 5. Open the decoder
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        FFLOGE("%s", "Couldn't open the decoder\n");
        return;
    }

    // Print some information about the video
    FFLOGI("Container format: %s", pFormatCtx->iformat->name);
    FFLOGI("Duration: %d", (int) (pFormatCtx->duration / 1000000));
    FFLOGI("Width x height: %d,%d", pCodecCtx->width, pCodecCtx->height);
    FFLOGI("Decoder name: %s", pCodec->name);

    // Prepare for reading:
    // AVPacket stores one frame of compressed data (e.g. H.264); allocate its buffer
    AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    // AVFrame stores decoded pixel data (YUV); allocate the frames
    AVFrame *pFrame = av_frame_alloc();
    // YUV420 target frame
    AVFrame *pFrameYUV = av_frame_alloc();
    // Memory can only really be allocated once the AVFrame's pixel format
    // and picture size are known
    uint8_t *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    // Initialize the buffer
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
                   pCodecCtx->width, pCodecCtx->height);

    // Conversion (scaling) parameters: source width/height/format,
    // target width/height/format
    struct SwsContext *sws_ctx = sws_getContext(
            pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
            SWS_BICUBIC, NULL, NULL, NULL);

    int got_picture, ret;
    FILE *fp_yuv = fopen(output, "wb+");
    int frame_count = 0;

    // 6. Read the compressed data frame by frame
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        // Only handle packets of the video stream (check the stream index)
        if (packet->stream_index == v_stream_idx) {
            // 7. Decode one frame of compressed video data into pixel data
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0) {
                FFLOGE("%s", "Decoding error");
                return;
            }
            // got_picture is non-zero when a decoded frame is available
            if (got_picture) {
                // Convert the AVFrame to YUV420 pixel format, line by line:
                // args 2/6 are the input/output data, 3/7 the input/output
                // line sizes, 4 the first row to convert (starting at 0),
                // 5 the height of the input picture
                sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                          pFrameYUV->data, pFrameYUV->linesize);

                // Write the pixel frame to the YUV file.
                // data holds the decoded image pixels: Y is luma, U/V are
                // chroma (subsampled, since the eye is more sensitive to
                // luma); U and V each have 1/4 as many samples as Y
                int y_size = pCodecCtx->width * pCodecCtx->height;
                fwrite(pFrameYUV->data[0], 1, y_size, fp_yuv);
                fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_yuv);
                fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_yuv);

                frame_count++;
                FFLOGI("Decoded frame %d", frame_count);
            }
        }
        // Release the packet's resources
        av_free_packet(packet);
    }

    // Release the remaining resources
    fclose(fp_yuv);
    sws_freeContext(sws_ctx);
    av_frame_free(&pFrameYUV);
    av_free(out_buffer);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_free_context(pFormatCtx);
    env->ReleaseStringUTFChars(input_, input);
    env->ReleaseStringUTFChars(output_, output);
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_jacket_ffmpeg_MainActivity_avFilterInfo(JNIEnv *env, jobject) {
    char info[40000] = {0};
    avfilter_register_all();
    AVFilter *f_temp = (AVFilter *) avfilter_next(NULL);
    while (f_temp != NULL) {
        sprintf(info, "%s%s\n", info, f_temp->name);
        f_temp = f_temp->next;
    }
    return env->NewStringUTF(info);
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_jacket_ffmpeg_MainActivity_avCodecInfo(JNIEnv *env, jobject) {
    char info[40000] = {0};
    av_register_all();
    AVCodec *c_temp = av_codec_next(NULL);
    while (c_temp != NULL) {
        if (c_temp->decode != NULL) {
            sprintf(info, "%sdecode:", info);
        } else {
            sprintf(info, "%sencode:", info);
        }
        switch (c_temp->type) {
            case AVMEDIA_TYPE_VIDEO:
                sprintf(info, "%s(video):", info);
                break;
            case AVMEDIA_TYPE_AUDIO:
                sprintf(info, "%s(audio):", info);
                break;
            default:
                sprintf(info, "%s(other):", info);
                break;
        }
        sprintf(info, "%s[%10s]\n", info, c_temp->name);
        c_temp = c_temp->next;
    }
    return env->NewStringUTF(info);
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_jacket_ffmpeg_MainActivity_avFormatInfo(JNIEnv *env, jobject) {
    char info[40000] = {0};
    av_register_all();
    AVInputFormat *if_temp = av_iformat_next(NULL);
    AVOutputFormat *of_temp = av_oformat_next(NULL);
    while (if_temp != NULL) {
        sprintf(info, "%sInput: %s\n", info, if_temp->name);
        if_temp = if_temp->next;
    }
    while (of_temp != NULL) {
        sprintf(info, "%sOutput: %s\n", info, of_temp->name);
        of_temp = of_temp->next;
    }
    return env->NewStringUTF(info);
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_jacket_ffmpeg_MainActivity_urlProtocolInfo(JNIEnv *env, jobject) {
    char info[40000] = {0};
    av_register_all();
    struct URLProtocol *pup = NULL;
    struct URLProtocol **p_temp = &pup;
    // Each call to avio_enum_protocols() advances the iterator,
    // so these loops terminate once the protocol list is exhausted
    avio_enum_protocols((void **) p_temp, 0);
    while ((*p_temp) != NULL) {
        sprintf(info, "%sInput: %s\n", info, avio_enum_protocols((void **) p_temp, 0));
    }
    pup = NULL;
    avio_enum_protocols((void **) p_temp, 1);
    while ((*p_temp) != NULL) {
        sprintf(info, "%sOutput: %s\n", info, avio_enum_protocols((void **) p_temp, 1));
    }
    return env->NewStringUTF(info);
}
```
As you can see, Java_com_jacket_ffmpeg_MainActivity_stringFromJNI() returns a string to the Java layer. Note that a C char[] does not map directly to Java's String type (i.e. jstring); converting a char[] to a String goes through JNIEnv's NewStringUTF() function. Modified to call into FFmpeg, the source of Java_com_jacket_ffmpeg_MainActivity_stringFromJNI() looks like this:
```cpp
extern "C"
JNIEXPORT jstring JNICALL
Java_com_jacket_ffmpeg_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    return env->NewStringUTF(avcodec_configuration());
}
```
Here native-lib.cpp calls libavcodec's avcodec_configuration() function to retrieve FFmpeg's build configuration. In total, the demo app can display the following information about the FFmpeg libraries:
- Protocol: the protocols supported by the FFmpeg libraries
- AVFormat: the container formats supported by the FFmpeg libraries
- AVCodec: the encoders/decoders supported by the FFmpeg libraries
- AVFilter: the filters supported by the FFmpeg libraries
- Configure: the build configuration of the FFmpeg libraries

Finally, MainActivity invokes the C/C++ functions through its native methods:
```java
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    // Used to load the 'native-lib' library on application startup.
    static {
        System.loadLibrary("native-lib");
//        System.loadLibrary("avcodec-57");
//        System.loadLibrary("avfilter-6");
//        System.loadLibrary("avformat-57");
//        System.loadLibrary("avutil-55");
//        System.loadLibrary("swresample-2");
//        System.loadLibrary("swscale-4");
    }

    private Button protocol, format, codec, filter;
    private TextView tv_info;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        init();
    }

    private void init() {
        protocol = (Button) findViewById(R.id.btn_protocol);
        format = (Button) findViewById(R.id.btn_format);
        codec = (Button) findViewById(R.id.btn_codec);
        filter = (Button) findViewById(R.id.btn_filter);
        tv_info = (TextView) findViewById(R.id.tv_info);

        protocol.setOnClickListener(this);
        format.setOnClickListener(this);
        codec.setOnClickListener(this);
        filter.setOnClickListener(this);
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.btn_protocol:
                tv_info.setText(urlProtocolInfo());
                break;
            case R.id.btn_format:
                tv_info.setText(avFormatInfo());
                break;
            case R.id.btn_codec:
                tv_info.setText(avCodecInfo());
                break;
            case R.id.btn_filter:
                tv_info.setText(avFilterInfo());
                break;
            default:
                break;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Native methods implemented by the 'native-lib' native library,
     * which is packaged with this application.
     */
    public native String stringFromJNI();
    public native void decode(String input, String output);
    public native String avFilterInfo();
    public native String avCodecInfo();
    public native String avFormatInfo();
    public native String urlProtocolInfo();
}
```
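The Activity above only wires up the four info buttons. As a complement, here is a minimal sketch of how the remaining two native methods might be exercised from inside MainActivity; the runNativeDemos() helper and the file paths are hypothetical, not part of the original project, and decode() is kept off the UI thread because it performs heavy file I/O (the app also needs storage access for the chosen paths):

```java
// A usage sketch to be placed inside MainActivity (requires android.os.Environment).
// The helper name and file paths below are hypothetical placeholders.
private void runNativeDemos() {
    // Quick smoke test: if the FFmpeg .so files were linked and packaged
    // correctly, this displays the avcodec_configuration() string.
    tv_info.setText(stringFromJNI());

    // decode() reads a video file and writes raw YUV420P frames,
    // so run it on a background thread.
    new Thread(new Runnable() {
        @Override
        public void run() {
            final String input = Environment.getExternalStorageDirectory() + "/input.mp4";   // placeholder
            final String output = Environment.getExternalStorageDirectory() + "/output.yuv"; // placeholder
            decode(input, output);
        }
    }).start();
}
```

Calling runNativeDemos() at the end of init() would exercise both paths.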
The result of running the app is shown below: