/*
 * Play raw YUV420P frame data directly on an Android Surface.
 * (Originally adapted from a blog post, 2022-03-29.)
 */
#include <jni.h>
#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <unistd.h>
#define TAG "recordvideo-lib" // 这个是自定义的LOG的标识
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,TAG ,__VA_ARGS__) // 定义LOGI类型
/**
 * Render a single YUV420P frame onto an Android Surface.
 *
 * Converts the caller-supplied YUV420P pixel data to RGBA with libswscale,
 * copies it into the window's buffer, posts it, then sleeps `pts`
 * microseconds for crude frame pacing (~33333 us for 30 fps).
 *
 * @param data_   Java byte array holding one tightly packed YUV420P frame
 *                (size must be width*height*3/2 — not validated here).
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @param pts     per-frame sleep in microseconds (frame pacing)
 * @param surface android.view.Surface to draw into
 * @return 0 on success, -1 on any failure.
 */
JNIEXPORT jint JNICALL
Java_com_uestc_smileteeth_view_recordvideo_RecordVideoLib_videoPlay(JNIEnv *env, jclass type,
                                                                    jbyteArray data_, jint width,
                                                                    jint height, jint pts,
                                                                    jobject surface) {
    jint ret = -1;
    struct SwsContext *swsContext = NULL;
    ANativeWindow *nativeWindow = NULL;
    AVFrame *frame = NULL;
    AVFrame *outFrame = NULL;
    uint8_t *out_buffer = NULL;

    /* Pin the Java array. BUG FIX: the old code stored the jbyte* in a jint*.
     * Released with JNI_ABORT below because the pixels are never modified. */
    jbyte *data = (*env)->GetByteArrayElements(env, data_, NULL);
    if (data == NULL) {
        return -1;
    }

    const int y_size = width * height;
    enum AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P; /* input pixel format  */
    enum AVPixelFormat dstFormat = AV_PIX_FMT_RGBA;    /* output pixel format */

    frame = av_frame_alloc();
    outFrame = av_frame_alloc();
    if (frame == NULL || outFrame == NULL) {
        LOGI("Could not allocate AVFrame!");
        goto cleanup;
    }
    frame->format = srcFormat;
    frame->width = width;
    frame->height = height;

    /* Point the source frame directly at the caller's planes — no copy.
     * (The old code leaked both an avpicture_get_size() malloc and an
     * av_image_alloc() buffer by overwriting their pointers here.) */
    uint8_t *picture_buf = (uint8_t *) data;
    frame->data[0] = picture_buf;                  // Y plane
    frame->data[1] = picture_buf + y_size;         // U plane
    frame->data[2] = picture_buf + y_size * 5 / 4; // V plane
    frame->linesize[0] = width;                    // tightly packed planes
    frame->linesize[1] = width / 2;
    frame->linesize[2] = width / 2;

    /* Destination RGBA buffer. BUG FIX: the old code passed `width` twice
     * to av_image_get_buffer_size(), under-allocating whenever
     * height > width and overflowing the heap in sws_scale(). */
    out_buffer = av_malloc((size_t) av_image_get_buffer_size(dstFormat, width, height, 1));
    if (out_buffer == NULL) {
        LOGI("Could not allocate RGBA buffer!");
        goto cleanup;
    }
    av_image_fill_arrays(outFrame->data, outFrame->linesize, out_buffer,
                         dstFormat, width, height, 1);

    /* YUV420P -> RGBA converter, same resolution in and out. */
    swsContext = sws_getContext(width, height, srcFormat,
                                width, height, dstFormat,
                                SWS_BILINEAR, NULL, NULL, NULL);
    if (swsContext == NULL) {
        LOGI("swsContext==NULL");
        goto cleanup;
    }

    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (nativeWindow == NULL) {
        LOGI("ANativeWindow_fromSurface failed!");
        goto cleanup;
    }
    /* Sets the buffer's pixel count, not the physical display size; a
     * mismatch means the image is scaled (stretched/squeezed) on screen. */
    ANativeWindow_setBuffersGeometry(nativeWindow, width, height,
                                     WINDOW_FORMAT_RGBA_8888);

    ANativeWindow_Buffer outBuffer;
    if (ANativeWindow_lock(nativeWindow, &outBuffer, NULL) != 0) {
        LOGI("ANativeWindow_lock failed!");
        goto cleanup;
    }

    sws_scale(swsContext, (const uint8_t *const *) frame->data,
              frame->linesize, 0, height, outFrame->data, outFrame->linesize);

    /* RGBA output lives entirely in data[0]/linesize[0]. */
    uint8_t *dst = (uint8_t *) outBuffer.bits;
    uint8_t *src = outFrame->data[0];
    int dstStride = outBuffer.stride * 4; /* stride is in pixels, 4 bytes each */
    int srcStride = outFrame->linesize[0];
    /* Copy the smaller stride per row so we never write past the window
     * buffer (the old code copied srcStride bytes unconditionally). */
    int copyBytes = srcStride < dstStride ? srcStride : dstStride;
    for (int i = 0; i < height; i++) {
        memcpy(dst + i * dstStride, src + i * srcStride, (size_t) copyBytes);
    }

    ANativeWindow_unlockAndPost(nativeWindow);

    /* Frame pacing: too long feels laggy, too short plays fast. */
    usleep(pts);
    ret = 0;

cleanup:
    /* Single exit: the old code leaked everything on the error path and
     * fell off the end of a jint function with no return (UB). */
    if (nativeWindow != NULL) {
        ANativeWindow_release(nativeWindow);
    }
    if (swsContext != NULL) {
        sws_freeContext(swsContext);
    }
    av_free(out_buffer); /* av_free(NULL) is a no-op */
    av_frame_free(&outFrame);
    av_frame_free(&frame);
    (*env)->ReleaseByteArrayElements(env, data_, data, JNI_ABORT);
    return ret;
}