
Stitching two RGB and YUV images side by side with FFmpeg
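The program below opens an input video with libavformat, decodes it frame by frame, scales every decoded frame to a 640x360 RGB24 image with libswscale, copies each RGB row twice into a 1280x360 buffer so the same picture appears side by side, converts the stitched RGB buffer to YUV420P, and appends the raw planes to test.yuv. A commented-out block inside the decode loop shows the alternative approach from the title: doing the same side-by-side copy directly on the Y, U and V planes of a YUV420P frame.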


#define FRAMEWITH 640    //width of one source tile in pixels
#define FRAMEHEIGTH 360  //height of one source tile in pixels


#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include <stdio.h>
#include <stdlib.h>


int main(int argc, char* argv[])
{
	AVFormatContext *pFormatCtx;
	int             i, videoindex;
	AVCodecContext  *pCodecCtx;
	AVCodec         *pCodec;
	AVFrame *pFrame, *pDstFrame, *pFrameYUV;
	uint8_t *out_buffer;
	AVPacket *packet;
	int y_size;
	int ret, got_picture;
	struct SwsContext *img_convert_ctx;
	struct SwsContext *rgb_convert_ctx;

	//Input file path
	char filepath[] = "my_video.mp4";

	int frame_cnt;
	av_register_all();
	pFormatCtx = avformat_alloc_context();

	if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
	if (avformat_find_stream_info(pFormatCtx, NULL)<0) {
		printf("Couldn't find stream information.\n");
		return -1;
	}
	videoindex = -1;
	for (i = 0; i<pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoindex = i;
			break;
		}
	if (videoindex == -1) {
		printf("Didn't find a video stream.\n");
		return -1;
	}

	pCodecCtx = pFormatCtx->streams[videoindex]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		printf("Codec not found.\n");
		return -1;
	}
	if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
		printf("Could not open codec.\n");
		return -1;
	}
	/*
	* Code that prints the video information could be added here,
	* taken from pFormatCtx, using fprintf()
	*/
	pFrame = av_frame_alloc();
	//640x360 YUV420P frame; only needed by the commented-out YUV stitching path below
	pFrameYUV = av_frame_alloc();
	out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, FRAMEWITH, FRAMEHEIGTH));
	avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, FRAMEWITH, FRAMEHEIGTH);

	//640x360 RGB24 frame: the decoded picture scaled down to one tile
	AVFrame *pFrameRGB = av_frame_alloc();
	uint8_t *out_buffer_rgb = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_RGB24, FRAMEWITH, FRAMEHEIGTH));
	avpicture_fill((AVPicture *)pFrameRGB, out_buffer_rgb, AV_PIX_FMT_RGB24, FRAMEWITH, FRAMEHEIGTH);

	//1280x360 YUV420P destination frame (two tiles side by side), written to test.yuv
	pDstFrame = av_frame_alloc();
	int nDstSize = avpicture_get_size(AV_PIX_FMT_YUV420P, FRAMEWITH * 2, FRAMEHEIGTH);
	uint8_t *dstbuf = (uint8_t*)malloc(nDstSize);
	avpicture_fill((AVPicture*)pDstFrame, dstbuf, AV_PIX_FMT_YUV420P, FRAMEWITH * 2, FRAMEHEIGTH);

	//1280x360 RGB24 intermediate frame holding the two RGB copies before conversion
	AVFrame *pExtendFrame = av_frame_alloc();
	int nExtendSize = avpicture_get_size(AV_PIX_FMT_RGB24, FRAMEWITH * 2, FRAMEHEIGTH);
	uint8_t *Extendbuf = (uint8_t*)malloc(nExtendSize);
	avpicture_fill((AVPicture*)pExtendFrame, Extendbuf, AV_PIX_FMT_RGB24, FRAMEWITH * 2, FRAMEHEIGTH);

	pExtendFrame->width = FRAMEWITH * 2;
	pExtendFrame->height = FRAMEHEIGTH;
	pExtendFrame->format = AV_PIX_FMT_RGB24;

	pDstFrame->width = FRAMEWITH * 2;
	pDstFrame->height = FRAMEHEIGTH;
	pDstFrame->format = AV_PIX_FMT_YUV420P;

	packet = (AVPacket *)av_malloc(sizeof(AVPacket));
	//Output Info-----------------------------
	printf("--------------- File Information ----------------\n");
	av_dump_format(pFormatCtx, 0, filepath, 0);
	printf("-------------------------------------------------\n");
	//Stitched RGB24 (1280x360) -> YUV420P (1280x360)
	img_convert_ctx = sws_getContext(FRAMEWITH*2, FRAMEHEIGTH, AV_PIX_FMT_RGB24,
		FRAMEWITH*2, FRAMEHEIGTH, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
	//Decoded pixel format -> RGB24, scaled to one 640x360 tile
	rgb_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
		FRAMEWITH, FRAMEHEIGTH, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
	int count = 0;
	frame_cnt = 0;

	FILE *fp_yuv420 = fopen("test.yuv", "wb+");
	if (fp_yuv420 == NULL) {
		printf("Could not open output file.\n");
		return -1;
	}
	while (av_read_frame(pFormatCtx, packet) >= 0) {
		if (packet->stream_index == videoindex) {
			ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
			if (ret < 0) {
				printf("Decode Error.\n");
				return -1;
			}
			if (got_picture) {
				//Scale the decoded frame to a 640x360 RGB24 tile
				sws_scale(rgb_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
					pFrameRGB->data, pFrameRGB->linesize);
				//RGB stitching: copy each 640-pixel row (3 bytes per pixel) twice,
				//into the left and right halves of the 1280-pixel destination row
				for (int i = 0; i < FRAMEHEIGTH; i++) {
					memcpy(pExtendFrame->data[0] + i * FRAMEWITH * 2*3, pFrameRGB->data[0] + i * FRAMEWITH*3, FRAMEWITH*3);
					memcpy(pExtendFrame->data[0] + i * FRAMEWITH * 2*3 + FRAMEWITH*3, pFrameRGB->data[0] + i * FRAMEWITH*3, FRAMEWITH*3);
				}
				//Convert the stitched 1280x360 RGB24 frame to YUV420P
				sws_scale(img_convert_ctx, (const uint8_t* const*)pExtendFrame->data, pExtendFrame->linesize, 0, FRAMEHEIGTH,
					pDstFrame->data, pDstFrame->linesize);
				printf("Decoded frame index: %d\n", frame_cnt);
				/* YUV stitching: the same side-by-side layout can also be built directly from a
				   FRAMEWITH x FRAMEHEIGTH YUV420P frame (assumed here to already be in pFrameYUV),
				   producing a 2*FRAMEWITH x FRAMEHEIGTH YUV image without going through RGB.
				if (pFrameYUV)
				{
					int nYIndex = 0;
					int nUVIndex = 0;

					for (int i = 0; i < FRAMEHEIGTH; i++)
					{
						//Y: copy the source row into the left and the right half of the destination row
						memcpy(pDstFrame->data[0] + i * FRAMEWITH * 2, pFrameYUV->data[0] + nYIndex * FRAMEWITH, FRAMEWITH);
						memcpy(pDstFrame->data[0] + FRAMEWITH + i * FRAMEWITH * 2, pFrameYUV->data[0] + nYIndex * FRAMEWITH, FRAMEWITH);

						nYIndex++;
					}

					for (int i = 0; i < FRAMEHEIGTH / 2; i++)
					{
						//U: chroma planes are half width and half height, so rows are FRAMEWITH/2 bytes
						memcpy(pDstFrame->data[1] + i * FRAMEWITH, pFrameYUV->data[1] + nUVIndex * FRAMEWITH / 2, FRAMEWITH / 2);
						memcpy(pDstFrame->data[1] + FRAMEWITH / 2 + i * FRAMEWITH, pFrameYUV->data[1] + nUVIndex * FRAMEWITH / 2, FRAMEWITH / 2);

						//V
						memcpy(pDstFrame->data[2] + i * FRAMEWITH, pFrameYUV->data[2] + nUVIndex * FRAMEWITH / 2, FRAMEWITH / 2);
						memcpy(pDstFrame->data[2] + FRAMEWITH / 2 + i * FRAMEWITH, pFrameYUV->data[2] + nUVIndex * FRAMEWITH / 2, FRAMEWITH / 2);

						nUVIndex++;
					}
				}*/
				//Write the three planes of the stitched 1280x360 YUV420P frame:
				//Y = 1280*360 bytes, U and V = 640*180 bytes each
				fwrite(pDstFrame->data[0], 1, FRAMEWITH*FRAMEHEIGTH * 2, fp_yuv420);
				fwrite(pDstFrame->data[1], 1, FRAMEWITH*FRAMEHEIGTH / 2, fp_yuv420);
				fwrite(pDstFrame->data[2], 1, FRAMEWITH*FRAMEHEIGTH / 2, fp_yuv420);
				frame_cnt++;
			}
		}
		count++;
		av_free_packet(packet);
	}

	fclose(fp_yuv420);
	sws_freeContext(img_convert_ctx);
	sws_freeContext(rgb_convert_ctx);

	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);
	av_frame_free(&pFrameRGB);
	av_frame_free(&pExtendFrame);
	av_frame_free(&pDstFrame);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);

	return 0;
}
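A note on the API: av_register_all(), avcodec_decode_video2(), the avpicture_*() helpers, av_free_packet() and AVStream->codec used above are deprecated and have been removed from newer FFmpeg releases, so the listing only builds as-is against older versions. On a current FFmpeg the decoder context would be created with avcodec_alloc_context3()/avcodec_parameters_to_context(), and decoding would use the send/receive pattern; the fragment below is only a rough sketch of how the read loop above would change (same variable names as in the listing, error handling omitted):

	while (av_read_frame(pFormatCtx, packet) >= 0) {
		if (packet->stream_index == videoindex) {
			if (avcodec_send_packet(pCodecCtx, packet) == 0) {
				//one packet may produce zero or more decoded frames
				while (avcodec_receive_frame(pCodecCtx, pFrame) == 0) {
					//scale, stitch and write pFrame exactly as in the loop above
				}
			}
		}
		av_packet_unref(packet);
	}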

 

Original image:

(screenshot of a source frame)

Stitched image:

(screenshot of the stitched 1280x360 output)
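Because test.yuv is raw planar data without any header, a player has to be told the pixel format and resolution. With the defines used above (two 640x360 tiles, i.e. 1280x360), a command along the following lines should preview the stitched frames with ffplay (the source frame rate is not stored in the file, so playback speed is only approximate):

ffplay -f rawvideo -pixel_format yuv420p -video_size 1280x360 test.yuv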
