
FFmpeg: reading a camera, pushing an RTMP stream, and displaying it locally at the same time (new FFmpeg API + SDL2)


Building on Lei Xiaohua's (雷霄骅) example, I ported the FFmpeg calls to the new API.

After reading several other blogs, I finally got it more or less working.

The pitfall I hit: local playback through SDL looked fine, but the picture received from the pushed stream was partially corrupted, even though pushing with the plain ffmpeg command worked correctly:

ffmpeg -f dshow -i video="USB2.0 PC CAMERA" -vcodec libx264 -acodec copy -preset:v ultrafast -tune:v zerolatency -f flv rtmp://localhost:1935/live/room

My first fix was to remove

av_dict_set(&param, "tune", "zerolatency", 0);

which did cure the corruption, but the latency became very large.

Later, comparing the log output against the command above, I noticed the encoding pixel formats differed: Lei's code used YUV420P, while the command used YUV422P.

After changing the code to match, the problem was indeed solved.
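
In other words, the destination format passed to sws_getContext and the encoder's pix_fmt have to agree, and both should be chosen from what the camera actually delivers. A minimal sketch of that decision (variable names follow the full listing below; treating the camera as packed 4:2:2 YUYV422 is an assumption about this particular device, not a general rule):

// Sketch: derive the encoder pixel format from what the camera reports.
// Assumes ifmt_ctx and videoindex are set up as in the full listing below.
enum AVPixelFormat cam_fmt =
	(enum AVPixelFormat)ifmt_ctx->streams[videoindex]->codecpar->format;
// Assumption: this camera outputs packed 4:2:2 (YUYV422), so planar 4:2:2
// preserves its chroma resolution; anything else falls back to 4:2:0.
enum AVPixelFormat enc_fmt =
	(cam_fmt == AV_PIX_FMT_YUYV422) ? AV_PIX_FMT_YUV422P : AV_PIX_FMT_YUV420P;
oCodecCtx->pix_fmt = enc_fmt;
// sws_getContext must then also use enc_fmt as its destination format.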

I'm still a beginner, so there are surely things done poorly here; corrections and suggestions are very welcome.

Enough talk; here is the code:

#include <stdio.h>

#define __STDC_CONSTANT_MACROS

#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
#include "SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <SDL2/SDL.h>
#ifdef __cplusplus
};
#endif
#endif

//Output YUV420P 
#define OUTPUT_YUV420P  0
//'1' Use Dshow 
//'0' Use VFW
#define USE_DSHOW 1

#define USE_SDL 0	//'1' enable the local SDL preview, '0' stream only


//Refresh Event
#define SFM_REFRESH_EVENT  (SDL_USEREVENT + 1)

#define SFM_BREAK_EVENT  (SDL_USEREVENT + 2)

int thread_exit = 0;

int sfp_refresh_thread(void *opaque)
{
	thread_exit = 0;
	while (!thread_exit) {
		SDL_Event event;
		event.type = SFM_REFRESH_EVENT;
		SDL_PushEvent(&event);
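		//40 ms per tick ≈ 25 fps, matching the encoder's time base below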
		SDL_Delay(40);
	}
	thread_exit = 0;
	//Break
	SDL_Event event;
	event.type = SFM_BREAK_EVENT;
	SDL_PushEvent(&event);

	return 0;
}


//Show Dshow Device
void show_dshow_device() {
	AVFormatContext *ifmt_ctx = avformat_alloc_context();
	AVDictionary* options = NULL;
	av_dict_set(&options, "list_devices", "true", 0);
	AVInputFormat *iformat = av_find_input_format("dshow");
	printf("========Device Info=============\n");
	avformat_open_input(&ifmt_ctx, "video=dummy", iformat, &options);
	printf("================================\n");
}

//Show Dshow Device Option
void show_dshow_device_option() {
	AVFormatContext *ifmt_ctx = avformat_alloc_context();
	AVDictionary* options = NULL;
	av_dict_set(&options, "list_options", "true", 0);
	AVInputFormat *iformat = av_find_input_format("dshow");
	printf("========Device Option Info======\n");
	avformat_open_input(&ifmt_ctx, "video=USB2.0 PC CAMERA", iformat, &options);
	printf("================================\n");
}

//Show VFW Device
void show_vfw_device() {
	AVFormatContext *ifmt_ctx = avformat_alloc_context();
	AVInputFormat *iformat = av_find_input_format("vfwcap");
	printf("========VFW Device Info======\n");
	avformat_open_input(&ifmt_ctx, "list", iformat, NULL);
	printf("=============================\n");
}

//Show AVFoundation Device
void show_avfoundation_device() {
	AVFormatContext *ifmt_ctx = avformat_alloc_context();
	AVDictionary* options = NULL;
	av_dict_set(&options, "list_devices", "true", 0);
	AVInputFormat *iformat = av_find_input_format("avfoundation");
	printf("==AVFoundation Device Info===\n");
	avformat_open_input(&ifmt_ctx, "", iformat, &options);
	printf("=============================\n");
}


int main(int argc, char* argv[])
{
	AVOutputFormat *ofmt = NULL;
	AVFormatContext	*ifmt_ctx = NULL;
	AVFormatContext *ofmt_ctx = NULL;

	int				i, videoindex;
	AVCodecContext	*iCodecCtx;
	AVCodecContext	*oCodecCtx;
	AVStream		*video_st;
	AVCodec			*iCodec;
	AVCodec			*oCodec;
	//AVPacket		pkt;
	int				ret = -1;
	int				got_decpicture = -1;
	int				got_encpicture = -1;

	int frame_index = 0;
	const char  *out_filename;

	int64_t start_time = 0;

	//av_register_all();
	avformat_network_init();
	ifmt_ctx = avformat_alloc_context();

	//Open File
	//char filepath[]="src01_480x272_22.h265";
	//avformat_open_input(&ifmt_ctx,filepath,NULL,NULL)

	out_filename = "rtmp://localhost:1935/live/room";
	//Register Device
	avdevice_register_all();
	//Windows
#ifdef _WIN32

	//Show Dshow Device
	show_dshow_device();
	//Show Device Options
	show_dshow_device_option();
	//Show VFW Options
	show_vfw_device();

	//Input section
#if USE_DSHOW
	AVInputFormat *ifmt = av_find_input_format("dshow");
	//Set own video device's name
	if (avformat_open_input(&ifmt_ctx, "video=USB2.0 PC CAMERA", ifmt, NULL) != 0) {
		printf("Couldn't open input stream.(无法打开输入流)\n");
		return -1;
	}
#else
	AVInputFormat *ifmt = av_find_input_format("vfwcap");
	if (avformat_open_input(&ifmt_ctx, "0", ifmt, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
#endif
#elif defined linux
	//Linux
	AVInputFormat *ifmt = av_find_input_format("video4linux2");
	if (avformat_open_input(&ifmt_ctx, "/dev/video0", ifmt, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
#else
	show_avfoundation_device();
	//Mac
	AVInputFormat *ifmt = av_find_input_format("avfoundation");
	//Avfoundation
	//[video]:[audio]
	if (avformat_open_input(&ifmt_ctx, "0", ifmt, NULL) != 0) {
		printf("Couldn't open input stream.\n");
		return -1;
	}
#endif

	if (avformat_find_stream_info(ifmt_ctx, NULL)<0)
	{
		printf("Couldn't find stream information.(无法获取流信息)\n");
		return -1;
	}
	videoindex = -1;
	for (i = 0; i<ifmt_ctx->nb_streams; i++)
		if (ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			videoindex = i;
			break;
		}
	if (videoindex == -1)
	{
		printf("Couldn't find a video stream.(没有找到视频流)\n");
		return -1;
	}
	//pCodecCtx = ifmt_ctx->streams[videoindex]->codec;
	iCodecCtx = avcodec_alloc_context3(NULL);
	avcodec_parameters_to_context(iCodecCtx, ifmt_ctx->streams[videoindex]->codecpar);

	iCodec = avcodec_find_decoder(ifmt_ctx->streams[videoindex]->codecpar->codec_id);	//use videoindex, not the loop variable
	if (iCodec == NULL)
	{
		printf("Codec not found.\n");
		return -1;
	}
	if (avcodec_open2(iCodecCtx, iCodec, NULL)<0)
	{
		printf("Could not open codec.(无法打开解码器)\n");
		return -1;
	}
	//Dump Format------------------
	av_dump_format(ifmt_ctx, 0, "video=USB2.0 PC CAMERA",0);

	//Output section


	avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename);
	if (!ofmt_ctx) {
		printf("Could not create output context(不能创建输出上下文)\n");
		return -1;
	}
	ofmt = ofmt_ctx->oformat;	//needed for the AVFMT_NOFILE check during cleanup

	oCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
	if (!oCodec) {
		printf("Can not find encoder! (没有找到合适的编码器!)\n");
		return -1;
	}

	oCodecCtx = avcodec_alloc_context3(oCodec);
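	//YUV422P (not YUV420P) fixed the corrupted frames described above; it must
	//also match the destination format passed to sws_getContext below.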
	oCodecCtx->pix_fmt = AV_PIX_FMT_YUV422P;
	oCodecCtx->width = ifmt_ctx->streams[videoindex]->codecpar->width;
	oCodecCtx->height = ifmt_ctx->streams[videoindex]->codecpar->height;
	oCodecCtx->time_base.num = 1;
	oCodecCtx->time_base.den = 25;
	oCodecCtx->bit_rate = 400000;
	oCodecCtx->gop_size = 250;

	if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
		oCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	//H264 codec param  
	//pCodecCtx->me_range = 16;  
	//pCodecCtx->max_qdiff = 4;  
	//pCodecCtx->qcompress = 0.6;  
	oCodecCtx->qmin = 10;
	oCodecCtx->qmax = 51;
	//Optional Param  
	oCodecCtx->max_b_frames = 3;
	// Set H264 preset and tune  
	
	AVDictionary *param = 0;
	av_dict_set(&param, "preset", "ultrafast", 0);
	//Removing "zerolatency" cured the corruption but added latency; keeping it
	//works once the encoder pixel format is corrected to YUV422P.
	av_dict_set(&param, "tune", "zerolatency", 0);
	if (avcodec_open2(oCodecCtx, oCodec, &param) < 0) {
		printf("Failed to open encoder! (编码器打开失败!)\n");
		return -1;
	}
	//Add a new stream to output; must be called before avformat_write_header() for muxing
	video_st = avformat_new_stream(ofmt_ctx, oCodec);
	if (video_st == NULL) {
		return -1;
	}
	video_st->time_base.num = 1;
	video_st->time_base.den = 25;
	//video_st->codec = oCodecCtx;
	avcodec_parameters_from_context(video_st->codecpar,oCodecCtx);
	//Open output URL; set up before avformat_write_header() for muxing
	if (avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE) < 0) {
		printf("Failed to open output file!\n");
		return -1;
	}

	//Dump Format------------------
	av_dump_format(ofmt_ctx, 0, out_filename, 1);

	//Write file header
	ret = avformat_write_header(ofmt_ctx, NULL);
	if (ret < 0) {
		printf("Error occurred when opening output URL\n");
		return -1;
	}

	

	//prepare before decode and encode
	AVPacket *dec_packet = av_packet_alloc();	//properly initialized, unlike raw av_malloc()
	AVPacket enc_packet;
	//
	////AVPacket pkt;
	//int y_size = oCodecCtx->width * oCodecCtx->height;
	//av_new_packet(&enc_packet, y_size * 3);

	//Pixel-format conversion (libswscale) context
	struct SwsContext *img_convert_ctx;
	img_convert_ctx = sws_getContext(iCodecCtx->width, iCodecCtx->height,
		iCodecCtx->pix_fmt, oCodecCtx->width, oCodecCtx->height, AV_PIX_FMT_YUV422P, SWS_BICUBIC, NULL, NULL, NULL);
	
	AVFrame	*pFrame, *pFrameYUV;
	pFrameYUV = av_frame_alloc();
	//Set the frame's format, width and height, otherwise the encoder warns:
	//1. AVFrame.format is not set
	//2. AVFrame.width or height is not set
	pFrameYUV->format = oCodecCtx->pix_fmt;
	pFrameYUV->width = oCodecCtx->width;
	pFrameYUV->height = oCodecCtx->height;
	pFrameYUV->pts = 0;	//start from 0 so the pts++ below stays monotonic
	// Buffer for the converted image data
	uint8_t *out_buffer;
	// av_image_get_buffer_size: returns the byte size needed to store an image with the given parameters
	out_buffer = (uint8_t *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV422P, oCodecCtx->width, oCodecCtx->height, 1));
	// Fill in the frame's data pointers and linesizes from the buffer
	av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV422P, oCodecCtx->width, oCodecCtx->height, 1);
#if USE_SDL
	//SDL----------------------------
	if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
		printf("Could not initialize SDL - %s\n", SDL_GetError());
		return -1;
	}
	int screen_w = 0, screen_h = 0;
	SDL_Window *screen;
	screen_w = oCodecCtx->width;
	screen_h = oCodecCtx->height;
	//screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
	//SDL 2.0 Support for multiple windows
	screen = SDL_CreateWindow("Simplest FFmpeg Device", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
		screen_w, screen_h, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
	if (!screen) {
		printf("SDL: could not set video mode - exiting:%s\n", SDL_GetError());
		return -1;
	}
	SDL_Texture *sdlTexture;
	//bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);

	SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
	Uint32 pixformat = 0;
	//IYUV: Y + U + V  (3 planes)
	//YV12: Y + V + U  (3 planes)
	pixformat = SDL_PIXELFORMAT_IYUV;
	sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat, SDL_TEXTUREACCESS_STREAMING, oCodecCtx->width, oCodecCtx->height);
	SDL_Rect rect;
	rect.x = 0;
	rect.y = 0;
	rect.w = screen_w;
	rect.h = screen_h;
	//SDL End------------------------
	//------------------------------
	SDL_Thread *video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);
	//
	//SDL_WM_SetCaption("Simplest FFmpeg Read Camera", NULL);
	//Event Loop
	SDL_Event event;
#endif // USE_SDL


	start_time = av_gettime();
	for (;;) 
	{
#if USE_SDL
		//Wait
		SDL_WaitEvent(&event);

		if (event.type == SFM_REFRESH_EVENT) {
#endif
			//------------------------------
			if (av_read_frame(ifmt_ctx, dec_packet) >= 0) 
			{
				if (dec_packet->stream_index == videoindex)
				{
					pFrame = av_frame_alloc();
					if (!pFrame) {
						printf("alloc pFrame Failed.\n");
						return -1;
					}
					//ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
					ret = avcodec_send_packet(iCodecCtx, dec_packet);
					got_decpicture = avcodec_receive_frame(iCodecCtx, pFrame);
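					//Note: one send_packet is paired with one receive_frame here, which
					//holds for typical camera input (one frame per packet); a general
					//decoder should loop on avcodec_receive_frame() until AVERROR(EAGAIN).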
					if (ret < 0) 
					{
						av_frame_free(&pFrame);
						printf("Decode Error.\n");
						return -1;
					}
					if (got_decpicture == 0)
					{
						
						//Convert to the encoder's planar YUV format
						sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, iCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
						
						//Initialize the output packet
						enc_packet.data = NULL;
						enc_packet.size = 0;
						av_init_packet(&enc_packet);
						
						//Encode
						//ret = avcodec_encode_video2(oCodecCtx, &enc_packet, pFrameYUV, &got_encpicture);
						ret = avcodec_send_frame(oCodecCtx, pFrameYUV);
						//FIX : non-strictly-monotonic PTS
						pFrameYUV->pts++;
						if (ret < 0) {
							av_frame_free(&pFrame);
							printf("Encode Error.\n");
							return -1;
						}
						got_encpicture = avcodec_receive_packet(oCodecCtx, &enc_packet);
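						//Note: with tune=zerolatency x264 introduces no frame delay, so a
						//single receive per frame works; without it the first calls return
						//AVERROR(EAGAIN) and the packets must be drained in a loop.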
						
						if (got_encpicture==0)
						{
							frame_index++;
							enc_packet.stream_index = video_st->index;
							//FIX:No PTS (Example: Raw H.264)
							//Simple Write PTS
							if (enc_packet.pts == AV_NOPTS_VALUE) 
							{
								//Write PTS
								AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
								//Duration between 2 frames (us)
								int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
								//Parameters
								enc_packet.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base1)*AV_TIME_BASE);
								enc_packet.dts = enc_packet.pts;
								enc_packet.duration = (double)calc_duration / (double)(av_q2d(time_base1)*AV_TIME_BASE);
							}

							//Write PTS
							AVRational time_base = video_st->time_base;//after avformat_write_header() FLV typically uses { 1, 1000 }
							AVRational r_framerate1 = ifmt_ctx->streams[videoindex]->r_frame_rate;// { 50, 2 };
							AVRational time_base_q = { 1, AV_TIME_BASE };
							//Duration between 2 frames (us)
							int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));	//in the internal (microsecond) time base
							//enc_packet.pts = (double)(frame_index*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
							enc_packet.pts = av_rescale_q(frame_index*calc_duration, time_base_q, time_base);
							enc_packet.dts = enc_packet.pts;
							enc_packet.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
							enc_packet.pos = -1;
							//Delay
							int64_t pts_time = av_rescale_q(enc_packet.dts, time_base, time_base_q);
							int64_t now_time = av_gettime() - start_time;
							if (pts_time > now_time)
								av_usleep(pts_time - now_time);

							//Print to Screen
							if (enc_packet.stream_index == videoindex) {
								printf("Send %8d video frames to output URL\n", frame_index);
								
							}
							//Mux the packet into the output (sends it over RTMP)
							ret = av_interleaved_write_frame(ofmt_ctx, &enc_packet);
							
						}
						av_packet_unref(&enc_packet);
#if USE_SDL
						//SDL---------------------------
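						//Caveat: SDL_PIXELFORMAT_IYUV expects 4:2:0 planar data, while the
						//frame is now YUV422P; a correct preview would need a separate
						//YUV420P conversion (or a matching SDL pixel format).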
						// Upload the frame to the texture
						SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
						// Clear the renderer
						SDL_RenderClear(sdlRenderer);

						// Copy the texture to the renderer (NULL source/dest: whole texture to whole window)
						//SDL_RenderCopy( sdlRenderer, sdlTexture, &sdlRect, &sdlRect );
						SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, NULL);
						// Present
						SDL_RenderPresent(sdlRenderer);
						//SDL End-----------------------
#endif
					}
					av_frame_free(&pFrame);
				}
				av_packet_unref(dec_packet);
			}
#if USE_SDL
			else 
			{
				//Exit Thread
				thread_exit = 1;
			}
#endif
#if USE_SDL
		}
		else if (event.type == SDL_QUIT) {
			thread_exit = 1;
		}
		else if (event.type == SFM_BREAK_EVENT) {
			break;
		}
#endif
	}
#if USE_SDL
	SDL_Quit();
#endif
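	//Omitted here: flushing the encoder's buffered frames (avcodec_send_frame(oCodecCtx, NULL),
	//then draining avcodec_receive_packet() until AVERROR_EOF) before writing the trailer.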
	//Write file trailer
	av_write_trailer(ofmt_ctx);

	av_free(out_buffer);
	av_packet_free(&dec_packet);
	av_frame_free(&pFrameYUV);
	sws_freeContext(img_convert_ctx);
	if (video_st)
		avcodec_free_context(&oCodecCtx);
	avcodec_free_context(&iCodecCtx);

	avformat_close_input(&ifmt_ctx);	//also frees ifmt_ctx and sets it to NULL
	/* close output */
	if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
		avio_close(ofmt_ctx->pb);
	avformat_free_context(ofmt_ctx);
	if (ret < 0 && ret != AVERROR_EOF) {
		printf("Error occurred.\n");
		return -1;
	}


	return 0;
}

The shutdown path at the end has not been tested; I'm worried it may leak memory.
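
To sanity-check the stream end to end, it can be pulled back with ffplay (assuming an RTMP server such as nginx with the RTMP module is listening on localhost:1935, as in the push command above):

ffplay rtmp://localhost:1935/live/room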
