Decoding and Displaying a TS Stream with FFmpeg
Work has been slow at the company recently, so I took the time to clean up something I built earlier: decoding a live stream with FFmpeg.
A previous project of mine was a smart-home control system with surveillance. It had multiple cameras, each of which had to be displayed, and the same preview had to appear in several different positions on screen, in sync. I originally used VLC for decoding and display, but with many preview sources each view decodes the stream separately. That wastes CPU, and every time a source is dragged onto a controller yet another decode is spun up, so that approach clearly does not meet the requirement.
The ideal is to decode each source exactly once: when the same source is shown in several places, the extra views simply render the already-decoded data.
Below is the result I ended up with. (Because of limitations in my screen-recording software I could not interact with the app while recording.) The three pictures all come from a single decode displayed in different places; in other words, I decoded one stream and rendered its frames at multiple positions.
This is a thread optimization built on top of someone else's work; I borrowed their code and wrapped everything in a single class, so it is very simple to call. A download link for the source file is at the end of the post.
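To make the "decode once, render many" idea concrete before diving into the class, here is a minimal sketch of the fan-out part. FrameFanout is a hypothetical name of mine and is not part of the class below; the idea is simply that one decoder pushes each decoded UIImage to every registered preview view, so adding another preview costs only a render, not another decode.
#import <UIKit/UIKit.h>
// Hypothetical helper: one decoder in, many preview views out.
@interface FrameFanout : NSObject
- (void)attachView:(UIImageView *)view;
- (void)pushFrame:(UIImage *)frame;
@end
@implementation FrameFanout
{
    NSHashTable<UIImageView *> *_views; // weak refs: views may disappear freely
}
- (instancetype)init
{
    if (self = [super init]) {
        _views = [NSHashTable weakObjectsHashTable];
    }
    return self;
}
- (void)attachView:(UIImageView *)view
{
    [_views addObject:view];
}
- (void)pushFrame:(UIImage *)frame
{
    // One decoded frame is shared by every view. UIImage is immutable,
    // so the same instance can safely back all the previews.
    dispatch_async(dispatch_get_main_queue(), ^{
        for (UIImageView *v in self->_views) {
            v.image = frame;
        }
    });
}
@end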
// Headers assumed by the code below (exact paths depend on how FFmpeg is integrated).
#import "CQMovieView.h"
#import <libavformat/avformat.h>
#import <libavcodec/avcodec.h>
#import <libswscale/swscale.h>

@implementation CQMovieView
{
    AVFormatContext *FormatCtx;      // demuxer context for the input
    AVCodecContext  *codecCtx;       // decoder context for the video stream
    AVFrame         *avframe;        // holds the most recently decoded frame
    AVStream        *stream;         // the selected video stream
    AVPacket        packet;          // the most recently read packet
    AVPicture       picture;         // RGB buffer used for display
    int             videoStram;      // index of the video stream
    double          fps;             // frame rate used to drive the display timer
    BOOL            isReleaseResources;
    dispatch_queue_t queue;          // serial queue for decoding work
    UIImageView     *imageView;      // the view the frames are rendered into
}
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    return self;
}
- (void)Video:(NSString *)moviePath
{
    // Decode on a private serial queue so the UI stays responsive.
    queue = dispatch_queue_create("LABEL", DISPATCH_QUEUE_SERIAL);
    self.cruutenPath = [moviePath copy];
    imageView = [[UIImageView alloc] initWithFrame:self.bounds];
    [self addSubview:imageView];
    dispatch_async(queue, ^{
        [self initializeResources:[moviePath UTF8String]];
    });
}
- (void)displayNextFrame:(NSTimer *)timer {
    NSTimeInterval startTime = [NSDate timeIntervalSinceReferenceDate];
    // Stop the timer once no more frames can be decoded.
    if (![self stepFrame]) {
        [timer invalidate];
        return;
    }
    imageView.image = self.currentImage;
    // Smooth the measured per-frame rate with a simple low-pass filter (LERP).
    float frameTime = 1.0 / ([NSDate timeIntervalSinceReferenceDate] - startTime);
    if (_lastFrameTime < 0) {
        _lastFrameTime = frameTime;
    } else {
        _lastFrameTime = LERP(frameTime, _lastFrameTime, 0.8);
    }
}
- (BOOL)initializeResources:(const char *)filePath {
    isReleaseResources = NO;
    AVCodec *pCodec;
    // Register all codecs and (de)muxers, and set up networking.
    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    // Open the input (file or network stream).
    if (avformat_open_input(&FormatCtx, filePath, NULL, NULL) != 0) {
        NSLog(@"Failed to open the input");
        return NO;
    }
    // Probe the input to discover the streams it contains.
    if (avformat_find_stream_info(FormatCtx, NULL) < 0) {
        NSLog(@"Failed to find stream info");
        return NO;
    }
    // Pick the best video stream (also returns a suitable decoder).
    if ((videoStram = av_find_best_stream(FormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0)) < 0) {
        NSLog(@"No video stream found");
        return NO;
    }
    // Grab the codec context of the selected video stream.
    stream = FormatCtx->streams[videoStram];
    codecCtx = stream->codec;
#if DEBUG
    // Dump detailed information about the video stream.
    av_dump_format(FormatCtx, videoStram, filePath, 0);
#endif
    if (stream->avg_frame_rate.den && stream->avg_frame_rate.num) {
        fps = av_q2d(stream->avg_frame_rate);
    } else {
        fps = 30;
    }
    // Find and open the decoder.
    pCodec = avcodec_find_decoder(codecCtx->codec_id);
    if (pCodec == NULL) {
        NSLog(@"No decoder found");
        return NO;
    }
    if (avcodec_open2(codecCtx, pCodec, NULL) < 0) {
        NSLog(@"Failed to open the decoder");
        return NO;
    }
    // Allocate the frame that decoded pictures will be written into.
    avframe = av_frame_alloc();
    _outputWidth = codecCtx->width;
    _outputHeight = codecCtx->height;
    // Drive rendering from a timer on the main queue, one tick per frame.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self seekTime:0.0];
        [NSTimer scheduledTimerWithTimeInterval:1.0 / fps
                                         target:self
                                       selector:@selector(displayNextFrame:)
                                       userInfo:nil
                                        repeats:YES];
    });
    return YES;
}
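// Seek to an absolute position (in seconds) and flush the decoder's
// buffered frames so no stale pictures are displayed.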
- (void)seekTime:(double)seconds {
    AVRational timeBase = FormatCtx->streams[videoStram]->time_base;
    int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
    avformat_seek_file(FormatCtx, videoStram, 0, targetFrame, targetFrame, AVSEEK_FLAG_FRAME);
    avcodec_flush_buffers(codecCtx);
}
- (BOOL)stepFrame
{
    // Keep reading packets until a complete video frame has been decoded.
    int frameFinished = 0;
    while (!frameFinished && av_read_frame(FormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStram) {
            // The video packet is kept around (not unreffed here) so that
            // -currentTime can still read its pts; it is released later.
            avcodec_decode_video2(codecCtx, avframe, &frameFinished, &packet);
        } else {
            // Discard packets from other streams so they do not leak.
            av_packet_unref(&packet);
        }
    }
    // End of stream: release everything exactly once.
    if (frameFinished == 0 && isReleaseResources == NO) {
        [self releaseResources];
    }
    return frameFinished != 0;
}
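// Switch to a new source: release the old decoder state first, then
// initialize again with the new path.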
- (void)replaceTheResources:(NSString *)moviePath {
    if (!isReleaseResources) {
        [self releaseResources];
    }
    self.cruutenPath = [moviePath copy];
    [self initializeResources:[moviePath UTF8String]];
}
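// Re-open the current source, e.g. to recover after a dropped stream.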
- (void)redialPaly
{
    [self initializeResources:[self.cruutenPath UTF8String]];
}
#pragma mark - Overridden property accessors
- (void)setOutputWidth:(int)newValue {
    if (_outputWidth == newValue) return;
    _outputWidth = newValue;
}
- (void)setOutputHeight:(int)newValue {
    if (_outputHeight == newValue) return;
    _outputHeight = newValue;
}
- (UIImage *)currentImage {
    if (!avframe->data[0]) return nil;
    return [self imageFromAVPicture];
}
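// Total duration of the input in seconds (the container header stores it
// in AV_TIME_BASE units).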
- (double)duration {
    return (double)FormatCtx->duration / AV_TIME_BASE;
}
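// Presentation timestamp of the most recently read packet, in seconds.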
- (double)currentTime {
    AVRational timeBase = FormatCtx->streams[videoStram]->time_base;
    return packet.pts * (double)timeBase.num / timeBase.den;
}
- (int)sourceWidth {
    return codecCtx->width;
}
- (int)sourceHeight {
    return codecCtx->height;
}
- (double)fps {
    return fps;
}
#pragma mark - Internal methods
- (UIImage *)imageFromAVPicture
{
    // Reallocate the RGB buffer for the current output size.
    avpicture_free(&picture);
    avpicture_alloc(&picture, AV_PIX_FMT_RGB24, _outputWidth, _outputHeight);
    // Convert the decoded frame to RGB24 at the requested output size.
    // Using codecCtx->pix_fmt instead of hard-coding AV_PIX_FMT_YUV420P
    // keeps this working for sources that decode to other pixel formats.
    struct SwsContext *imgConverCtx = sws_getContext(avframe->width,
                                                     avframe->height,
                                                     codecCtx->pix_fmt,
                                                     _outputWidth,
                                                     _outputHeight,
                                                     AV_PIX_FMT_RGB24,
                                                     SWS_FAST_BILINEAR,
                                                     NULL,
                                                     NULL,
                                                     NULL);
    if (imgConverCtx == NULL) return nil;
    sws_scale(imgConverCtx,
              avframe->data,
              avframe->linesize,
              0,
              avframe->height,
              picture.data,
              picture.linesize);
    sws_freeContext(imgConverCtx);
    // Wrap the RGB pixels in a CGImage, then in a UIImage.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    CFDataRef data = CFDataCreate(kCFAllocatorDefault, picture.data[0], picture.linesize[0] * _outputHeight);
    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef cgImage = CGImageCreate(_outputWidth, _outputHeight, 8, 24, picture.linesize[0], colorSpace, bitmapInfo, provider, NULL, NO, kCGRenderingIntentDefault);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(provider);
    CFRelease(data);
    return image;
}
#pragma mark --------------------------
#pragma mark - Releasing resources
- (void)releaseResources {
    NSLog(@"Releasing resources");
    isReleaseResources = YES;
    // Free the RGB buffer.
    avpicture_free(&picture);
    // Unreference the last packet.
    av_packet_unref(&packet);
    // Free the decoded frame (av_frame_free also clears the pointer).
    av_frame_free(&avframe);
    // Close the decoder.
    if (codecCtx) avcodec_close(codecCtx);
    // Close the input.
    if (FormatCtx) avformat_close_input(&FormatCtx);
    avformat_network_deinit();
}
@end
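For completeness, calling the class really is as simple as described above; a minimal usage sketch (the view-controller context and the stream URL below are placeholders of mine, not from the original post):
// Create the view, add it to the hierarchy, and point it at a source.
// Anything FFmpeg can open (a local file, an RTSP camera, an HTTP TS
// stream, ...) goes through the same path.
CQMovieView *movieView = [[CQMovieView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:movieView];
[movieView Video:@"rtsp://example.com/live/stream1"];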
The full source file is available here: code file download