欢迎您访问程序员文章站本站旨在为大家提供分享程序员计算机编程知识!
您现在的位置是: 首页

Qt+ffmpeg接收rtsp并显示

程序员文章站 2022-03-17 14:50:51
...

效果

Qt+ffmpeg接收rtsp并显示

接收

#include "rtspplayer.h"
#include <QDebug>
// Worker thread that pulls an RTSP stream, decodes it with FFmpeg and
// emits decoded frames as QImage (see run()). Nothing to set up here;
// all FFmpeg state is created inside run() on the worker thread.
VideoPlayer::VideoPlayer()
{

}

// NOTE(review): the destructor does not stop or join the thread; the owner
// must ensure run() has finished before destroying this object — TODO confirm
// callers do so (QThread aborts if destroyed while still running).
VideoPlayer::~VideoPlayer()
{
}

void VideoPlayer::run()
{
    //变量
    AVFormatContext *pFormatCtx;
    char filepath[] = "rtsp://192.168.0.188:554/stream/main";
    //AVPacket *packet;
    //初始化
    av_register_all();
    avformat_network_init();
    pFormatCtx = avformat_alloc_context();
    AVDictionary* options = NULL;
    av_dict_set(&options, "buffer_size", "1024000", 0); //设置缓存大小,1080p可将值调大
    av_dict_set(&options, "rtsp_transport", "udp", 0); //以udp方式打开,如果以tcp方式打开将udp替换为tcp
    av_dict_set(&options, "stimeout", "20000000", 0); //设置超时断开连接时间,单位微秒
    av_dict_set(&options, "max_delay", "500000", 0); //设置最大时延
    //av_dict_set(&options, "fps", "30", 0); //设置帧数
    //packet = (AVPacket *)av_malloc(sizeof(AVPacket));
    //打开网络流或文件流
    if (avformat_open_input(&pFormatCtx, filepath, NULL, &options) != 0)
    {
        printf("Couldn't open input stream.\n");
        return;
    }

    //查找码流信息
    //设置查找时间以避免耗时过长
    pFormatCtx->probesize = 1000;
    pFormatCtx->max_analyze_duration = AV_TIME_BASE;
    if (avformat_find_stream_info(pFormatCtx, NULL)<0)
    {
        printf("Couldn't find stream information.\n");
        return;
    }
    //查找码流中是否有视频流
    int videoindex = -1;
    for (int i = 0; i<pFormatCtx->nb_streams; i++)
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoindex = i;
            break;
        }
    if (videoindex == -1)
    {
        printf("Didn't find a video stream.\n");
        return;
    }
    printf("-----------rtsp流输入信息--------------\n");
    av_dump_format(pFormatCtx, 0, filepath,0);
    printf("---------------------------------------\n");
    /*********************************************/
    AVCodecContext *pAVCodecContext;
       AVFrame *pAVFrame;
       SwsContext * pSwsContext;
       AVPicture  pAVPicture;
    //获取视频流的分辨率大小
        pAVCodecContext = pFormatCtx->streams[0]->codec;
        int videoWidth=pAVCodecContext->width;
        int videoHeight=pAVCodecContext->height;

        avpicture_alloc(&pAVPicture,AV_PIX_FMT_RGB32,videoWidth,videoHeight);

        AVCodec *pAVCodec;

        //获取视频流解码器
        pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
        pSwsContext = sws_getContext(videoWidth,videoHeight,pAVCodecContext->pix_fmt,videoWidth,videoHeight,AV_PIX_FMT_RGB32,SWS_BICUBIC,0,0,0);

        //打开对应解码器
        int result=avcodec_open2(pAVCodecContext,pAVCodec,NULL);
        if (result<0){
            qDebug()<<"打开解码器失败";
            return;
        }

        pAVFrame = av_frame_alloc();
        int y_size = pAVCodecContext->width * pAVCodecContext->height;
            AVPacket *packet = (AVPacket *) malloc(sizeof(AVPacket)); //分配一个packet
            av_new_packet(packet, y_size); //分配packet的数据
        qDebug()<<"初始化视频流成功" <<","<<videoWidth << "," << videoHeight << "," ;
    //
      //  return;
    //保存一段时间的视频流,写入文件中
    //FILE  * fpSave;
    //fopen_s(&fpSave, "geth264.h264", "wb");
        int m_i_frameFinished =-1;
        int ret =-1;
//    for (int i = 0; i < 1000; i++)   //这边可以调整i的大小来改变文件中的视频时间
//    {

       while(av_read_frame(pFormatCtx, packet) >= 0)
       {
            if (packet->stream_index == videoindex)
            {
                //printf("pts : %d     size :%d one pkt\n",packet->pts,packet->size);
                //fwrite(packet->data, 1, packet->size, fpSave);
                qDebug() << "pkt pts:" << packet->pts;
                ret = avcodec_decode_video2(pAVCodecContext, pAVFrame, &m_i_frameFinished, packet);
                if(ret < 0)
                {
                    qDebug() << "解码失败!!";
                    return;
                }
                if (m_i_frameFinished)
                {

                    sws_scale(pSwsContext,(const uint8_t* const *)pAVFrame->data,pAVFrame->linesize,0,videoHeight,pAVPicture.data,pAVPicture.linesize);
                    //发送获取一帧图像信号

                    QImage image(pAVPicture.data[0],videoWidth,videoHeight,QImage::Format_RGB32);
                    emit sig_GetOneFrame(image);
                }
             }
         }
       // av_packet_unref(packet);
        av_free_packet(packet);
         msleep(0.02);
    //}

      //}
       // fclose(fpSave);
        if(pFormatCtx)
            avformat_close_input(&pFormatCtx);
        //av_free(packet);
        av_frame_free(&pAVFrame);
        sws_freeContext(pSwsContext);
        av_free(pAVFrame);
        printf("is end  !!! \n");
}

显示

#include "mainwindow.h"
#include "ui_mainwindow.h"

// Builds the UI, creates the RTSP decoder thread, wires its frame signal to
// the repaint slot, and starts decoding immediately.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    m_player = new VideoPlayer;
    // Pointer-to-member connect: same queued cross-thread delivery as the
    // original SIGNAL/SLOT form, but checked at compile time.
    connect(m_player, &VideoPlayer::sig_GetOneFrame,
            this, &MainWindow::slotGetOneFrame);
    m_player->start();
}

// FIX: the original deleted m_player while its thread was still running,
// which makes QThread abort the program ("Destroyed while thread is still
// running"). Ask the worker to stop and wait for it before deleting.
MainWindow::~MainWindow()
{
    if (m_player->isRunning())
    {
        m_player->requestInterruption();
        if (!m_player->wait(1000))
        {
            // NOTE(review): VideoPlayer::run() blocks in av_read_frame and never
            // checks isInterruptionRequested(), so terminate() is the only way to
            // unblock it here — TODO add a cooperative stop flag to VideoPlayer.
            m_player->terminate();
            m_player->wait();
        }
    }
    delete m_player;
    delete ui;
}


// Paints the most recent decoded frame: black background, frame scaled to
// fit the window with its aspect ratio preserved, centred in both axes.
void MainWindow::paintEvent(QPaintEvent *event)
{
    Q_UNUSED(event);

    QPainter painter(this);
    painter.setBrush(Qt::black);
    painter.drawRect(0, 0, width(), height());  // clear to black first

    if (mImage.size().width() <= 0)
        return;  // no frame received yet

    // Scale to the window while keeping the aspect ratio, then centre.
    const QImage scaled = mImage.scaled(size(), Qt::KeepAspectRatio);
    const int offsetX = (width() - scaled.width()) / 2;
    const int offsetY = (height() - scaled.height()) / 2;

    painter.drawImage(QPoint(offsetX, offsetY), scaled);
}

// Receives one decoded frame from the player thread (queued connection),
// stores it, and schedules a repaint — paintEvent() draws it.
void MainWindow::slotGetOneFrame(QImage img)
{
    mImage = img;
    update();  // asynchronous repaint request; triggers paintEvent()
}
相关标签: ffmpeg Qt rtsp