Linux: Serving a Real-Time H.264 Video Stream from Shared Memory over RTSP with Live555 (Source Code Included)


1. Build the library

Download from the official site (http://www.live555.com/liveMedia/public/); choose live555-latest.tar.gz.

The version used here is 2017.10.28.

Extract it: tar xzf live555-latest.tar.gz

Fix permissions: chmod -R 777 live

Set up the cross-compile config: cp config.armlinux config.arm

Edit it (vi config.arm) and set the cross-compiler prefix:

CROSS_COMPILE?=        arm-buildroot-linux-uclibcgnueabi-

Generate the Makefiles: ./genMakefiles arm

make

This builds mediaServer/live555MediaServer.

Test it with an H.264 file: live555MediaServer test.264


Problem: the server kept announcing the wrong IP address, 127.0.1.1 / 0.0.0.0:

[Screenshot: server output showing the wrong IP]

while the machine's actual addresses are 127.0.0.1 and 192.168.55.3:

[Screenshot: the machine's actual IP addresses]

The target kernel does not support port reuse (SO_REUSEPORT), which makes live555's socket setup fail; the fix used here is to comment out the following block in groupsock/GroupsockHelper.cpp:

#if 0
#if defined(__WIN32__) || defined(_WIN32)
  // Windoze doesn't properly handle SO_REUSEPORT or IP_MULTICAST_LOOP
#else
#ifdef SO_REUSEPORT
  if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEPORT,
		 (const char*)&reuseFlag, sizeof reuseFlag) < 0) {
    socketErr(env, "setsockopt(SO_REUSEPORT) error: ");
    closeSocket(newSocket);
    return -1;
  }
#endif

#ifdef IP_MULTICAST_LOOP
  const u_int8_t loop = 1;
  if (setsockopt(newSocket, IPPROTO_IP, IP_MULTICAST_LOOP,
		 (const char*)&loop, sizeof loop) < 0) {
    socketErr(env, "setsockopt(IP_MULTICAST_LOOP) error: ");
    closeSocket(newSocket);
    return -1;
  }
#endif
#endif
#endif
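
If disabling the whole block feels too blunt, a lighter-touch variant (a sketch, not from the original post) keeps the setsockopt() call but treats its failure as non-fatal on kernels that lack SO_REUSEPORT:

#ifdef SO_REUSEPORT
  if (setsockopt(newSocket, SOL_SOCKET, SO_REUSEPORT,
		 (const char*)&reuseFlag, sizeof reuseFlag) < 0) {
    // Kernel without SO_REUSEPORT support: report the error but keep the
    // socket, instead of closing it and returning -1:
    env.setResultErrMsg("setsockopt(SO_REUSEPORT) failed (ignored)");
  }
#endif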


References:

RTSP server creation process analysis:
    https://www.cnblogs.com/dchipnau/p/5458943.html

A Windows live-capture example:
    http://blog.csdn.net/xiejiashu/article/details/8269873


2. Add a class that fetches live frames

Creating the RTSP server:

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include "H264LiveVideoServerMediaSubsession.hh"

UsageEnvironment* env;

// To make the second and subsequent clients for each stream reuse the same
// input stream as the first client (rather than starting playback over for
// each client), this is set to True:
Boolean reuseFirstSource = True;

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
			   char const* streamName)
{
  char* url = rtspServer->rtspURL(sms);
  UsageEnvironment& env = rtspServer->envir();

  env << "\n"<<"Play this stream using the URL \"" << url << "\"\n";
  delete[] url;
}


int main(int argc, char** argv) 
{

    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);
    UserAuthenticationDatabase* authDB = NULL;  


#ifdef ACCESS_CONTROL
    // To implement client access control to the RTSP server, do the following:
    authDB = new UserAuthenticationDatabase;
    authDB->addUserRecord("username1", "password1"); // replace these with real strings
    // Repeat the above with each <username>, <password> that you wish to allow
    // access to the server.
#endif

  // Create the RTSP server:
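    // Note: binding the standard RTSP port 554 normally requires root
    // privileges; live555's stock media server falls back to 8554.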
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 554, authDB);
    if (rtspServer == NULL) 
    {
        *env << "Failed to create svt RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }
    
  char const* descriptionString = "Session streamed by \"720p-stream\"";

  // Set up each of the possible streams that can be served by the
  // RTSP server.  Each such stream is implemented using a
  // "ServerMediaSession" object, plus one or more
  // "ServerMediaSubsession" objects for each audio/video substream.

     char const* streamName = "720p-stream";

  // A H.264 video elementary stream:

    ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName,
                                                            descriptionString);

    // H264LiveVideoServerMediaSubsession replaces the stock
    // H264VideoFileServerMediaSubsession:
    sms->addSubsession(H264LiveVideoServerMediaSubsession::createNew(*env, streamName,
                                                                     reuseFirstSource));

    rtspServer->addServerMediaSession(sms);
    announceStream(rtspServer, sms, streamName);


  // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
  // Try first with the default HTTP port (80), and then with the alternative HTTP
  // port numbers (8000 and 8080).

  if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
  } else {
    *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
  }

  env->taskScheduler().doEventLoop(); // does not return
  
  
  printf("Exit server.....!\n");

 
  return 0; // only to prevent compiler warning
}


Creating the subsession:
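
The post doesn't show the class declaration, so here is a minimal sketch consistent with the methods below, modeled on live555's H264VideoFileServerMediaSubsession. The member and helper names (fChanID, fAuxSDPLine, fDoneFlag, fDummyRTPSink, setDoneFlag(), checkForAuxSDPLine1()) are inferred, not confirmed:

// Hypothetical sketch of H264LiveVideoServerMediaSubsession.hh, modeled on
// live555's H264VideoFileServerMediaSubsession; not from the original post.
#include "OnDemandServerMediaSubsession.hh"

class H264VideoStreamFramerBase;

class H264LiveVideoServerMediaSubsession: public OnDemandServerMediaSubsession {
public:
  static H264LiveVideoServerMediaSubsession* createNew(UsageEnvironment& env,
                                                       char const* chanID,
                                                       Boolean reuseFirstSource);

  void setDoneFlag() { fDoneFlag = ~0; } // unblocks getAuxSDPLine()'s event loop
  void checkForAuxSDPLine1();            // polls the sink for the SDP line

protected:
  H264LiveVideoServerMediaSubsession(UsageEnvironment& env, char const* chanID,
                                     Boolean reuseFirstSource);
  virtual ~H264LiveVideoServerMediaSubsession();

  // OnDemandServerMediaSubsession virtuals implemented below:
  virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource);
  virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
                                              unsigned& estBitrate);
  virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                    unsigned char rtpPayloadTypeIfDynamic,
                                    FramedSource* inputSource);
private:
  char const* fChanID;    // shared-memory channel ID (the stream name here)
  char* fAuxSDPLine;      // cached "a=fmtp:..." SDP line
  char fDoneFlag;         // set once fAuxSDPLine is known
  RTPSink* fDummyRTPSink; // temporary sink used to extract SPS/PPS
};

The method implementations from the post follow.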

char const* H264LiveVideoServerMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource) {
 //   printf("~~~~~~~%s line :%d \n ",__func__,__LINE__);
  //return "a=fmtp:96 packetization-mode=1;profile-level-id=4D6028;sprop-parameter-sets=J01gKI1oBQBbpsgAAAMACAAAAwDweKEV,KO4D0kg=\r\n";
  if (fAuxSDPLine != NULL) return fAuxSDPLine; // it's already been set up (for a previous client)

  if (fDummyRTPSink == NULL) { // we're not already setting it up for another, concurrent stream
    // Note: For H264 video files, the 'config' information ("profile-level-id" and "sprop-parameter-sets") isn't known
    // until we start reading the file.  This means that "rtpSink"s "auxSDPLine()" will be NULL initially,
    // and we need to start reading data from our file until this changes.
    fDummyRTPSink = rtpSink;

    // Start reading the file:
    fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

    // Check whether the sink's 'auxSDPLine()' is ready:
    checkForAuxSDPLine(this);
  }
  envir().taskScheduler().doEventLoop(&fDoneFlag);

  return fAuxSDPLine;
}
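
The afterPlayingDummy() and checkForAuxSDPLine() helpers referenced above aren't shown in the post; in the stock H264VideoFileServerMediaSubsession they look roughly like this (adapted to this class's names, so treat it as a sketch):

static void afterPlayingDummy(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess =
    (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->setDoneFlag(); // stop the event loop inside getAuxSDPLine()
}

static void checkForAuxSDPLine(void* clientData) {
  H264LiveVideoServerMediaSubsession* subsess =
    (H264LiveVideoServerMediaSubsession*)clientData;
  subsess->checkForAuxSDPLine1();
}

void H264LiveVideoServerMediaSubsession::checkForAuxSDPLine1() {
  char const* dasl;
  if (fAuxSDPLine != NULL) {
    setDoneFlag(); // already have the SDP line
  } else if (fDummyRTPSink != NULL
             && (dasl = fDummyRTPSink->auxSDPLine()) != NULL) {
    fAuxSDPLine = strDup(dasl); // SPS/PPS have arrived; cache the line
    fDummyRTPSink = NULL;
    setDoneFlag();
  } else {
    // Not ready yet: check again in 100 ms
    nextTask() = envir().taskScheduler().scheduleDelayedTask(
      100000, (TaskFunc*)checkForAuxSDPLine, this);
  }
}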

FramedSource* H264LiveVideoServerMediaSubsession::createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate) 
{    
 //   printf("~~~~~%s line %d \n",__func__,__LINE__);

    estBitrate = 500; // kbps, estimate

    // Create the live video source (reads frames from shared memory):
    H264LiveVideoSource* liveSource = H264LiveVideoSource::createNew(envir(), fChanID);
    if (liveSource == NULL) return NULL;

    // Create a framer for the video elementary stream:
    H264VideoStreamFramerBase* pBase = H264VideoStreamFramerBase::createNew(envir(), liveSource);
    liveSource->SetBase(pBase);
    return pBase;
    // was: return H264VideoStreamFramer::createNew(envir(), liveSource);

}
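
H264VideoStreamFramerBase is the author's own subclass; from its use here and in doGetNextFrame() it apparently just exposes the framer's internal fNextPresentationTime. A plausible sketch (the constructor arguments of H264VideoStreamFramer vary slightly between live555 versions):

// Hypothetical sketch: a thin H264VideoStreamFramer subclass whose only job
// is to let the live source read/overwrite fNextPresentationTime.
#include "H264VideoStreamFramer.hh"

class H264VideoStreamFramerBase: public H264VideoStreamFramer {
public:
  static H264VideoStreamFramerBase* createNew(UsageEnvironment& env,
                                              FramedSource* inputSource) {
    return new H264VideoStreamFramerBase(env, inputSource);
  }

  // fNextPresentationTime is a protected member inherited from the framer;
  // exposing it lets H264LiveVideoSource pin it to the wall clock:
  struct timeval* GetNextPresentationTime() { return &fNextPresentationTime; }

protected:
  H264VideoStreamFramerBase(UsageEnvironment& env, FramedSource* inputSource)
    : H264VideoStreamFramer(env, inputSource, True/*createParser*/,
                            False/*includeStartCodeInOutput*/) {}
};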

RTPSink* H264LiveVideoServerMediaSubsession::createNewRTPSink(Groupsock* rtpGroupsock,unsigned char rtpPayloadTypeIfDynamic,FramedSource* /*inputSource*/) 
{
    printf("~~~~~%s line %d \n",__func__,__LINE__);
	return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);

}

Fetching video frames from shared memory:

void H264LiveVideoSource::doGetNextFrame() {
    static int i = 0; // debug frame counter
    i++;
    fFrameSize = 0;
#ifdef GET_VIDEO_FRAME_FROM_SHM
    char tmpbuf[SHM_VIDEO_STREAM_720P_MAX_FRAME_SIZE];
    int framelen = sizeof(tmpbuf);

    // Poll the shared-memory channel until a frame is available.
    // (Note: this wait blocks live555's single-threaded event loop.)
    //if (SHM_VideoReadFrameWithExtras(shmVid, tmpbuf, &framelen, (char*)&struVExtra) > 1)
    int rtn = -1;
    do {
        rtn = SHM_VideoReadFrame(shmVid, tmpbuf, &framelen);
        if (rtn < 2) usleep(10000); // no frame yet: retry in 10 ms
    } while (rtn < 2);

    if (rtn > 1)
    {
        //printf("i=%d framelen=%d time=%ld \n", i, framelen, struVExtra.ullTimeStamp);
        fFrameSize = framelen;
        if (fFrameSize > fMaxSize)
        {
            // Frame is larger than the sink's buffer: deliver what fits now
            // and stash the remainder for the next call.
            fFrameSize = fMaxSize;
            int frBufUsedBytes = fMaxSize;
            fNumTruncatedBytes = framelen - frBufUsedBytes;
            memmove(fTo, tmpbuf, frBufUsedBytes);
            memmove(fTruncatedBytes, tmpbuf + frBufUsedBytes, fNumTruncatedBytes);
            fTruncatedBytesNum = fNumTruncatedBytes;
        }
        else if (fTruncatedBytesNum > 0)
        {
            // Prepend the leftover bytes of the previous oversized frame.
            // (Assumes fTruncatedBytesNum + framelen still fits in fMaxSize.)
            memmove(fTo, fTruncatedBytes, fTruncatedBytesNum);
            memmove(fTo + fTruncatedBytesNum, tmpbuf, framelen);
            fFrameSize += fTruncatedBytesNum;
            fTruncatedBytesNum = 0;
        }
        else
        {
            memmove(fTo, tmpbuf, framelen);
        }
    }

    fDurationInMicroseconds = 1000000 / 22; // nominal ~22 fps

    // Pin the framer's fNextPresentationTime to the wall clock so it cannot
    // drift ahead of real time (see the latency discussion below):
    struct timeval* nextPT = m_pBase->GetNextPresentationTime();
    gettimeofday(&fPresentationTime, NULL);
    *nextPT = fPresentationTime;
#endif

    //printf("fPresentationTime.tv_sec=%ld fMaxSize=%d frame size=%d\n", fPresentationTime.tv_sec, fMaxSize, framelen);
    // Deliver via the scheduler rather than calling afterGetting() directly,
    // to avoid unbounded recursion:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);
}
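
For reference, the members used above imply a source class roughly like the following. This is a hypothetical sketch: the SHM_Video* shared-memory API, the type of shmVid, and the constructor signature are the author's and are not shown in the post.

// Hypothetical sketch of H264LiveVideoSource, inferred from doGetNextFrame().
#include "FramedSource.hh"

class H264VideoStreamFramerBase;

class H264LiveVideoSource: public FramedSource {
public:
  static H264LiveVideoSource* createNew(UsageEnvironment& env, char const* chanID);
  void SetBase(H264VideoStreamFramerBase* pBase) { m_pBase = pBase; }

protected:
  H264LiveVideoSource(UsageEnvironment& env, char const* chanID);
  virtual ~H264LiveVideoSource();

private:
  virtual void doGetNextFrame(); // shown above

  void* shmVid;                       // shared-memory reader handle
  H264VideoStreamFramerBase* m_pBase; // downstream framer (for nextPT)
  // Bytes of an oversized frame that did not fit into fTo last time:
  unsigned char fTruncatedBytes[SHM_VIDEO_STREAM_720P_MAX_FRAME_SIZE];
  int fTruncatedBytesNum;
};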


Problem: playback latency grew the longer a client stayed connected. Tracing it showed fPresentationTime and the framer's fNextPresentationTime drifting further and further apart, so fNextPresentationTime is pinned to the current time (the lines below); some latency still remained after that.

Looking further, MultiFramedRTPSink::sendPacketIfNecessary() in liveMedia/MultiFramedRTPSink.cpp delays each outgoing packet by uSecondsToGo at the end of the function. Forcing uSecondsToGo to 0 removed the remaining latency, though modifying the library itself doesn't feel like the right fix.

    struct timeval* nextPT = m_pBase->GetNextPresentationTime();
    gettimeofday(&fPresentationTime, NULL);
    *nextPT = fPresentationTime;

void MultiFramedRTPSink::sendPacketIfNecessary() {

  ...

    //printf("MultiFramedRTPSink::sendPacketIfNecessary() delay uSecondsToGo %d \n", uSecondsToGo);
    uSecondsToGo = 0; // send immediately instead of pacing by frame duration

    // Delay this amount of time:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToGo, (TaskFunc*)sendNext, this);
  }
}
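
Rather than patching the library, there may be a cleaner option (untested here, but consistent with how live555 paces output): uSecondsToGo is computed from fNextSendTime, which the sink advances by each frame's fDurationInMicroseconds, and live sources conventionally report a duration of 0 so that the sink sends as soon as data arrives:

// In H264LiveVideoSource::doGetNextFrame(), report no nominal duration;
// pacing is then governed by when frames actually arrive from shared memory:
fDurationInMicroseconds = 0; // instead of 1000000/22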




Build it and run the binary on the target over NFS:

[Screenshot: server startup output]

Playing the stream in VLC (e.g. rtsp://192.168.55.3/720p-stream):

[Screenshots: VLC playing the RTSP stream]


Source code: http://download.csdn.net/download/xwu122930/10186456