
A Summary of Using Live555 for Real-Time Video Streaming

1. Linux environment

Download from the official site: http://www.live555.com/liveMedia/public/

live555 version: 2018.12.14

Reference: http://www.live555.com/liveMedia/faq.html (this FAQ is worth reading carefully).

2. Building

ARM:

Set the cross-compile toolchain prefix:

cp config.armlinux config.arm

vi config.arm

CROSS_COMPILE?= arm-buildroot-linux-uclibcgnueabi-

Generate the Makefile: ./genMakefiles arm

x86-64:

./genMakefiles linux-64bit

x86:

./genMakefiles linux

make

This builds mediaServer/live555MediaServer.

The testProgs directory contains the various test programs.

3. Testing

  • Testing with the test.264 file

    3.1 In the mediaServer directory, run: live555MediaServer test.264

    If you see: Correct this by increasing "OutPacketBuffer::maxSize" to at least 186818, before creating this 'RTPSink'. (Current value is 100000.)

    then in DynamicRTSPServer.cpp, inside ServerMediaSession* createNewSMS(), raise OutPacketBuffer::maxSize:

    if (strcmp(extension, ".264") == 0) {
      // Assumed to be a H.264 Video Elementary Stream file:
      NEW_SMS("H.264 Video");
      OutPacketBuffer::maxSize = 300000; // was 100000; allow for some possibly large H.264 frames
      sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(env, fileName, reuseSource));
    }

    createNewSMS() is called when the RTSP session is being set up.

    3.2 testProgs

    3.2.1 ./testOnDemandRTSPServer

    3.2.2 ./testH264VideoStreamer (both servers can be checked with an RTSP client; see the commands below)
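
    To verify either test program, open the stream with an RTSP client; the stream names and port below are the defaults the programs print at startup:

    ./openRTSP rtsp://<server-ip>:8554/h264ESVideoTest   # stream served by testOnDemandRTSPServer
    vlc rtsp://<server-ip>:8554/testStream               # stream sent by testH264VideoStreamer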

  • Using a live video stream instead of a file

    3.3 The simplest method: push the live video stream into a FIFO (named pipe), or into stdin, and give the server that pipe's name as the input file name; a sketch follows below.
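
    For example (a rough sketch; your_encoder_app is a placeholder for whatever produces your raw H.264 byte stream):

    mkfifo test.264                  # create the pipe in live555MediaServer's working directory
    your_encoder_app > test.264 &    # hypothetical encoder writing raw H.264 NAL units into the pipe
    ./live555MediaServer             # then request test.264 via the rtsp:// URL the server prints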

    3.4 live stream

    3.4.1 Using testH264VideoStreamer

    参考"liveMedia/DeviceSource.cpp"

    定义一个H264LiveVideoSource例并继承DeviceSource,填充其成员,

    在void play() {

    // Open the input file as a ‘byte-stream file source’:

    ByteStreamFileSource* fileSource

    =ByteStreamFileSource::createNew(*env, inputFileName);

    }用H264LiveVideoSource代替ByteStreamFileSource

The full code of the H264LiveVideoSource class is given later in this article.

Modify main() in testH264VideoStreamer.cpp:

ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, "testStream", NULL,
             "Session streamed by \"testH264VideoStreamer\"",
                         True /*SSM*/);
           

Modify the play() function as follows:

void play() {
  // Open the input file as a 'byte-stream file source':
  #if 1
  H264LiveVideoSource* fileSource
     = new H264LiveVideoSource(*env);
   if (fileSource == NULL) {
     *env << "Unable to open file \"" << inputFileName
          << "\" as a byte-stream file source\n";
     exit(1);
   }

  #else
  ByteStreamFileSource* fileSource
    = ByteStreamFileSource::createNew(*env, inputFileName);
  if (fileSource == NULL) {
    *env << "Unable to open file \"" << inputFileName
         << "\" as a byte-stream file source\n";
    exit(1);
  }
  #endif
  FramedSource* videoES = fileSource;

  // Create a framer for the Video Elementary Stream:
  videoSource = H264VideoStreamFramer::createNew(*env, videoES);

  // Finally, start playing:
  *env << "Beginning to read from file...\n";
  videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

           

3.4.2 Using testOnDemandRTSPServer

1) Set the variable "reuseFirstSource" to "True" (see the snippet after this list).

2) Modeled on the existing class H264VideoFileServerMediaSubsession, create a new class H264LiveVideoServerMediaSubsession and implement the two pure virtual functions "createNewStreamSource()" and "createNewRTPSink()".

In createNewStreamSource(), use the H264LiveVideoSource described above instead of ByteStreamFileSource.
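
For step 1), the change is a single flag near the top of testOnDemandRTSPServer.cpp (the adjacent comment may differ slightly between live555 versions); with a live source, all clients must share the same input:

Boolean reuseFirstSource = True;   // was: False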

The full implementation is listed below, starting with H264LiveVideoServerMediaSubsession.hh:

#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH

#include "OnDemandServerMediaSubsession.hh"
#include "liveMedia.hh"
#include "UsageEnvironment.hh"
#include "GroupsockHelper.hh"
 
class H264LiveVideoServerMediaSubsession: public OnDemandServerMediaSubsession
{
public:
    H264LiveVideoServerMediaSubsession(UsageEnvironment & env,Boolean reuseFirstSource);
    ~H264LiveVideoServerMediaSubsession();
     static H264LiveVideoServerMediaSubsession* createNew(UsageEnvironment& env,Boolean reuseFirstSource);
public: // new virtual functions, defined by all subclasses
    virtual FramedSource* createNewStreamSource(unsigned clientSessionId,
					      unsigned& estBitrate) ;
      // "estBitrate" is the stream's estimated bitrate, in kbps
    virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
				    unsigned char rtpPayloadTypeIfDynamic,
				    FramedSource* inputSource);
    virtual char const * getAuxSDPLine(RTPSink * rtpSink, FramedSource * inputSource);
    static H264LiveVideoServerMediaSubsession* createNew(UsageEnvironment & env, FramedSource * source);
    static void afterPlayingDummy(void * ptr);
 
	static void chkForAuxSDPLine(void * ptr);
	void chkForAuxSDPLine1();
private:
	FramedSource * m_pSource;
	char * m_pSDPLine;
	RTPSink * m_pDummyRTPSink;
	char m_done;    
};

#endif

           

H264LiveVideoServerMediaSubsession.cpp

#include "H264LiveVideoServerMediaSubsession.hh"   // added: the class's own header
#include "H264LiveVideoSource.hh"                  // added: the live source used in createNewStreamSource()

H264LiveVideoServerMediaSubsession::H264LiveVideoServerMediaSubsession(UsageEnvironment & env,Boolean reuseFirstSource):OnDemandServerMediaSubsession(env,reuseFirstSource)
{
    m_pSource = NULL;//source;
	m_pSDPLine = NULL;
	m_pDummyRTPSink =NULL;
	m_done=0;
	
}

H264LiveVideoServerMediaSubsession::~H264LiveVideoServerMediaSubsession()
{
    if (m_pSDPLine)
	{
		free(m_pSDPLine);
	}


}
H264LiveVideoServerMediaSubsession * H264LiveVideoServerMediaSubsession::createNew(UsageEnvironment& env,Boolean reuseFirstSource)
{
   return new H264LiveVideoServerMediaSubsession(env,reuseFirstSource); 
}
FramedSource * H264LiveVideoServerMediaSubsession::createNewStreamSource(unsigned clientSessionId, unsigned & estBitrate)
{
    //printf("===========createNewStreamSource===================\n");
    estBitrate = 500; 
    
	return H264VideoStreamFramer::createNew(envir(), new H264LiveVideoSource(envir()));
}
 
RTPSink * H264LiveVideoServerMediaSubsession::createNewRTPSink(Groupsock * rtpGroupsock, unsigned char rtpPayloadTypeIfDynamic, FramedSource * inputSource)
{
	return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
}

 char const *H264LiveVideoServerMediaSubsession::getAuxSDPLine(RTPSink * rtpSink, FramedSource * inputSource)
 {
    if (m_pSDPLine)
	{
		return m_pSDPLine;
	}
 
	m_pDummyRTPSink = rtpSink;
	if(NULL == m_pDummyRTPSink)
	    return NULL;


	// Start "playing" into the dummy sink so the H.264 framer can see SPS/PPS
	// and produce the aux SDP line:
	m_pDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);

	// Reset the done flag *before* polling; otherwise, if the aux SDP line is
	// already available, chkForAuxSDPLine1() would set m_done and resetting it
	// afterwards would make doEventLoop() wait forever.
	m_done = 0;
	chkForAuxSDPLine(this);
	envir().taskScheduler().doEventLoop(&m_done);

	char const *dasl = m_pDummyRTPSink->auxSDPLine();
	if (dasl)
		m_pSDPLine = strdup(dasl);

	m_pDummyRTPSink->stopPlaying();

	return m_pSDPLine;

 }
void H264LiveVideoServerMediaSubsession::afterPlayingDummy(void * ptr)
{
	H264LiveVideoServerMediaSubsession * This = (H264LiveVideoServerMediaSubsession *)ptr;
 
	This->m_done = ~0;
}

void H264LiveVideoServerMediaSubsession::chkForAuxSDPLine(void * ptr)
{
	H264LiveVideoServerMediaSubsession * This = (H264LiveVideoServerMediaSubsession *)ptr;
 
	This->chkForAuxSDPLine1();
}
 
void H264LiveVideoServerMediaSubsession::chkForAuxSDPLine1()
{
	if (m_pDummyRTPSink->auxSDPLine())
	{
		m_done =  ~0;
	}
	else
	{
		// Not ready yet; poll again after roughly one frame interval.
		// FRAME_PER_SEC is assumed to be defined elsewhere (e.g. #define FRAME_PER_SEC 25).
		double delay = 1000.0 / (FRAME_PER_SEC);  // ms
		int to_delay = delay * 1000;              // us

		nextTask() = envir().taskScheduler().scheduleDelayedTask(to_delay, chkForAuxSDPLine, this);
	}
}

H264LiveVideoSource.hh

#ifndef _H264_LIVE_VIDEO_SOURCE_HH
#define _H264_LIVE_VIDEO_SOURCE_HH

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif
#include "DeviceSource.hh"


class H264LiveVideoSource: public FramedSource {
public:
    H264LiveVideoSource(UsageEnvironment& env);
	// called only by createNew()

    virtual ~H264LiveVideoSource();

private:
    // redefined virtual functions:
    virtual void doGetNextFrame();
    //virtual void doStopGettingFrames();
    virtual unsigned maxFrameSize() const; // must match FramedSource::maxFrameSize() to actually override it
    static void getNextFrame(void * ptr);
    void GetFrameData();


private:
    void *m_pToken;
	char *m_pFrameBuffer;
	char *fTruncatedBytes;
	int fTruncatedBytesNum;
};

#endif
           

H264LiveVideoSource.cpp

#include "H264LiveVideoSource.hh"
//#include "InputFile.hh"
#include "GroupsockHelper.hh"

#define FRAME_BUF_SIZE  (1024*1024)
#define FMAX (300000)
H264LiveVideoSource::H264LiveVideoSource(UsageEnvironment& env):FramedSource(env),
m_pToken(0),
m_pFrameBuffer(0),fTruncatedBytesNum(0),fTruncatedBytes(0)
{
    m_pFrameBuffer = new char[FRAME_BUF_SIZE];
    fTruncatedBytes = new char[FRAME_BUF_SIZE];
	if(m_pFrameBuffer == NULL || fTruncatedBytes== NULL )
	{
		printf("[MEDIA SERVER] error malloc data buffer failed\n");
		return;
	}
	memset(m_pFrameBuffer,0,FRAME_BUF_SIZE);
	//fMaxSize =  FMAX;
    printf("[H264LiveVideoSource] fMaxSize:%d\n",fMaxSize);

}

H264LiveVideoSource::~H264LiveVideoSource()
{
    envir().taskScheduler().unscheduleDelayedTask(m_pToken);
 
	if(m_pFrameBuffer)
	{
    	delete[] m_pFrameBuffer;
		m_pFrameBuffer = NULL;
	}
	if(fTruncatedBytes)
	{
    	delete[] fTruncatedBytes;
		fTruncatedBytes = NULL;
	}
}
unsigned H264LiveVideoSource::maxFrameSize() const
{
    return FRAME_BUF_SIZE;
}


void H264LiveVideoSource::doGetNextFrame()
{

    int uSecsToDelay = 40000; // 40 ms
    m_pToken  = envir().taskScheduler().scheduleDelayedTask(uSecsToDelay,
              (TaskFunc*)getNextFrame, this);
              //printf("m_pToken =%p \n" ,m_pToken);
}
void H264LiveVideoSource::getNextFrame(void *ptr)
{
    H264LiveVideoSource *p=(H264LiveVideoSource *)ptr;
    if(NULL  == p)
        printf("null point \n");
    p->GetFrameData();
  
   
}
#include <sys/types.h>
#include <sys/stat.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <limits.h>

typedef struct
{
	unsigned long long timeTick;		// time in ms
	unsigned int dataLen;			// payload length
	unsigned char dataType;			// data type (DataType_E)
	unsigned char rsv[3];
	unsigned long long timeStamp;		// encoding timestamp in us
	unsigned char iFrame;			// 1 if this is a key frame
	unsigned char frameRate;		// frame rate
	int encodeType;				// encoding type (VideoEncodeType_E)
	unsigned short width;			// video width
	unsigned short height;			// video height
	unsigned char rsv1[8];
	unsigned char data[0];			// frame payload follows the header
}IFVFrameHeader_S;
void H264LiveVideoSource::GetFrameData()
{
#if 1
    // Live path: fetch one frame (header + payload) from the shared buffer.
    // ShareBufGetOneFrame()/g_BufHandle are application-specific; replace this call
    // with whatever actually delivers your encoded video frames.
    int read = ShareBufGetOneFrame(g_BufHandle[0], FRAME_BUF_SIZE, (char *)m_pFrameBuffer);
    if (read == 0)
    {
        // No frame available yet; try again shortly (just returning here would stall
        // the stream, because nobody would call doGetNextFrame() again).
        printf("read byte =0 \n");
        fFrameSize = 0;
        m_pToken = envir().taskScheduler().scheduleDelayedTask(40000, (TaskFunc*)getNextFrame, this);
        return;
    }

    IFVFrameHeader_S *pFrameHead = reinterpret_cast<IFVFrameHeader_S *>(m_pFrameBuffer);
    if (pFrameHead == NULL)
    {
        printf("pFrameHead =0 \n");
        fFrameSize = 0;
        return;
    }

    // Remember once the first key frame has been seen (could be used to drop
    // leading non-key frames; the original code sends them anyway).
    static int iframetype = 0;
    if (iframetype == 0 && 1 == pFrameHead->iFrame)
    {
        iframetype = 1;
    }

    int framelen = pFrameHead->dataLen;
    if (framelen > (int)fMaxSize)
    {
        // Frame is larger than the sink buffer: deliver what fits now and keep
        // the remainder to be prepended to the next delivery.
        framelen = fMaxSize;
        fTruncatedBytesNum = pFrameHead->dataLen - framelen;
        memcpy(fTo, pFrameHead->data, framelen);
        memmove(fTruncatedBytes, pFrameHead->data + framelen, fTruncatedBytesNum);
        fFrameSize = framelen;
    }
    else
    {
        if (fTruncatedBytesNum > 0)
        {
            // Prepend the bytes left over from the previous oversized frame
            // (assumes fTruncatedBytesNum + framelen still fits within fMaxSize).
            memmove(fTo, fTruncatedBytes, fTruncatedBytesNum);
            memmove(fTo + fTruncatedBytesNum, pFrameHead->data, framelen);
            fFrameSize = framelen + fTruncatedBytesNum;
            fTruncatedBytesNum = 0;
        }
        else
        {
            memcpy(fTo, pFrameHead->data, framelen);
            fFrameSize = framelen;
        }
    }

    fDurationInMicroseconds = 1000000/25;
    gettimeofday(&fPresentationTime, NULL);
    FramedSource::afterGetting(this);
#else
    #define FIFO_NAME "./test.264"
    //#define BUFFER_SIZE (30000)
    static int fd=-1;
    //static u_int64_t fFileSize =0;
    if(fd ==-1)
    {
        fd = open(FIFO_NAME,O_RDONLY);
        if(fd > 0)
        {
           // fFileSize =5316637;// GetFileSize(FIFO_NAME, fd);
        }
    }
    if(fd ==-1)
    {
        printf("open file %s fail \n",FIFO_NAME);
        return;
    }
    int len =0;
    int remain = fMaxSize;
    //if(remain >fMaxSize)
    //    remain =fMaxSize;
	if((len = read(fd,fTo,remain)) > 0)
	{
		fFrameSize = len;
	}
	else
	{
		// EOF or read error: deliver nothing and close the file so it is reopened next time.
		fFrameSize = 0;
		if(fd > 0)
		{
			::close(fd);
			fd = -1;
		}
	}
	fDurationInMicroseconds = 1000000/25;
	gettimeofday(&fPresentationTime, NULL);

	// printf("fMaxSize=%d fFrameSize=%d\n",fMaxSize,fFrameSize);
	//nextTask() = envir().taskScheduler().scheduleDelayedTask(0,
	//			(TaskFunc*)FramedSource::afterGetting, this);
	 FramedSource::afterGetting(this);
   
#endif
}
           

In the code above, GetFrameData() still uses a file (the #else branch) to simulate frame capture; in a real system, push your actual encoded video frames into it instead.
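
As a rough illustration of that hookup (purely a sketch, not part of live555 or of the code above; the class name and queue depth are invented), an encoder or capture thread could push frames into a small thread-safe queue, and GetFrameData() could drain it in place of ShareBufGetOneFrame():

#include <cstring>
#include <deque>
#include <vector>
#include <mutex>

// Hypothetical glue between an encoder thread and H264LiveVideoSource.
class FrameQueue {
public:
    // Called from the encoder/capture thread with one complete H.264 frame.
    void push(const unsigned char* data, size_t len) {
        std::lock_guard<std::mutex> lock(fMutex);
        fFrames.emplace_back(data, data + len);
        if (fFrames.size() > 30) fFrames.pop_front();   // drop the oldest frame on overflow
    }

    // Called from GetFrameData(); copies one frame into dst and returns its size,
    // or returns 0 if no frame is waiting (GetFrameData() then just returns/retries).
    size_t pop(unsigned char* dst, size_t maxLen) {
        std::lock_guard<std::mutex> lock(fMutex);
        if (fFrames.empty()) return 0;
        std::vector<unsigned char>& frame = fFrames.front();
        size_t n = frame.size() < maxLen ? frame.size() : maxLen;
        std::memcpy(dst, frame.data(), n);
        fFrames.pop_front();
        return n;
    }

private:
    std::deque< std::vector<unsigned char> > fFrames;
    std::mutex fMutex;
};

Because doGetNextFrame() already polls every 40 ms via scheduleDelayedTask(), this simple scheme needs no extra signalling into the live555 event loop.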

Next, modify testOnDemandRTSPServer.cpp as follows:

Add the following block inside main():

// A H.264 live video  stream:
  {
    OutPacketBuffer::maxSize = 300000;
    char const* streamName = "h264LiveVideo";
    char const* inputFileName = "test";
    ServerMediaSession* sms
      = ServerMediaSession::createNew(*env, streamName, streamName,
				      descriptionString,True);
	UsageEnvironment& envr = rtspServer->envir();
	envr << "\n\"" << sms<< "\"\n" ;
	if(NULL == sms)
	    printf("sms is null  \n");
    sms->addSubsession(H264LiveVideoServerMediaSubsession ::createNew(*env,True));
    rtspServer->addServerMediaSession(sms);
    
    announceStream(rtspServer, sms, streamName, inputFileName);
  }
           

If latency keeps growing the longer the stream plays: at the end of MultiFramedRTPSink::sendPacketIfNecessary() in liveMedia/MultiFramedRTPSink.cpp, each outgoing packet is queued with a delay of uSecondsToGo. Setting uSecondsToGo to 0 removes that per-packet pacing delay, as sketched below.
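
A minimal sketch of that change (the surrounding lines are quoted from memory and may differ slightly between live555 versions; forcing the delay to 0 trades smooth output pacing for lower latency and can cause bursts):

// liveMedia/MultiFramedRTPSink.cpp, end of MultiFramedRTPSink::sendPacketIfNecessary():
int64_t uSecondsToGo = secsDiff*1000000 + (fNextSendTime.tv_usec - timeNow.tv_usec);
if (uSecondsToGo < 0 || secsDiff < 0) {
  uSecondsToGo = 0; // sanity check
}
uSecondsToGo = 0;   // modification: send the next packet immediately instead of pacing it

// Delay this amount of time:
nextTask() = envir().taskScheduler().scheduleDelayedTask(uSecondsToGo, (TaskFunc*)sendNext, this);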