
Taking Live555 Apart: A Thorough Walkthrough of the Source Code (Explained via openRTSP)

Things to watch for when openRTSP writes H.264 files:

The openRTSP client consists of two files: openRTSP.cpp and playCommon.cpp.

1) Receiving a buffer and saving it to the file

void H264VideoFileSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                                          struct timeval presentationTime) {
  unsigned char const start_code[4] = {0x00, 0x00, 0x00, 0x01};

  if (!fHaveWrittenFirstFrame) {
    // If we have PPS/SPS NAL units encoded in a "sprop parameter string", prepend these to the file:
    unsigned numSPropRecords;
    SPropRecord* sPropRecords = parseSPropParameterSets(fSPropParameterSetsStr, numSPropRecords);
    for (unsigned i = 0; i < numSPropRecords; ++i) {
      addData(start_code, 4, presentationTime);
      addData(sPropRecords[i].sPropBytes, sPropRecords[i].sPropLength, presentationTime);
    }
    delete[] sPropRecords;
    fHaveWrittenFirstFrame = True; // for next time
  }

  // Write the input data to the file, with the start code in front:
  addData(start_code, 4, presentationTime);

  // Call the parent class to complete the normal file write with the input data:
  FileSink::afterGettingFrame(frameSize, numTruncatedBytes, presentationTime);
}
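For context, the sink above is created in playCommon.cpp from the RTSP subsession, and the sprop string carrying the SPS/PPS comes straight from the SDP. The following is a minimal sketch of that wiring; the names outFileName, fileSinkBufferSize and the subsessionAfterPlaying callback follow the openRTSP code, but treat this as an illustration rather than a verbatim quote:

// Sketch: creating an H264VideoFileSink for a "video/H264" subsession.
// The SPS/PPS come from the SDP attribute "sprop-parameter-sets".
H264VideoFileSink* fileSink
  = H264VideoFileSink::createNew(*env, outFileName,
                                 subsession->fmtp_spropparametersets(), // Base64 SPS/PPS from the SDP
                                 fileSinkBufferSize);
if (fileSink != NULL) {
  // Pull frames from the subsession's source and write them to the output file:
  fileSink->startPlaying(*(subsession->readSource()),
                         subsessionAfterPlaying, subsession);
}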

2) Constructing the output file name

// Create an output file for each desired stream:
char outFileName[1000];
if (singleMedium == NULL) {
  // Output file name is
  //     "<filename-prefix><medium_name>-<codec_name>-<counter>"
  static unsigned streamCounter = 0;
  // outFileName holds the generated file name
  snprintf(outFileName, sizeof outFileName, "%s%s-%s-%d",
           fileNamePrefix, subsession->mediumName(),
           subsession->codecName(), ++streamCounter);
} else {
  sprintf(outFileName, "stdout");
}
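For example, with an empty fileNamePrefix the first H.264 video subsession is written to a file named video-H264-1, since mediumName() returns "video" and codecName() returns "H264"; when a single medium was requested on the command line, the output goes to stdout instead.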

3) Writing the frame to the file

void FileSink::afterGettingFrame(unsigned frameSize,
                                 unsigned numTruncatedBytes,
                                 struct timeval presentationTime) {
  if (numTruncatedBytes > 0) {
    envir() << "FileSink::afterGettingFrame(): The input frame data was too large for our buffer size ("
            << fBufferSize << ").  "
            << numTruncatedBytes << " bytes of trailing data was dropped! Correct this by increasing the \"bufferSize\" parameter in the \"createNew()\" call to at least "
            << fBufferSize + numTruncatedBytes << "\n";
  }
  addData(fBuffer, frameSize, presentationTime);

  if (fOutFid == NULL || fflush(fOutFid) == EOF) {
    // The output file has closed.  Handle this the same way as if the input source had closed:
    if (fSource != NULL) fSource->stopGettingFrames();
    onSourceClosure(this);
    return;
  }

  if (fPerFrameFileNameBuffer != NULL) {
    if (fOutFid != NULL) { fclose(fOutFid); fOutFid = NULL; }
  }

  // Then try getting the next frame:
  continuePlaying();
}
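The loop is closed by continuePlaying(): it hands fBuffer back to the source and registers afterGettingFrame() (via its static wrapper) as the completion callback, so each delivered frame triggers the request for the next one. A sketch of what FileSink::continuePlaying() does (paraphrased, not quoted verbatim from the source):

Boolean FileSink::continuePlaying() {
  if (fSource == NULL) return False;

  // Ask the upstream source to deliver the next frame into our buffer;
  // the afterGettingFrame() callback runs when it arrives:
  fSource->getNextFrame(fBuffer, fBufferSize,
                        afterGettingFrame, this,
                        onSourceClosure, this);
  return True;
}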

4) Writing the data to disk (FileSink::addData())

void FileSink::addData(unsigned char const* data, unsigned dataSize,
                       struct timeval presentationTime) {
  if (fPerFrameFileNameBuffer != NULL) {
    // Special case: Open a new file on-the-fly for this frame
    sprintf(fPerFrameFileNameBuffer, "%s-%lu.%06lu", fPerFrameFileNamePrefix,
            presentationTime.tv_sec, presentationTime.tv_usec);
    fOutFid = OpenOutputFile(envir(), fPerFrameFileNameBuffer);
  }

  // Write to our file:
#ifdef TEST_LOSS
  static unsigned const framesPerPacket = 10;
  static unsigned frameCount = 0;
  static Boolean packetIsLost;
  if ((frameCount++)%framesPerPacket == 0) {
    packetIsLost = (our_random()%10 == 0); // simulate 10% packet loss #####
  }

  if (!packetIsLost)
#endif
  if (fOutFid != NULL && data != NULL) {
    fwrite(data, 1, dataSize, fOutFid);
  }
}
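Taken together with the fclose() in FileSink::afterGettingFrame() above, this is how the oneFilePerFrame option of createNew() works: each frame is written to its own file, named <prefix>-<tv_sec>.<tv_usec> from the frame's presentation time, and that file is closed as soon as the frame has been written.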

Here, unsigned char* fBuffer; is the buffer that holds the received frame data before it is written to the file; it is allocated as fBuffer = new unsigned char[bufferSize];. So what is the default buffer size? The class declaration gives the answer:

#ifndef _H264_VIDEO_FILE_SINK_HH
#define _H264_VIDEO_FILE_SINK_HH

#ifndef _FILE_SINK_HH
#include "FileSink.hh"
#endif

class H264VideoFileSink: public FileSink {
public:
  static H264VideoFileSink* createNew(UsageEnvironment& env, char const* fileName,
                                      char const* sPropParameterSetsStr = NULL,
                                      // An optional 'SDP format' string (comma-separated Base64-encoded) representing SPS and/or PPS NAL-units to prepend to the output
                                      unsigned bufferSize = 100000,
                                      Boolean oneFilePerFrame = False);
      // See "FileSink.hh" for a description of these parameters.

protected:
  H264VideoFileSink(UsageEnvironment& env, FILE* fid,
                    char const* sPropParameterSetsStr,
                    unsigned bufferSize, char const* perFrameFileNamePrefix);
      // called only by createNew()
  virtual ~H264VideoFileSink();

protected: // redefined virtual functions:
  virtual void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                                 struct timeval presentationTime);

private:
  char const* fSPropParameterSetsStr;
  Boolean fHaveWrittenFirstFrame;
};

#endif

The default is therefore 100 KB (100000 bytes). It can be changed in openRTSP by editing the corresponding variable:

unsigned fileSinkBufferSize = 1000000;           // modify by zh: 100000 to 1000000
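(In the openRTSP versions I have looked at, the -b <buffer-size> command-line option sets this same fileSinkBufferSize variable, so for testing it is not strictly necessary to recompile.)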

openRTSP also needs the frame-rate information in order to record video; the frame-rate parameter can likewise be adjusted in openRTSP:

unsigned movieFPS = 30; // default                 // frame rate: modified by zhongh from 15 to 30

For a 1080p camera at 30 fps, the measured stream here is about 1.18 MB per second.
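For rough sizing: 1.18 MB/s is about 9.4 Mbit/s, or on average around 40 KB per frame at 30 fps; individual I-frames can be several times larger than that average, which is why the default 100 KB fileSinkBufferSize above was raised to 1 MB.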

The openRTSP code that writes the stream to an AVI file is shown below:

else if (outputAVIFile) {
  // Create an "AVIFileSink", to write to 'stdout':
  // (changed here so that the sink writes directly to an AVI file instead of stdout)
  aviOut = AVIFileSink::createNew(*env, *session, "d:\\test1.avi",
                                  fileSinkBufferSize,
                                  movieWidth, movieHeight,
                                  movieFPS,                 // frame rate
                                  packetLossCompensate);    // packet-loss compensation
  if (aviOut == NULL) {
    *env << "Failed to create AVI file sink for stdout: " << env->getResultMsg();
    shutdown();
  }

  aviOut->startPlaying(sessionAfterPlaying, NULL);
}
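Note that startPlaying() only registers the read requests; the actual RTP reception, depacketization, and AVI writing are driven by the LIVE555 event loop, which playCommon.cpp enters once everything is set up. Roughly (newer versions of playCommon.cpp pass an eventLoopWatchVariable; older ones call doEventLoop() with no argument):

// Hand control to the event loop; all network I/O and file writes happen from here on:
env->taskScheduler().doEventLoop(&eventLoopWatchVariable);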