After the RTSP client sends a PLAY request, the RTSP server's call flow is as follows (a minimal server setup that produces this chain is sketched after the list).
H.264 video over RTSP:
1.ServerMediaSubsession::startStream -> OnDemandServerMediaSubsession::startStream
2.OnDemandServerMediaSubsession::startStream -> StreamState::startPlaying
3.StreamState::startPlaying -> MediaSink::startPlaying
4.MediaSink::continuePlaying -> MultiFramedRTPSink::continuePlaying
5.MultiFramedRTPSink::continuePlaying -> MultiFramedRTPSink::buildAndSendPacket
6.MultiFramedRTPSink::packFrame() -> fSource->getNextFrame() (fSource is an H264FUAFragmenter)
7.FramedSource::getNextFrame
8.FramedSource::doGetNextFrame -> H264FUAFragmenter::doGetNextFrame -> fInputSource->getNextFrame() (fInputSource is an H264VideoStreamFramer)
9.H264VideoStreamFramer::getNextFrame -> MPEGVideoStreamFramer::doGetNextFrame
10.MPEGVideoStreamFramer::continueReadProcessing -> fParser->parse() (fParser is an H264VideoStreamParser)
11.H264VideoStreamParser::parse() -> ByteStreamFileSource::doGetNextFrame -> ByteStreamFileSource::doReadFromFile
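For context, a minimal sketch of the server setup that triggers this chain, modeled on live555's testOnDemandRTSPServer example (the port, stream name "live", and file name "test.264" are placeholders):

```cpp
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  RTSPServer* server = RTSPServer::createNew(*env, 8554);
  if (server == NULL) {
    *env << "Failed to create RTSP server\n";
    return 1;
  }

  // The stock file-based subsession: this is what puts H264VideoStreamFramer,
  // H264VideoStreamParser and ByteStreamFileSource at the bottom of the chain.
  ServerMediaSession* sms =
      ServerMediaSession::createNew(*env, "live", "live", "H.264 stream");
  sms->addSubsession(
      H264VideoFileServerMediaSubsession::createNew(*env, "test.264", False));
  server->addServerMediaSession(sms);

  env->taskScheduler().doEventLoop(); // does not return
  return 0;
}
```

Once a client issues PLAY on this session, OnDemandServerMediaSubsession::startStream starts steps 2 through 11 above.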
Timestamp generation flow:
1.When MultiFramedRTPSink::buildAndSendPacket builds the RTP header, it records the timestamp's position in the packet (fTimestampPosition); the timestamp is written there later, once the frame data and its presentation time are available.
2.MultiFramedRTPSink::afterGettingFrame1 -> H264VideoRTPSink::doSpecialFrameHandling -> MultiFramedRTPSink::setTimestamp writes the timestamp into the RTP packet (how the presentation time becomes the 32-bit RTP timestamp is sketched after the code excerpt below).
3.H264VideoStreamParser::parse calls H264VideoStreamFramer::setPresentationTime { fPresentationTime = fNextPresentationTime; }; this fPresentationTime is what eventually reaches step 2.
4.fNextPresentationTime is initialized to the current wall-clock time in the constructor. Afterwards, in H264VideoStreamParser::parse, once thisNALUnitEndsAccessUnit is True, the frame duration is derived from the frame rate and used to advance the presentation time of the next frame:
if (thisNALUnitEndsAccessUnit) {
#ifdef DEBUG
  fprintf(stderr, "*****This NAL unit ends the current access unit*****\n");
#endif
  usingSource()->fPictureEndMarker = True;
  ++usingSource()->fPictureCount;

  // Note that the presentation time for the next NAL unit will be different:
  struct timeval& nextPT = usingSource()->fNextPresentationTime; // alias
  nextPT = usingSource()->fPresentationTime;
  double nextFraction = nextPT.tv_usec/1000000.0 + 1/usingSource()->fFrameRate;
  unsigned nextSecsIncrement = (long)nextFraction;
  nextPT.tv_sec += (long)nextSecsIncrement;
  nextPT.tv_usec = (long)((nextFraction - nextSecsIncrement)*1000000);
}
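Step 2 above ends in MultiFramedRTPSink::setTimestamp, which turns the fPresentationTime produced here into the 32-bit timestamp written into the RTP header. A minimal sketch of that conversion, mirroring RTPSink::convertToRTPTimestamp (the function name toRtpTimestamp and the parameter timestampBase are illustrative; live555 keeps the base as a random per-sink offset, and H.264 video uses a 90 kHz clock):

```cpp
#include <stdint.h>
#include <sys/time.h>

uint32_t toRtpTimestamp(struct timeval pt, uint32_t timestampBase,
                        unsigned timestampFrequency /* 90000 for video */) {
  // Convert the wall-clock presentation time into clock ticks:
  uint32_t increment = (uint32_t)(timestampFrequency * (uint32_t)pt.tv_sec);
  increment += (uint32_t)(timestampFrequency * (pt.tv_usec / 1000000.0) + 0.5); // rounded
  return timestampBase + increment; // 32-bit wrap-around is intentional
}
```

The custom live source below takes a different route: instead of letting the parser derive timestamps from a nominal frame rate, it fills in fPresentationTime and fDurationInMicroseconds directly from the capture timestamps (pFrameInfo->dts, in milliseconds).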
void H264FramedLiveSource::doGetNextFrame()
{
    // Drain the current frame completely before pulling the next one.
    // If no data is available, fFrameSize stays 0.
    fDurationInMicroseconds = 0;
    fFrameSize = 0;
    fNumTruncatedBytes = 0;

    if (m_BufferCtrl.GetBufferSize() <= 0)
    {
        FRAME_INFO *pFrameInfo = NULL;
        m_pPullDataObj->GetVideoFrame(pFrameInfo);
        if (pFrameInfo != NULL)
        {
            if (fPresentationTime.tv_sec == 0 && fPresentationTime.tv_usec == 0)
            {
                // First frame: anchor the presentation time to the current wall clock
                // and remember the first capture timestamp (dts, in milliseconds).
                fDurationInMicroseconds = 0;
                gettimeofday(&fPresentationTime, NULL);
                m_startPts = pFrameInfo->dts;
                m_startTmpPts = pFrameInfo->dts;
                gettimeofday(&fTempTime, NULL);
            }
            else
            {
                // Advance the presentation time by the dts delta since the previous
                // frame (dts is in milliseconds, so multiply by 1000 for microseconds).
                unsigned long long uSeconds = (pFrameInfo->dts - m_startTmpPts) * 1000;
                fTempTime.tv_sec += (fTempTime.tv_usec + uSeconds) / 1000000;
                fTempTime.tv_usec = (fTempTime.tv_usec + uSeconds) % 1000000;
                fPresentationTime = fTempTime;

                // Note: measured against the first frame's dts (m_startPts), so this is
                // the elapsed time since stream start, not a per-frame duration.
                fDurationInMicroseconds = (pFrameInfo->dts - m_startPts) * 1000;
                m_startTmpPts = pFrameInfo->dts;

                printf("video frame dts: %lld, presentation time: %ld.%06ld\n",
                       (long long)pFrameInfo->dts,
                       (long)fPresentationTime.tv_sec,
                       (long)fPresentationTime.tv_usec);
            }
            m_BufferCtrl.PullBuffer((char*)pFrameInfo->pBuffer, pFrameInfo->dwSize);
            delete pFrameInfo;
            pFrameInfo = NULL;
        }
        // else: no frame available from the capture side right now.
    }

    int nBufferSize = m_BufferCtrl.GetBufferSize();
    if (nBufferSize > 0 && nBufferSize <= (int)fMaxSize)
    {
        // The whole buffered frame fits into the sink's buffer.
        int nsize = -1;
        m_BufferCtrl.PopBufferAll(nsize, (char*&)fTo);
        fFrameSize = nsize;
    }
    else if (nBufferSize > (int)fMaxSize)
    {
        // Only fMaxSize bytes fit; the remainder stays buffered for the next call.
        m_BufferCtrl.PopBuffer(fMaxSize, (char*&)fTo);
        fFrameSize = fMaxSize;
        // fNumTruncatedBytes = nBufferSize - fMaxSize;
    }
    // else: buffer is still empty; an empty frame (fFrameSize == 0) is delivered.

    printf("video duration (us): %u\n", fDurationInMicroseconds);

    // Schedule afterGetting with a 0-second delay, i.e. it runs on the next
    // pass of the event loop to hand the frame to the downstream object.
    nextTask() = envir().taskScheduler().scheduleDelayedTask(
        0, (TaskFunc*)FramedSource::afterGetting, this);
}
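Not shown above is how H264FramedLiveSource is plugged into the subsession. A hypothetical sketch of one common arrangement, assuming the source delivers complete NAL units and therefore sits behind an H264VideoStreamDiscreteFramer (which copies the upstream fPresentationTime) rather than the file-oriented H264VideoStreamFramer chain listed at the top; the class name H264LiveServerMediaSubsession and the H264FramedLiveSource constructor signature are assumptions, not taken from the original code:

```cpp
class H264LiveServerMediaSubsession : public OnDemandServerMediaSubsession {
protected:
  H264LiveServerMediaSubsession(UsageEnvironment& env)
    : OnDemandServerMediaSubsession(env, True /*reuseFirstSource*/) {}

  virtual FramedSource* createNewStreamSource(unsigned /*clientSessionId*/,
                                              unsigned& estBitrate) {
    estBitrate = 2000; // kbps, rough estimate
    FramedSource* liveSource = new H264FramedLiveSource(envir()); // assumed ctor
    // The discrete framer trusts the fPresentationTime set in doGetNextFrame():
    return H264VideoStreamDiscreteFramer::createNew(envir(), liveSource);
  }

  virtual RTPSink* createNewRTPSink(Groupsock* rtpGroupsock,
                                    unsigned char rtpPayloadTypeIfDynamic,
                                    FramedSource* /*inputSource*/) {
    return H264VideoRTPSink::createNew(envir(), rtpGroupsock,
                                       rtpPayloadTypeIfDynamic);
  }
};
```

The discrete framer is the usual choice for live sources because it passes the timestamps set in doGetNextFrame() straight through, whereas H264VideoStreamFramer/H264VideoStreamParser would recompute them from fFrameRate as in the excerpt earlier.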