2014-03-08 22:05:58
This post describes the flow of the live555 client, openRTSP (i.e. playCommon.cpp), for H.264 and G.726 streams. It has been applied successfully in a real project.
The following is what I traced myself; corrections are welcome if anything is wrong. Thanks!
live555's task scheduler handles three kinds of tasks: socket handlers, event handlers, and delay tasks.
The difference between them: the first two stay registered once added to the scheduler, whereas a delay task is discarded as soon as it has run once.
```cpp
/*** socket handler ***/
// definition
// For handling socket operations in the background (from the event loop):
typedef void BackgroundHandlerProc(void* clientData, int mask);
// registration
void BasicTaskScheduler::setBackgroundHandling(int socketNum, int conditionSet,
                                               BackgroundHandlerProc* handlerProc, void* clientData) {}
// execution
BasicTaskScheduler::SingleStep(unsigned maxDelayTime)
{
  (*handler->handlerProc)(handler->clientData, resultConditionSet);
}

/*** event handler ***/
// definition
typedef void TaskFunc(void* clientData);
// registration
EventTriggerId BasicTaskScheduler0::createEventTrigger(TaskFunc* eventHandlerProc) {}
// execution
BasicTaskScheduler::SingleStep(unsigned maxDelayTime)
{
  (*fTriggeredEventHandlers[i])(fTriggeredEventClientDatas[i]);
}

/*** delay task ***/
// definition
typedef void TaskFunc(void* clientData);  // same as the event handler
// registration
TaskToken BasicTaskScheduler0::scheduleDelayedTask(int64_t microseconds, TaskFunc* proc, void* clientData) {}
// execution
BasicTaskScheduler::SingleStep(unsigned maxDelayTime)
{
  fDelayQueue.handleAlarm();
}

void DelayQueue::handleAlarm()
{
  if (head()->fDeltaTimeRemaining != DELAY_ZERO) synchronize();

  if (head()->fDeltaTimeRemaining == DELAY_ZERO) {
    // This event is due to be handled:
    DelayQueueEntry* toRemove = head();
    removeEntry(toRemove); // do this first, in case handler accesses queue

    toRemove->handleTimeout();  // executed only once, then the entry is gone
  }
}
```
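To make the three registration paths concrete, here is a minimal sketch of how client code hands each kind of task to the scheduler (the handler bodies, the 1-second delay, and using stdin as the watched descriptor are my own illustrative assumptions, not openRTSP code):

```cpp
#include "BasicUsageEnvironment.hh"
#include <cstdio>

static void onSocketReadable(void* /*clientData*/, int /*mask*/) {
  std::printf("socket handler: stays registered until explicitly disabled\n");
}
static void onTimer(void* /*clientData*/) {
  std::printf("delay task: runs once, then is removed from the delay queue\n");
}
static void onTrigger(void* /*clientData*/) {
  std::printf("event handler: runs whenever its trigger id is signalled\n");
}

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();

  int fd = 0;  // assumption: some readable descriptor (stdin here)
  scheduler->setBackgroundHandling(fd, SOCKET_READABLE, onSocketReadable, NULL);

  scheduler->scheduleDelayedTask(1000000 /*us*/, onTimer, NULL);  // fires once, after 1s

  EventTriggerId id = scheduler->createEventTrigger(onTrigger);
  scheduler->triggerEvent(id, NULL);  // handled on the next SingleStep()

  char volatile watch = 0;
  scheduler->doEventLoop(&watch);  // drives all three kinds of tasks
  return 0;
}
```

Running this, the delay task fires exactly once, while the socket handler and the event trigger stay registered until disableBackgroundHandling()/deleteEventTrigger() is called.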
```cpp
// OPTIONS ---> DESCRIBE ---> SETUP ---> PLAY: the most common RTSP exchange.
getOptions() ---> continueAfterOPTIONS() --->
getSDPDescription() ---> continueAfterDESCRIBE()
{
  session = MediaSession::createNew(*env, sdpDescription);
  while () {  // for each audio/video subsession
    subsession->initiate();
  }
  setupStreams();
} --->
// setupStreams is recursive (setupStreams --> continueAfterSETUP --> setupStreams):
// it calls setupSubsession for every subsession.
setupStreams()
{
  while () {
    setupSubsession(subsession, streamUsingTCP, forceMulticastOnUnspecified,
                    continueAfterSETUP);
  }
  startPlayingSession(session, initialSeekTime, endTime, scale, continueAfterPLAY);
}
```
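The same chain can be written out as a standalone sketch in the style of live555's testRTSPClient (the function name setupNextSubsession, the global variables, and the omitted error handling are assumptions of mine, not the actual playCommon.cpp code):

```cpp
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

static MediaSession* session = NULL;
static MediaSubsessionIterator* iter = NULL;
static MediaSubsession* subsession = NULL;

void setupNextSubsession(RTSPClient* client);  // forward declaration

void continueAfterPLAY(RTSPClient* /*client*/, int /*resultCode*/, char* resultString) {
  delete[] resultString;  // streaming has started; data now arrives via the sinks
}

void continueAfterSETUP(RTSPClient* client, int /*resultCode*/, char* resultString) {
  delete[] resultString;
  setupNextSubsession(client);  // recurse until every subsession has been SET UP
}

void setupNextSubsession(RTSPClient* client) {
  if ((subsession = iter->next()) != NULL) {
    if (!subsession->initiate()) { setupNextSubsession(client); return; }  // skip on failure
    client->sendSetupCommand(*subsession, continueAfterSETUP);
    return;
  }
  // No subsessions left: everything is set up, so ask the server to start streaming:
  client->sendPlayCommand(*session, continueAfterPLAY);
}

void continueAfterDESCRIBE(RTSPClient* client, int /*resultCode*/, char* sdpDescription) {
  session = MediaSession::createNew(client->envir(), sdpDescription);
  delete[] sdpDescription;
  iter = new MediaSubsessionIterator(*session);
  setupNextSubsession(client);
}
```

Note that nothing here blocks: each send*Command() only queues the request, and the next handler in the chain runs from a later SingleStep().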
getOptions(continueAfterOPTIONS): how does getOptions() end up calling continueAfterOPTIONS()? Like this:
```cpp
// The responseHandler* afterFunc is always invoked via handler()
getOptions(continueAfterOPTIONS) ---> sendOptionsCommand() ---> sendRequest()
{
  ---> openConnection()
  {
    ---> connectToServer()
    {
      setBackgroundHandling(, SOCKET_WRITABLE|SOCKET_EXCEPTION, connectionHandler, );
    }
    // once the connection to the server is established:
    {
      setBackgroundHandling(, SOCKET_READABLE|SOCKET_EXCEPTION, incomingDataHandler, );
    }
  }
  if (connectionIsPending) {
    fRequestsAwaitingConnection.enqueue(request);
    return request->cseq();
  }
}
---> doEventLoop ---> SingleStep()
{
  // When the socket state matches, the registered handler runs,
  // e.g. connectionHandler / incomingDataHandler:
  (*handler->handlerProc)(handler->clientData, resultConditionSet);
}

SingleStep()  // 1st step: runs connectionHandler (SOCKET_WRITABLE)
{
  handler->handlerProc = connectionHandler;
}
SingleStep()  // 2nd step: runs incomingDataHandler (SOCKET_READABLE)
{
  handler->handlerProc = incomingDataHandler;
}

// incomingDataHandler eventually calls continueAfterOPTIONS
void RTSPClient::incomingDataHandler(void* instance, int /*mask*/)
{
  RTSPClient* client = (RTSPClient*)instance;
  client->incomingDataHandler1();
}
void RTSPClient::incomingDataHandler1()
{
  struct sockaddr_in dummy;  // 'from' address - not used
  int bytesRead = readSocket(envir(), fInputSocketNum,
      (unsigned char*)&fResponseBuffer[fResponseBytesAlreadySeen],
      fResponseBufferBytesLeft, dummy);
  handleResponseBytes(bytesRead)
  {
    // calls continueAfterOPTIONS(), etc.:
    (*foundRequest->handler())(this, resultCode, resultString);
  }
}
```
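Putting the two halves together, a minimal sketch (the URL and the handler body are illustrative assumptions) shows that sendOptionsCommand() only queues the request and returns its CSeq; continueAfterOPTIONS() runs later, from inside doEventLoop()/SingleStep(), once incomingDataHandler() has read the reply:

```cpp
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
#include <cstdio>

static char volatile eventLoopWatchVariable = 0;

static void continueAfterOPTIONS(RTSPClient* /*client*/, int resultCode, char* resultString) {
  std::printf("OPTIONS answered, result %d: %s\n", resultCode, resultString);
  delete[] resultString;
  eventLoopWatchVariable = 1;  // stop the loop here; a real client would send DESCRIBE next
}

int main() {
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

  RTSPClient* client = RTSPClient::createNew(*env, "rtsp://example.com/stream", 1, "sketch");
  client->sendOptionsCommand(continueAfterOPTIONS);  // returns immediately with the request's CSeq

  // All socket I/O and the callback itself happen inside the event loop:
  env->taskScheduler().doEventLoop(&eventLoopWatchVariable);

  Medium::close(client);
  return 0;
}
```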
● Start with setupStreams()
```cpp
void setupStreams()
{
  createOutputFiles()
  {
    while ((subsession = iter.next()) != NULL) {
      // h264
      fileSink = H264VideoFileSink::createNew(*env, outFileName,
                                              subsession->fmtp_spropparametersets(),
                                              fileSinkBufferSize, oneFilePerFrame);
      // g726
      // Normal case:
      fileSink = FileSink::createNew(*env, outFileName,
                                     fileSinkBufferSize, oneFilePerFrame);

      subsession->sink->startPlaying(*(subsession->readSource()),
                                     subsessionAfterPlaying, subsession);
    }
  }
}
// ------->
Boolean MediaSink::startPlaying(MediaSource& source,
                                afterPlayingFunc* afterFunc, void* afterClientData)
{
  fSource = (FramedSource*)&source;
  fAfterFunc = afterFunc;
  fAfterClientData = afterClientData;
  return continuePlaying();
}
// ------->
Boolean FileSink::continuePlaying()
{
  if (fSource == NULL) return False;

  fSource->getNextFrame(fBuffer, fBufferSize,
                        afterGettingFrame, this,
                        onSourceClosure, this);
  return True;
}
```
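Every sink follows the same pull pattern. As a reference, here is a minimal custom sink in the style of the DummySink from live555's testRTSPClient (the class name, the 100000-byte buffer, and the empty consume step are assumptions of mine); it is the same continuePlaying()/afterGettingFrame() loop that FileSink uses and that the trace below walks through:

```cpp
#include "liveMedia.hh"

class SketchSink : public MediaSink {
public:
  static SketchSink* createNew(UsageEnvironment& env) { return new SketchSink(env); }

private:
  SketchSink(UsageEnvironment& env) : MediaSink(env) {
    fBuffer = new unsigned char[100000];  // assumed receive-buffer size
  }
  virtual ~SketchSink() { delete[] fBuffer; }

  // Called by startPlaying(), and again after every delivered frame,
  // to ask the upstream source for the next frame:
  virtual Boolean continuePlaying() {
    if (fSource == NULL) return False;
    fSource->getNextFrame(fBuffer, 100000, afterGettingFrame, this,
                          onSourceClosure, this);
    return True;
  }

  // Static trampoline handed to getNextFrame() above:
  static void afterGettingFrame(void* clientData, unsigned frameSize,
                                unsigned /*numTruncatedBytes*/,
                                struct timeval /*presentationTime*/,
                                unsigned /*durationInMicroseconds*/) {
    SketchSink* sink = (SketchSink*)clientData;
    // ... consume sink->fBuffer[0 .. frameSize) here (FileSink calls addData()) ...
    (void)frameSize;
    sink->continuePlaying();  // then immediately request the next frame
  }

  unsigned char* fBuffer;
};
```

Attaching it works exactly as above: subsession->sink = SketchSink::createNew(*env); then subsession->sink->startPlaying(*(subsession->readSource()), subsessionAfterPlaying, subsession);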
● Then look at FileSink::continuePlaying()
FileSink::continuePlaying()
FramedSource::getNextFrame()
MultiFramedRTPSource::doGetNextFrame()
MultiFramedRTPSource::doGetNextFrame1()
//In what follows, '::' only indicates the class in which the static function is declared:
static void FramedSource::afterGetting(FramedSource* source);
static void FileSink::afterGettingFrame(void* clientData, unsigned frameSize,unsigned numTruncatedBytes,
struct timeval presentationTime,unsigned durationInMicroseconds);
MultiFramedRTPSource::doGetNextFrame1() is a recursive function; its exit condition is:
```cpp
while (fNeedDelivery)  // during normal reception, fNeedDelivery == 1
{
  if (nextPacket == NULL) {
    break;
  }
}
```
Notation used in the trace below:
(Enter->Exit): enters and exits immediately
(Enter,): pairs with the nearest following (,Exit)
(,Exit): pairs with the nearest preceding (Enter,)
static afterGetting::nth(Enter,) <---> static afterGetting::(n+1)th(,Exit)
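Before reading the real trace, a toy model in plain C++ (my own sketch, not live555 code) reproduces exactly this pairing: the delivery loop stands in for doGetNextFrame1(), and the callback plays the role of static afterGetting()/afterGettingFrame() calling continuePlaying() again:

```cpp
#include <cstdio>
#include <queue>

static std::queue<int> packets;        // pending "packets" from one network read
static void (*onFrameDelivered)(int);  // plays the role of static afterGetting()
static int depth = 0;

static void trace(const char* what, const char* phase) {
  std::printf("%*s%s(%s)\n", depth * 2, "", what, phase);
}

// Plays the role of doGetNextFrame1(): deliver while data is queued, else return.
static void doGetNextFrame1() {
  ++depth; trace("doGetNextFrame1", "Enter");
  while (!packets.empty()) {
    int frame = packets.front(); packets.pop();
    onFrameDelivered(frame);           // (Enter,): nested calls happen in here
  }
  trace("doGetNextFrame1", "Exit");    // (,Exit): pairs with the nearest open Enter
  --depth;
}

// Plays the role of afterGetting()/afterGettingFrame(): consume, then ask for more.
static void onFrame(int frame) {
  ++depth; trace("afterGetting", "Enter");
  std::printf("%*sconsumed frame %d\n", depth * 2, "", frame);
  doGetNextFrame1();                   // continuePlaying() -> getNextFrame() -> ...
  trace("afterGetting", "Exit");
  --depth;
}

int main() {
  onFrameDelivered = onFrame;
  packets.push(1); packets.push(2);    // pretend one read produced two complete frames
  doGetNextFrame1();                   // like networkReadHandler1()
  return 0;
}
```

The innermost doGetNextFrame1() finds the queue empty and exits immediately (the Enter->Exit case); every outer Enter is then closed in reverse order, which is exactly the (Enter,)/(,Exit) pairing in the trace below.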
Now trace from the first call to continuePlaying(). You can skip straight to the 3rd step.
continuePlaying()--->getNextFrame()--->doGetNextFrame()--->
doGetNextFrame1(Enter->Exit[nextPacket == NULL])--->......you can trace the rest yourself--->
startPlayingSession()--->setupStreams()--->SingleStep()::1st--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,)--->
static afterGetting(Enter,)--->static afterGettingFrame(Enter,)--->H264or5VideoFileSink::afterGettingFrame()-->
FileSink::afterGettingFrame(){addData();continuePlaying()}-->
continuePlaying()-->getNextFrame()--->doGetNextFrame()--->
doGetNextFrame1(Enter->Exit[nextPacket == NULL])--->static afterGettingFrame(,Exit)--->static afterGetting(,Exit)--->
doGetNextFrame1(,Exit[nextPacket == NULL])--->networkReadHandler1(,Exit)--->
SingleStep()::2nd--->incomingReportHandler1()--->
SingleStep()::3rd--->incomingDataHandler1()--->continueAfterPLAY()--->
SingleStep()::4th--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,)--->
static afterGetting(Enter,)--->static afterGettingFrame(Enter,)--->H264or5VideoFileSink::afterGettingFrame()-->
FileSink::afterGettingFrame(){addData();continuePlaying()}-->
//Steps 1 and 2 above still include RTSP interaction, some initialization, and handling of the H.264 SPS; from the 3rd step on it is pure data flow.
//static afterGetting() is called only once a complete frame has been received.
//FileSink::afterGettingFrame(){addData();continuePlaying()}
//addData(): writes one H.264 frame to the file; continuePlaying(): keep going.
continuePlaying()-->getNextFrame()--->doGetNextFrame()--->
doGetNextFrame1(Enter->Exit[nextPacket == NULL])--->static afterGettingFrame(,Exit)--->static afterGetting(,Exit)--->
doGetNextFrame1(,Exit[nextPacket == NULL])--->networkReadHandler1(,Exit)--->
SingleStep()::5th--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,)--->
static afterGetting(Enter,)--->static afterGettingFrame(Enter,)--->H264or5VideoFileSink::afterGettingFrame()-->
FileSink::afterGettingFrame(){addData();continuePlaying()}-->
continuePlaying()-->getNextFrame()--->doGetNextFrame()--->
doGetNextFrame1(Enter->Exit[nextPacket == NULL])--->static afterGettingFrame(,Exit)--->static afterGetting(,Exit)--->
doGetNextFrame1(,Exit[nextPacket == NULL])--->networkReadHandler1(,Exit)--->
//The above is the Exit side for the 3rd one; pairing networkReadHandler1::3rd(Enter,) with networkReadHandler1::4th(,Exit) shows that doGetNextFrame1 is a recursive function.
//networkReadHandler1--->doGetNextFrame1--->static afterGetting--->continuePlaying()--->doGetNextFrame1
// SingleStep() goes on forever; networkReadHandler1() keeps entering and exiting.
SingleStep()::6th--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,)--->
static afterGetting(Enter,)--->static afterGettingFrame(Enter,)--->H264or5VideoFileSink::afterGettingFrame()-->
FileSink::afterGettingFrame(){addData();continuePlaying()}-->
//nth step: a multi-packet frame takes several SingleStep() iterations, during which static afterGetting() is not called.
//Only after all slices, i.e. one complete frame, have been received is static afterGetting() called.
continuePlaying()-->getNextFrame()--->doGetNextFrame()--->
doGetNextFrame1(Enter->Exit[nextPacket == NULL])--->static afterGettingFrame(,Exit)--->static afterGetting(,Exit)--->
doGetNextFrame1(,Exit[nextPacket == NULL])--->networkReadHandler1(,Exit)--->
SingleStep()::nth--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,Exit)--->
networkReadHandler1(,Exit)--->
SingleStep()::n+1th--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,Exit)--->
networkReadHandler1(,Exit)--->
......
//the last packet of the frame
SingleStep()::n+mth--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,)--->
//此处才会调用static afterGetting
static afterGetting(Enter,)--->static afterGettingFrame(Enter,)--->H264or5VideoFileSink::afterGettingFrame()-->
FileSink::afterGettingFrame(){addData();continuePlaying()}-->
//n+1th
continuePlaying()-->getNextFrame()--->doGetNextFrame()--->
doGetNextFrame1(Enter->Exit[nextPacket == NULL])--->static afterGettingFrame(,Exit)--->static afterGetting(,Exit)--->
doGetNextFrame1(,Exit[nextPacket == NULL])--->networkReadHandler1(,Exit)--->
SingleStep()::n+m+1th--->
networkReadHandler1(Enter,)--->doGetNextFrame1(Enter,Exit)--->
networkReadHandler1(,Exit)--->
......
Original article: http://www.cnblogs.com/freezlz/p/5324220.html