The audio/video data actually carried in an RTSP stream travels as a sequence of RTP packets; bytes 5 through 8 of each RTP packet's header hold the RTP timestamp, a 32-bit integer.
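As a quick illustration, here is a minimal sketch (not live555 code) of extracting that field from a raw RTP header; buf is assumed to point at the first byte of the header of an already-validated packet:

#include <cstdint>

// The timestamp occupies header bytes 4..7 (the 5th through 8th bytes),
// transmitted in network byte order (big-endian):
uint32_t extractRtpTimestamp(const uint8_t* buf) {
  return ((uint32_t)buf[4] << 24) | ((uint32_t)buf[5] << 16) |
         ((uint32_t)buf[6] << 8)  |  (uint32_t)buf[7];
}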
In live555, a sink function such as H264or5VideoFileSink::afterGettingFrame (called as each frame of data is received) has this signature:
void H264or5VideoFileSink::afterGettingFrame(unsigned frameSize,
                                             unsigned numTruncatedBytes,
                                             struct timeval presentationTime)
{
...
}
The timestamp the sink actually receives is the struct timeval presentationTime parameter, where:
struct timeval {
  long tv_sec;  /* seconds */
  long tv_usec; /* and microseconds */
};
For example:
Before conversion:
rtpTimestamp: 439803124
After conversion:
presentationTime.tv_sec: 1482476415
presentationTime.tv_usec: 183008
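Note that the converted presentationTime is wall-clock (Unix epoch) time: tv_sec = 1482476415 corresponds to 2016-12-23 07:00:15 UTC. The raw rtpTimestamp, by contrast, is just a counter running at the media's clock rate (90000 Hz for H.264/H.265 video).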
How does live555 convert the former into the latter?
The conversion is implemented in the RTPReceptionStats::noteIncomingPacket function in liveMedia/RTPSource.cpp.
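In essence, the code below maintains a synchronization point (fSyncTimestamp, fSyncTime) and computes

presentationTime = fSyncTime + (rtpTimestamp - fSyncTimestamp) / timestampFrequency

Initially the sync point is simply the wall-clock arrival time of the first packet; it is later corrected as RTCP Sender Reports come in.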
RTPReceptionStats::noteIncomingPacket
void RTPReceptionStats::noteIncomingPacket(u_int16_t seqNum,
                                           u_int32_t rtpTimestamp,
                                           unsigned timestampFrequency,
                                           Boolean useForJitterCalculation,
                                           struct timeval& resultPresentationTime,
                                           Boolean& resultHasBeenSyncedUsingRTCP,
                                           unsigned packetSize)
{
  ...
  // Record the inter-packet delay
  struct timeval timeNow;
  gettimeofday(&timeNow, NULL);
  ...
  fLastPacketReceptionTime = timeNow;
  ...
  // Return the 'presentation time' that corresponds to "rtpTimestamp":
  if (fSyncTime.tv_sec == 0 && fSyncTime.tv_usec == 0)
  {
    // This is the first timestamp seen, so use the current system time
    // as the synchronization point; it will be corrected later, as
    // RTCP SRs are received.
    fSyncTimestamp = rtpTimestamp;
    fSyncTime = timeNow;
  }

  int timestampDiff = rtpTimestamp - fSyncTimestamp;
  // Note: This works even if the timestamp wraps around
  // (as long as "int" is 32 bits)
  // Divide this by the timestamp frequency to get real time:
  double timeDiff = timestampDiff/(double)timestampFrequency;

  // Add this to the 'sync time' to get our result:
  unsigned const million = 1000000;
  unsigned seconds, uSeconds;
  if (timeDiff >= 0.0)
  {
    // The core of the conversion: split timeDiff into whole seconds
    // plus microseconds, and add it to the sync time:
    seconds = fSyncTime.tv_sec + (unsigned)(timeDiff);
    uSeconds = fSyncTime.tv_usec + (unsigned)((timeDiff - (unsigned)timeDiff)*million);
    if (uSeconds >= million)
    {
      uSeconds -= million;
      ++seconds;
    }
  }
  else
  {
    timeDiff = -timeDiff;
    seconds = fSyncTime.tv_sec - (unsigned)(timeDiff);
    uSeconds = fSyncTime.tv_usec - (unsigned)((timeDiff - (unsigned)timeDiff)*million);
    if ((int)uSeconds < 0)
    {
      uSeconds += million;
      --seconds;
    }
  }
  resultPresentationTime.tv_sec = seconds;
  resultPresentationTime.tv_usec = uSeconds;
  resultHasBeenSyncedUsingRTCP = fHasBeenSynchronized;

  // Save these as the new synchronization timestamp & time:
  fSyncTimestamp = rtpTimestamp;
  fSyncTime = resultPresentationTime;

  fPreviousPacketRTPTimestamp = rtpTimestamp;
}
The inputs are rtpTimestamp (together with the timestamp frequency and related state); the output is struct timeval& resultPresentationTime.
Call stack of RTPReceptionStats::noteIncomingPacket()
RTPReceptionStats::noteIncomingPacket()
RTPReceptionStatsDB::noteIncomingPacket()
MultiFramedRTPSource::networkReadHandler1()
MultiFramedRTPSource::networkReadHandler()
SocketDescriptor::tcpReadHandler1(int mask, bool callAgain)
SocketDescriptor::tcpReadHandler()
BasicTaskScheduler::SingleStep(unsigned int maxDelayTime)
BasicTaskScheduler0::doEventLoop(volatile char * watchVariable)
MultiFramedRTPSource
class FramedSource: public MediaSource {
  ...
  struct timeval fPresentationTime; // out
  ...
};

class RTPSource: public FramedSource {
  ...
};

class MultiFramedRTPSource: public RTPSource {
  ...
  static void networkReadHandler(MultiFramedRTPSource* source, int /*mask*/);
  void networkReadHandler1();
};

class BufferedPacket {
  ...
  struct timeval fPresentationTime; // corresponding to "fRTPTimestamp"
  ...
};
MultiFramedRTPSource::networkReadHandler1()
void MultiFramedRTPSource::networkReadHandler1() {
  BufferedPacket* bPacket = fPacketReadInProgress;
  if (bPacket == NULL) {
    // Normal case: Get a free BufferedPacket descriptor to hold the new network packet:
    bPacket = fReorderingBuffer->getFreePacket(this);
  }
  ...
  struct timeval presentationTime; // computed by:
  Boolean hasBeenSyncedUsingRTCP; // computed by:
  // The call below to RTPReceptionStats::noteIncomingPacket()
  // stores the computed time in presentationTime:
  receptionStatsDB().noteIncomingPacket(rtpSSRC,
                                        rtpSeqNo,
                                        rtpTimestamp,
                                        timestampFrequency(),
                                        usableInJitterCalculation,
                                        presentationTime,
                                        hasBeenSyncedUsingRTCP, bPacket->dataSize());

  // Fill in the rest of the packet descriptor, and store it:
  struct timeval timeNow;
  gettimeofday(&timeNow, NULL);
  // Save presentationTime into the BufferedPacket's fPresentationTime:
  bPacket->assignMiscParams(rtpSeqNo,
                            rtpTimestamp,
                            presentationTime,
                            hasBeenSyncedUsingRTCP,
                            rtpMarkerBit,
                            timeNow);
  ...
  // doGetNextFrame1() calls BufferedPacket::use(), which copies the
  // fPresentationTime saved in the BufferedPacket into the FramedSource's
  // fPresentationTime:
  doGetNextFrame1();
  // If we didn't get proper data this time, we'll get another chance
}
- S1: RTPReceptionStats::noteIncomingPacket() computes resultPresentationTime.
- S2: BufferedPacket::assignMiscParams() assigns resultPresentationTime to the BufferedPacket's fPresentationTime.
void BufferedPacket::assignMiscParams(unsigned short rtpSeqNo,
                                      unsigned rtpTimestamp,
                                      struct timeval presentationTime,
                                      Boolean hasBeenSyncedUsingRTCP,
                                      Boolean rtpMarkerBit,
                                      struct timeval timeReceived)
{
  fRTPSeqNo = rtpSeqNo;
  fRTPTimestamp = rtpTimestamp;
  fPresentationTime = presentationTime;
  fHasBeenSyncedUsingRTCP = hasBeenSyncedUsingRTCP;
  fRTPMarkerBit = rtpMarkerBit;
  fTimeReceived = timeReceived;
}
- S3: doGetNextFrame1() assigns the fPresentationTime saved in the BufferedPacket to the FramedSource's fPresentationTime.
void MultiFramedRTPSource::doGetNextFrame1()
{
  while (fNeedDelivery)
  {
    // If we already have packet data available, then deliver it now.
    Boolean packetLossPrecededThis;
    BufferedPacket* nextPacket
      = fReorderingBuffer->getNextCompletedPacket(packetLossPrecededThis);
    if (nextPacket == NULL) break;
    ...
    // The packet is usable. Deliver all or part of it to our caller:
    unsigned frameSize;
    nextPacket->use(fTo,
                    fMaxSize,
                    frameSize,
                    fNumTruncatedBytes,
                    fCurPacketRTPSeqNum,
                    fCurPacketRTPTimestamp,
                    fPresentationTime, // the presentation time
                    fCurPacketHasBeenSynchronizedUsingRTCP,
                    fCurPacketMarkerBit);
    ...
  }
}
void BufferedPacket::use(unsigned char* to,
                         unsigned toSize,
                         unsigned& bytesUsed,
                         unsigned& bytesTruncated,
                         unsigned short& rtpSeqNo,
                         unsigned& rtpTimestamp,
                         struct timeval& presentationTime, // out
                         Boolean& hasBeenSyncedUsingRTCP,
                         Boolean& rtpMarkerBit)
{
  ...
  rtpTimestamp = fRTPTimestamp;
  presentationTime = fPresentationTime; // the hand-off of the presentation time
  ...
}
Call stack of H264or5VideoFileSink::afterGettingFrame()
H264or5VideoFileSink::afterGettingFrame(unsigned int frameSize, unsigned int numTruncatedBytes, timeval presentationTime)
FileSink::afterGettingFrame(void * clientData, unsigned int frameSize, unsigned int numTruncatedBytes, timeval presentationTime, unsigned int __formal)
FramedSource::afterGetting(FramedSource * source)
MultiFramedRTPSource::doGetNextFrame1()
MultiFramedRTPSource::networkReadHandler1()
MultiFramedRTPSource::networkReadHandler(MultiFramedRTPSource * source, int __formal)
SocketDescriptor::tcpReadHandler1(int mask)
SocketDescriptor::tcpReadHandler(SocketDescriptor * socketDescriptor, int mask)
BasicTaskScheduler::SingleStep(unsigned int maxDelayTime)
BasicTaskScheduler0::doEventLoop(volatile char * watchVariable)
FramedSource::afterGetting()
In this function the FramedSource's fPresentationTime is passed to FileSink::afterGettingFrame. That is, the Source (producer) hands the audio/video data buffer, the data size, the presentation time, and so on to the Sink (consumer).
void FramedSource::afterGetting(FramedSource* source) {
  source->fIsCurrentlyAwaitingData = False;
  // indicates that we can be read again
  // Note that this needs to be done here, in case the "fAfterFunc"
  // called below tries to read another frame (which it usually will)

  // source->fPresentationTime is the FramedSource's fPresentationTime;
  // this is where it gets passed to the sink's callback:
  if (source->fAfterGettingFunc != NULL) {
    (*(source->fAfterGettingFunc))(source->fAfterGettingClientData,
                                   source->fFrameSize, source->fNumTruncatedBytes,
                                   source->fPresentationTime,
                                   source->fDurationInMicroseconds);
  }
}
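How did fAfterGettingFunc come to point at the sink's callback? It is registered when the sink requests a frame from its source; FileSink::continuePlaying() looks essentially like this (abridged):

Boolean FileSink::continuePlaying() {
  if (fSource == NULL) return False;

  // Register the static afterGettingFrame() (with 'this' as client data);
  // the source will invoke it, via FramedSource::afterGetting(), once a
  // frame has been delivered into fBuffer:
  fSource->getNextFrame(fBuffer, fBufferSize,
                        afterGettingFrame, this,
                        onSourceClosure, this);

  return True;
}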
FileSink::afterGettingFrame()
void FileSink::afterGettingFrame(void* clientData,
                                 unsigned frameSize,
                                 unsigned numTruncatedBytes,
                                 struct timeval presentationTime,
                                 unsigned /*durationInMicroseconds*/)
{
  FileSink* sink = (FileSink*)clientData;
  sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime);
}