In an RTSP session, the audio and video data are actually carried in RTP packets. Bytes 5 through 8 of each RTP packet header (zero-based offsets 4..7) hold the RTP timestamp, a 32-bit integer.
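To make that byte layout concrete, here is a minimal sketch (my own illustration, not live555 code; extractRtpTimestamp is a hypothetical helper) that reads the timestamp out of a raw packet:

#include <cstdint>
#include <cstddef>

// Read the RTP timestamp -- header bytes 5..8, i.e. zero-based offsets 4..7 --
// as a big-endian (network byte order) 32-bit integer.
uint32_t extractRtpTimestamp(unsigned char const* packet, size_t len) {
  if (len < 8) return 0; // too short to contain an RTP header
  return ((uint32_t)packet[4] << 24) | ((uint32_t)packet[5] << 16)
       | ((uint32_t)packet[6] <<  8) |  (uint32_t)packet[7];
}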
In live555, however, a frame-handling function such as H264or5VideoFileSink::afterGettingFrame receives each frame like this:
void H264or5VideoFileSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
struct timeval presentationTime)
{
...
}
The timestamp the sink actually receives is the struct timeval presentationTime parameter, where:

struct timeval {
  long tv_sec;  /* seconds */
  long tv_usec; /* and microseconds */
};
For example:

Before conversion:  rtpTimestamp: 439803124
After conversion:   presentationTime.tv_sec:  1482476415
                    presentationTime.tv_usec: 183008
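To sanity-check such a value, it helps to print it as wall-clock time. A quick snippet (my own illustration; printPresentationTime is a hypothetical helper, not part of live555):

#include <cstdio>
#include <ctime>
#include <sys/time.h>

// Format a presentation time as local wall-clock time plus microseconds.
void printPresentationTime(struct timeval const& pt) {
  char buf[32];
  time_t secs = pt.tv_sec;
  strftime(buf, sizeof buf, "%Y-%m-%d %H:%M:%S", localtime(&secs));
  printf("%s.%06ld\n", buf, (long)pt.tv_usec);
}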
How does live555 convert one into the other?
The conversion is implemented in RTPReceptionStats::noteIncomingPacket, in liveMedia/RTPSource.cpp.
void RTPReceptionStats::noteIncomingPacket(u_int16_t seqNum, u_int32_t rtpTimestamp,
                                           unsigned timestampFrequency,
                                           Boolean useForJitterCalculation,
                                           struct timeval& resultPresentationTime,
                                           Boolean& resultHasBeenSyncedUsingRTCP,
                                           unsigned packetSize)
{
  ...
  // Record the inter-packet delay
  struct timeval timeNow;
  gettimeofday(&timeNow, NULL);
  ...
  fLastPacketReceptionTime = timeNow;
  ...
  // Return the 'presentation time' that corresponds to "rtpTimestamp":
  if (fSyncTime.tv_sec == 0 && fSyncTime.tv_usec == 0) {
    // This is the first timestamp we have seen: use the current system time
    // as the synchronization point. It will be corrected later, as RTCP SRs
    // (Sender Reports) are received.
    fSyncTimestamp = rtpTimestamp;
    fSyncTime = timeNow;
  }

  int timestampDiff = rtpTimestamp - fSyncTimestamp;
      // Note: This works even if the timestamp wraps around
      // (as long as "int" is 32 bits)

  // Divide this by the timestamp frequency to get real time:
  double timeDiff = timestampDiff/(double)timestampFrequency;

  // Add this to the 'sync time' to get our result:
  unsigned const million = 1000000;
  unsigned seconds, uSeconds;
  if (timeDiff >= 0.0) {
    // Core algorithm:
    seconds = fSyncTime.tv_sec + (unsigned)(timeDiff);
    uSeconds = fSyncTime.tv_usec + (unsigned)((timeDiff - (unsigned)timeDiff)*million);
    if (uSeconds >= million) {
      uSeconds -= million;
      ++seconds;
    }
  } else {
    timeDiff = -timeDiff;
    seconds = fSyncTime.tv_sec - (unsigned)(timeDiff);
    uSeconds = fSyncTime.tv_usec - (unsigned)((timeDiff - (unsigned)timeDiff)*million);
    if ((int)uSeconds < 0) {
      uSeconds += million;
      --seconds;
    }
  }
  resultPresentationTime.tv_sec = seconds;
  resultPresentationTime.tv_usec = uSeconds;
  resultHasBeenSyncedUsingRTCP = fHasBeenSynchronized;

  // Save these as the new synchronization timestamp & time:
  fSyncTimestamp = rtpTimestamp;
  fSyncTime = resultPresentationTime;

  fPreviousPacketRTPTimestamp = rtpTimestamp;
}
Given rtpTimestamp (and the other inputs), the function produces struct timeval& resultPresentationTime as its output.
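To verify the arithmetic against the example above, here is a standalone re-implementation of the core conversion (a sketch for illustration only; the sync values and the 90000 Hz video clock are assumptions, and rtpToPresentationTime is my own helper name, not live555 API):

#include <sys/time.h>
#include <cstdint>
#include <cstdio>

// Standalone re-implementation of the conversion above, for illustration only.
static struct timeval rtpToPresentationTime(uint32_t rtpTimestamp,
                                            uint32_t syncTimestamp,
                                            struct timeval syncTime,
                                            unsigned timestampFrequency) {
  int timestampDiff = rtpTimestamp - syncTimestamp; // wrap-safe on 32-bit int
  double timeDiff = timestampDiff / (double)timestampFrequency;

  unsigned const million = 1000000;
  unsigned seconds, uSeconds;
  if (timeDiff >= 0.0) {
    seconds = syncTime.tv_sec + (unsigned)timeDiff;
    uSeconds = syncTime.tv_usec + (unsigned)((timeDiff - (unsigned)timeDiff)*million);
    if (uSeconds >= million) { uSeconds -= million; ++seconds; }
  } else {
    timeDiff = -timeDiff;
    seconds = syncTime.tv_sec - (unsigned)timeDiff;
    uSeconds = syncTime.tv_usec - (unsigned)((timeDiff - (unsigned)timeDiff)*million);
    if ((int)uSeconds < 0) { uSeconds += million; --seconds; }
  }

  struct timeval result;
  result.tv_sec = seconds;
  result.tv_usec = uSeconds;
  return result;
}

int main() {
  // Assumed sync point, 90000 ticks (= 1 s at a 90 kHz video clock) behind
  // the example packet's rtpTimestamp of 439803124:
  struct timeval sync = { 1482476414, 183008 };
  struct timeval pt = rtpToPresentationTime(439803124, 439713124, sync, 90000);
  printf("%ld.%06ld\n", (long)pt.tv_sec, (long)pt.tv_usec);
  // Prints 1482476415.183008 -- matching the example values above.
  return 0;
}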
Call stack of RTPReceptionStats::noteIncomingPacket() (innermost frame first):

RTPReceptionStats::noteIncomingPacket()
RTPReceptionStatsDB::noteIncomingPacket()
MultiFramedRTPSource::networkReadHandler1()
MultiFramedRTPSource::networkReadHandler()
SocketDescriptor::tcpReadHandler1(int mask, bool callAgain)
SocketDescriptor::tcpReadHandler()
BasicTaskScheduler::SingleStep(unsigned int maxDelayTime)
BasicTaskScheduler0::doEventLoop(volatile char * watchVariable)
MultiFramedRTPSource and related class declarations
class FramedSource: public MediaSource {
  ...
  struct timeval fPresentationTime; // out
  ...
};

class RTPSource: public FramedSource {
  ...
};

class MultiFramedRTPSource: public RTPSource {
  ...
  static void networkReadHandler(MultiFramedRTPSource* source, int /*mask*/);
  void networkReadHandler1();
};

class BufferedPacket {
  ...
  struct timeval fPresentationTime; // corresponding to "fRTPTimestamp"
  ...
};
MultiFramedRTPSource::networkReadHandler1()
void MultiFramedRTPSource::networkReadHandler1() {
  BufferedPacket* bPacket = fPacketReadInProgress;
  if (bPacket == NULL) {
    // Normal case: Get a free BufferedPacket descriptor to hold the new network packet:
    bPacket = fReorderingBuffer->getFreePacket(this);
  }
  ...
  struct timeval presentationTime; // computed by:
  Boolean hasBeenSyncedUsingRTCP;  // computed by:
  // RTPReceptionStats::noteIncomingPacket() is called here;
  // the time it computes is returned in presentationTime.
  receptionStatsDB().noteIncomingPacket(rtpSSRC,
                                        rtpSeqNo,
                                        rtpTimestamp,
                                        timestampFrequency(),
                                        usableInJitterCalculation,
                                        presentationTime,
                                        hasBeenSyncedUsingRTCP,
                                        bPacket->dataSize());

  // Fill in the rest of the packet descriptor, and store it:
  struct timeval timeNow;
  gettimeofday(&timeNow, NULL);
  // presentationTime is saved into the BufferedPacket's fPresentationTime:
  bPacket->assignMiscParams(rtpSeqNo,
                            rtpTimestamp,
                            presentationTime,
                            hasBeenSyncedUsingRTCP,
                            rtpMarkerBit,
                            timeNow);
  ...
  // doGetNextFrame1() calls BufferedPacket::use(), which copies the
  // fPresentationTime stored in the BufferedPacket into the FramedSource's
  // fPresentationTime.
  doGetNextFrame1();
  // If we didn't get proper data this time, we'll get another chance
}
S1: RTPReceptionStats::noteIncomingPacket()
    computes resultPresentationTime.
S2: BufferedPacket::assignMiscParams()
    copies resultPresentationTime into the BufferedPacket's fPresentationTime.
void BufferedPacket::assignMiscParams(unsigned short rtpSeqNo, unsigned rtpTimestamp,
                                      struct timeval presentationTime,
                                      Boolean hasBeenSyncedUsingRTCP,
                                      Boolean rtpMarkerBit,
                                      struct timeval timeReceived)
{
  fRTPSeqNo = rtpSeqNo;
  fRTPTimestamp = rtpTimestamp;
  fPresentationTime = presentationTime;
  fHasBeenSyncedUsingRTCP = hasBeenSyncedUsingRTCP;
  fRTPMarkerBit = rtpMarkerBit;
  fTimeReceived = timeReceived;
}
void MultiFramedRTPSource::doGetNextFrame1()
{
  while (fNeedDelivery) {
    // If we already have packet data available, then deliver it now.
    Boolean packetLossPrecededThis;
    BufferedPacket* nextPacket
      = fReorderingBuffer->getNextCompletedPacket(packetLossPrecededThis);
    if (nextPacket == NULL) break;
    ...
    // The packet is usable. Deliver all or part of it to our caller:
    unsigned frameSize;
    nextPacket->use(fTo,
                    fMaxSize,
                    frameSize,
                    fNumTruncatedBytes,
                    fCurPacketRTPSeqNum,
                    fCurPacketRTPTimestamp,
                    fPresentationTime, // the presentation time
                    fCurPacketHasBeenSynchronizedUsingRTCP,
                    fCurPacketMarkerBit);
    ...
  }
}
void BufferedPacket::use(unsigned char* to, unsigned toSize,
                         unsigned& bytesUsed, unsigned& bytesTruncated,
                         unsigned short& rtpSeqNo, unsigned& rtpTimestamp,
                         struct timeval& presentationTime, // out
                         Boolean& hasBeenSyncedUsingRTCP,
                         Boolean& rtpMarkerBit)
{
  ...
  rtpTimestamp = fRTPTimestamp;
  presentationTime = fPresentationTime; // hand the stored time back out
  ...
}
Call stack of H264or5VideoFileSink::afterGettingFrame() (innermost frame first):
H264or5VideoFileSink::afterGettingFrame(unsigned int frameSize, unsigned int numTruncatedBytes, timeval presentationTime)
FileSink::afterGettingFrame(void * clientData, unsigned int frameSize, unsigned int numTruncatedBytes, timeval presentationTime, unsigned int __formal)
FramedSource::afterGetting(FramedSource * source)
MultiFramedRTPSource::doGetNextFrame1()
MultiFramedRTPSource::networkReadHandler1()
MultiFramedRTPSource::networkReadHandler(MultiFramedRTPSource * source, int __formal)
SocketDescriptor::tcpReadHandler1(int mask)
SocketDescriptor::tcpReadHandler(SocketDescriptor * socketDescriptor, int mask)
BasicTaskScheduler::SingleStep(unsigned int maxDelayTime)
BasicTaskScheduler0::doEventLoop(volatile char * watchVariable)
FramedSource::afterGetting()
This function passes the FramedSource's fPresentationTime on to FileSink::afterGettingFrame.
In other words, it hands the Source's (producer's) audio/video buffer, frame size, presentation time, and so on over to the Sink (consumer).
void FramedSource::afterGetting(FramedSource* source) {
  source->fIsCurrentlyAwaitingData = False;
      // indicates that we can be read again
      // Note that this needs to be done here, in case the "fAfterFunc"
      // called below tries to read another frame (which it usually will)

  // source->fPresentationTime is the FramedSource's fPresentationTime;
  // this is where it gets passed along:
  if (source->fAfterGettingFunc != NULL) {
    (*(source->fAfterGettingFunc))(source->fAfterGettingClientData,
                                   source->fFrameSize, source->fNumTruncatedBytes,
                                   source->fPresentationTime,
                                   source->fDurationInMicroseconds);
  }
}
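The shape of that callback is fixed by the afterGettingFunc typedef in FramedSource.hh (quoted from the live555 headers as I recall them, so treat as approximate):

typedef void (afterGettingFunc)(void* clientData, unsigned frameSize,
                                unsigned numTruncatedBytes,
                                struct timeval presentationTime,
                                unsigned durationInMicroseconds);

This is why FileSink::afterGettingFrame below takes exactly these parameters, with presentationTime among them.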
FileSink::afterGettingFrame()
void FileSink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes,
                                 struct timeval presentationTime, unsigned /*durationInMicroseconds*/)
{
  FileSink* sink = (FileSink*)clientData;
  sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime);
}
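For completeness: the static afterGettingFrame becomes the fAfterGettingFunc callback when the sink requests the next frame. FileSink::continuePlaying performs that registration essentially as follows (from the live555 sources, lightly abridged):

Boolean FileSink::continuePlaying() {
  if (fSource == NULL) return False;

  // Register the static callback (and 'this' as its client data); when the
  // source completes a frame, FramedSource::afterGetting() invokes it with
  // fPresentationTime among the arguments:
  fSource->getNextFrame(fBuffer, fBufferSize,
                        afterGettingFrame, this,
                        onSourceClosure, this);
  return True;
}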