bool ViEChannel::ChannelDecodeThreadFunction
bool ViEChannel::ChannelDecodeProcess()
int32_t Decode(uint16_t maxWaitTimeMs)
int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs)
VCMEncodedFrame* VCMReceiver::FrameForDecoding
VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode
void VCMJitterBuffer::UpdateJitterEstimate
void VCMJitterBuffer::UpdateJitterEstimate
VCMInterFrameDelay::CalculateDelay
// Calculates the delay of a frame with the given timestamp.
// This method is called when the frame is complete.
bool
VCMInterFrameDelay::CalculateDelay(uint32_t timestamp,
                                   int64_t *delay,
                                   int64_t currentWallClock)
{
    if (_prevWallClock == 0)
    {
        // First set of data, initialization, wait for next frame
        _prevWallClock = currentWallClock;
        _prevTimestamp = timestamp;
        *delay = 0;
        return true;
    }

    int32_t prevWrapArounds = _wrapArounds;
    CheckForWrapArounds(timestamp);

    // This will be -1 for backward wrap arounds and +1 for forward wrap arounds
    int32_t wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;

    // Account for reordering in jitter variance estimate in the future?
    // Note that this also captures incomplete frames which are grabbed
    // for decoding after a later frame has been complete, i.e. real
    // packet losses.
    if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) ||
        wrapAroundsSincePrev < 0)
    {
        *delay = 0;
        return false;
    }

    // Compute the compensated timestamp difference and convert it to ms and
    // round it to closest integer.
    _dTS = static_cast<int64_t>((timestamp + wrapAroundsSincePrev *
                                (static_cast<int64_t>(1) << 32) - _prevTimestamp) / 90.0 + 0.5);

    // frameDelay is the difference of dT and dTS -- i.e. the difference of
    // the wall clock time difference and the timestamp difference between
    // two following frames.
    *delay = static_cast<int64_t>(currentWallClock - _prevWallClock - _dTS);

    _prevTimestamp = timestamp;
    _prevWallClock = currentWallClock;

    return true;
}
currentWallClock: wall-clock arrival time (ms) of the last packet of the current frame
_prevWallClock: wall-clock arrival time of the last packet of the previous frame
timestamp: RTP timestamp of the current frame
_prevTimestamp: RTP timestamp of the previous frame
_wrapArounds: counts how many times the 32-bit RTP timestamp has wrapped around, i.e. jumped from near 2^32-1 back to 0.

So CalculateDelay() measures the wall-clock gap between the last packet of the previous frame and the last packet of the current frame, and subtracts the gap implied by their RTP timestamps. The excess (*delay = dT - dTS) is, in effect, the extra time spent transmitting the current frame, and it is this per-frame delay variation that feeds the jitter estimator.
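To make the arithmetic concrete, here is a minimal standalone sketch of the same computation with made-up sample numbers (hypothetical values, 90 kHz RTP clock, no wrap-around):

#include <cstdint>
#include <cstdio>

int main() {
    int64_t prevWallClock = 10000;   // last packet of previous frame arrived at t = 10000 ms
    int64_t currWallClock = 10045;   // last packet of current frame arrived at t = 10045 ms
    uint32_t prevTimestamp = 90000;  // previous frame RTP timestamp
    uint32_t currTimestamp = 92700;  // current frame RTP timestamp (+30 ms at 90 kHz)

    // dTS: timestamp difference converted to ms and rounded.
    int64_t dTS = static_cast<int64_t>((currTimestamp - prevTimestamp) / 90.0 + 0.5);  // 30 ms
    // delay = dT - dTS: actual inter-arrival minus expected inter-arrival.
    int64_t delay = (currWallClock - prevWallClock) - dTS;  // 45 - 30 = 15 ms

    printf("dTS = %lld ms, frame delay = %lld ms\n",
           static_cast<long long>(dTS), static_cast<long long>(delay));
    return 0;
}

A positive value means the current frame arrived later than its timestamp spacing predicts; this is exactly the frameDelayMS sample consumed by UpdateEstimate() below.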
// Updates the estimates with the new measurements
void
VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes,
                                   bool incompleteFrame /* = false */)
{
    if (frameSizeBytes == 0)
    {
        return;
    }
    int deltaFS = frameSizeBytes - _prevFrameSize;
    if (_fsCount < kFsAccuStartupSamples)
    {
        _fsSum += frameSizeBytes;
        _fsCount++;
    }
    else if (_fsCount == kFsAccuStartupSamples)
    {
        // Give the frame size filter its initial value: the average of
        // the startup samples.
        _avgFrameSize = static_cast<double>(_fsSum) /
                        static_cast<double>(_fsCount);
        _fsCount++;
    }
    if (!incompleteFrame || frameSizeBytes > _avgFrameSize)
    {
        double avgFrameSize = _phi * _avgFrameSize +
                              (1 - _phi) * frameSizeBytes;
        if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize))
        {
            // Only update the average frame size if this sample wasn't a
            // key frame
            _avgFrameSize = avgFrameSize;
        }
        // Update the variance anyway since we want to capture cases where
        // we only get key frames.
        _varFrameSize = VCM_MAX(_phi * _varFrameSize + (1 - _phi) *
                                (frameSizeBytes - avgFrameSize) *
                                (frameSizeBytes - avgFrameSize), 1.0);
    }

    // Update max frameSize estimate
    _maxFrameSize = VCM_MAX(_psi * _maxFrameSize,
                            static_cast<double>(frameSizeBytes));

    if (_prevFrameSize == 0)
    {
        _prevFrameSize = frameSizeBytes;
        return;
    }
    _prevFrameSize = frameSizeBytes;

    // Only update the Kalman filter if the sample is not considered
    // an extreme outlier. Even if it is an extreme outlier from a
    // delay point of view, if the frame size also is large the
    // deviation is probably due to an incorrect line slope.
    double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);

    if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
        frameSizeBytes > _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize))
    {
        // Update the variance of the deviation from the
        // line given by the Kalman filter
        EstimateRandomJitter(deviation, incompleteFrame);
        // Prevent updating with frames which have been congested by a large
        // frame, and therefore arrive almost at the same time as that frame.
        // This can occur when we receive a large frame (key frame) which
        // has been delayed. The next frame is of normal size (delta frame),
        // and thus deltaFS will be << 0. This removes all frame samples
        // which arrive after a key frame.
        if ((!incompleteFrame || deviation >= 0.0) &&
            static_cast<double>(deltaFS) > -0.25 * _maxFrameSize)
        {
            // Update the Kalman filter with the new data
            KalmanEstimateChannel(frameDelayMS, deltaFS);
        }
    }
    else
    {
        int nStdDev = (deviation >= 0) ? _numStdDevDelayOutlier
                                       : -_numStdDevDelayOutlier;
        EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame);
    }

    // Post process the total estimated jitter
    if (_startupCount >= kStartupDelaySamples)
    {
        PostProcessEstimate();
    }
    else
    {
        _startupCount++;
    }
}
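UpdateEstimate() leans on DeviationFromExpectedDelay(), which is not shown above. Based on how _theta is used in this file (slope _theta[0] in ms per byte of frame-size delta, constant offset _theta[1]), it boils down to subtracting the delay predicted by the current channel model from the measured delay; a minimal sketch, not a verbatim copy:

double VCMJitterEstimator::DeviationFromExpectedDelay(int64_t frameDelayMS,
                                                      int32_t deltaFSBytes) const
{
    // Measured delay minus the delay the Kalman line predicts for a frame
    // that is deltaFSBytes larger or smaller than the previous one.
    return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]);
}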
Next, the jitter estimate (in ms) is fetched and fed into the render timing, so that display pacing stays smooth.
bool ViEChannel::ChannelDecodeProcess()
int32_t Decode(uint16_t maxWaitTimeMs)
int32_t VideoReceiver::Decode
VCMEncodedFrame* VCMReceiver::FrameForDecoding
uint32_t VCMJitterBuffer::EstimatedJitterMs()
int VCMJitterEstimator::GetJitterEstimate(double rttMultiplier)
// We have a frame - Set timing and render timestamp.
timing_->SetJitterDelay(jitter_buffer_.EstimatedJitterMs());
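GetJitterEstimate() itself is not reproduced here; roughly (a simplified sketch inferred from the structure of the same file, not a verbatim copy), the value handed to SetJitterDelay() combines a channel term and a noise term:

// Sketch (simplified): the estimate is the worst-case extra transmission
// time of a large (key) frame relative to an average frame, via the Kalman
// slope, plus an allowance of a few standard deviations of delay noise.
double jitterMS = _theta[0] * (_maxFrameSize - _avgFrameSize)            // channel term
                  + _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset;  // noise term
// When NACK retransmissions are active, rttMultiplier * RTT is added on
// top, so the buffer can also absorb a retransmission round-trip.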
bool ViEChannel::ChannelDecodeProcess()
int32_t Decode(uint16_t maxWaitTimeMs)
int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs)
void VCMTiming::UpdateCurrentDelay
void VCMTiming::UpdateCurrentDelay(int64_t render_time_ms,
int64_t actual_decode_time_ms) {
CriticalSectionScoped cs(crit_sect_);
uint32_t target_delay_ms = TargetDelayInternal();
int64_t delayed_ms = actual_decode_time_ms -
(render_time_ms - MaxDecodeTimeMs() - render_delay_ms_);
if (delayed_ms < 0) {
return;
}
if (current_delay_ms_ + delayed_ms <= target_delay_ms) {
current_delay_ms_ += static_cast<uint32_t>(delayed_ms);
} else {
current_delay_ms_ = target_delay_ms;
}
}
uint32_t VCMTiming::TargetDelayInternal() const {
return std::max(min_playout_delay_ms_,
jitter_delay_ms_ + MaxDecodeTimeMs() + render_delay_ms_);
}
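A quick numeric walk-through with hypothetical numbers: with jitter_delay_ms_ = 50, MaxDecodeTimeMs() = 20, render_delay_ms_ = 10 and min_playout_delay_ms_ = 0, the target delay is max(0, 50 + 20 + 10) = 80 ms. If a frame with render_time_ms = 1000 should have started decoding no later than 1000 - 20 - 10 = 970 but actually starts at actual_decode_time_ms = 985, then delayed_ms = 15, and current_delay_ms_ grows by 15 ms, capped at the 80 ms target.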
Here, wait_time_ms is computed to determine how long to wait before the next frame is taken out for decoding.
uint32_t wait_time_ms = timing_->MaxWaitingTime(
next_render_time_ms, clock_->TimeInMilliseconds());
uint32_t VCMTiming::MaxWaitingTime(int64_t render_time_ms, int64_t now_ms)
const {
CriticalSectionScoped cs(crit_sect_);
const int64_t max_wait_time_ms = render_time_ms - now_ms -
MaxDecodeTimeMs() - render_delay_ms_;
if (max_wait_time_ms < 0) {
return 0;
}
return static_cast<uint32_t>(max_wait_time_ms);
}
next_render_time_ms is obtained as follows; it accounts for jitter_delay + decode_delay + render_delay together.
next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms)
const {
CriticalSectionScoped cs(crit_sect_);
const int64_t render_time_ms = RenderTimeMsInternal(frame_timestamp, now_ms);
return render_time_ms;
}
int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
int64_t now_ms) const {
int64_t estimated_complete_time_ms =
ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp);
if (estimated_complete_time_ms == -1) {
estimated_complete_time_ms = now_ms;
}
// Make sure that we have at least the playout delay.
uint32_t actual_delay = std::max(current_delay_ms_, min_playout_delay_ms_);
return estimated_complete_time_ms + actual_delay;
}
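Continuing the hypothetical numbers from above: if ExtrapolateLocalTime() maps the frame's RTP timestamp to an estimated completion time of 2000 ms and current_delay_ms_ = 80, then render_time_ms = 2080. With now_ms = 2030, MaxDecodeTimeMs() = 20 and render_delay_ms_ = 10, MaxWaitingTime() returns 2080 - 2030 - 20 - 10 = 20 ms: the decode thread may block up to 20 ms waiting for the next frame before it must start decoding to hit the render deadline.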