For reference, WebRTC's implementation of aspect-preserving crop-and-scale:
int Scaler::Scale(const VideoFrame& src_frame, VideoFrame* dst_frame) {
  assert(dst_frame);
  if (src_frame.IsZeroSize())
    return -1;
  if (!set_)
    return -2;

  // Make sure the destination frame is of sufficient size.
  dst_frame->set_video_frame_buffer(
      buffer_pool_.CreateBuffer(dst_width_, dst_height_));

  // We want to preserve aspect ratio instead of stretching the frame.
  // Therefore, we need to crop the source frame. Calculate the largest center
  // aligned region of the source frame that can be used.
  const int cropped_src_width =
      std::min(src_width_, dst_width_ * src_height_ / dst_height_);
  const int cropped_src_height =
      std::min(src_height_, dst_height_ * src_width_ / dst_width_);
  // Make sure the offsets are even to avoid rounding errors for the U/V planes.
  const int src_offset_x = ((src_width_ - cropped_src_width) / 2) & ~1;
  const int src_offset_y = ((src_height_ - cropped_src_height) / 2) & ~1;
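  // Worked example (not from the original source), assuming a hypothetical
  // 1280x720 source scaled to 640x480:
  //   cropped_src_width  = min(1280, 640 * 720 / 480) = 960
  //   cropped_src_height = min(720, 480 * 1280 / 640) = 720
  //   src_offset_x = ((1280 - 960) / 2) & ~1 = 160
  //   src_offset_y = ((720 - 720) / 2) & ~1 = 0
  // i.e. 160 columns are cropped from each side so the 16:9 source matches
  // the 4:3 target before scaling.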
  // For I420, the U and V planes are subsampled by two in each dimension,
  // so the chroma pointers advance by half the luma offsets.
  const uint8_t* y_ptr = src_frame.buffer(kYPlane) +
                         src_offset_y * src_frame.stride(kYPlane) +
                         src_offset_x;
  const uint8_t* u_ptr = src_frame.buffer(kUPlane) +
                         src_offset_y / 2 * src_frame.stride(kUPlane) +
                         src_offset_x / 2;
  const uint8_t* v_ptr = src_frame.buffer(kVPlane) +
                         src_offset_y / 2 * src_frame.stride(kVPlane) +
                         src_offset_x / 2;

  return libyuv::I420Scale(y_ptr,
                           src_frame.stride(kYPlane),
                           u_ptr,
                           src_frame.stride(kUPlane),
                           v_ptr,
                           src_frame.stride(kVPlane),
                           cropped_src_width, cropped_src_height,
                           dst_frame->buffer(kYPlane),
                           dst_frame->stride(kYPlane),
                           dst_frame->buffer(kUPlane),
                           dst_frame->stride(kUPlane),
                           dst_frame->buffer(kVPlane),
                           dst_frame->stride(kVPlane),
                           dst_width_, dst_height_,
                           libyuv::FilterMode(method_));
}
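
The method above depends on WebRTC's VideoFrame and I420BufferPool machinery. As a rough illustration of the same crop-then-scale idea driven against libyuv directly, here is a minimal standalone sketch; the function name CropAndScaleI420, the tightly packed plane layout (stride == width), and the kFilterBox filter choice are assumptions made for this example, not part of the WebRTC code.

// Minimal sketch: crop the source to the destination aspect ratio, then
// scale with libyuv::I420Scale. Assumes tightly packed I420 buffers
// (Y plane, then U, then V; stride == width), which real frames often
// do not satisfy.
#include <algorithm>
#include <cstdint>

#include "libyuv/scale.h"  // libyuv::I420Scale, libyuv::kFilterBox

int CropAndScaleI420(const uint8_t* src, int src_w, int src_h,
                     uint8_t* dst, int dst_w, int dst_h) {
  // Largest center-aligned crop with the destination aspect ratio,
  // with even offsets for the chroma planes (same math as above).
  const int crop_w = std::min(src_w, dst_w * src_h / dst_h);
  const int crop_h = std::min(src_h, dst_h * src_w / dst_w);
  const int off_x = ((src_w - crop_w) / 2) & ~1;
  const int off_y = ((src_h - crop_h) / 2) & ~1;

  // Plane base pointers for a tightly packed I420 layout.
  const uint8_t* src_y = src;
  const uint8_t* src_u = src + src_w * src_h;
  const uint8_t* src_v = src_u + (src_w / 2) * (src_h / 2);
  uint8_t* dst_y = dst;
  uint8_t* dst_u = dst + dst_w * dst_h;
  uint8_t* dst_v = dst_u + (dst_w / 2) * (dst_h / 2);

  return libyuv::I420Scale(
      src_y + off_y * src_w + off_x, src_w,
      src_u + (off_y / 2) * (src_w / 2) + off_x / 2, src_w / 2,
      src_v + (off_y / 2) * (src_w / 2) + off_x / 2, src_w / 2,
      crop_w, crop_h,
      dst_y, dst_w, dst_u, dst_w / 2, dst_v, dst_w / 2,
      dst_w, dst_h, libyuv::kFilterBox);
}

Called with a 1280x720 source buffer and a 640x480 destination, this reproduces the crop worked through in the comments above. Note that the original method instead passes its configured filter through as libyuv::FilterMode(method_).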