WebRTC Resolution-Switching Logic


Updating the Maximum Pixel Count

1. Receiving the resource usage state change notification
void ResourceAdaptationProcessor::OnResourceUsageStateMeasured(
    rtc::scoped_refptr<Resource> resource,
    ResourceUsageState usage_state) {
  MitigationResultAndLogMessage result_and_message;
  switch (usage_state) {
    case ResourceUsageState::kOveruse:
      result_and_message = OnResourceOveruse(resource);
      break;
    case ResourceUsageState::kUnderuse:
      result_and_message = OnResourceUnderuse(resource);
      break;
  }
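  // The full source also logs result_and_message here; omitted in this excerpt.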
}
2. Handling resource overuse

ResourceAdaptationProcessor::MitigationResultAndLogMessage
ResourceAdaptationProcessor::OnResourceOveruse(
    rtc::scoped_refptr<Resource> reason_resource) {
  RTC_DCHECK_RUN_ON(task_queue_);
  // How can this stream be adapted down?
  // Get the downward adaptation step.
  Adaptation adaptation = stream_adapter_->GetAdaptationDown();
  UpdateResourceLimitations(reason_resource, adaptation.restrictions(),
                            adaptation.counters());
  // Apply the adaptation.
  stream_adapter_->ApplyAdaptation(adaptation, reason_resource);
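  // Early-exit checks and the returned MitigationResultAndLogMessage are
  // omitted in this excerpt.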
}
3. Getting the downward adaptation step

Adaptation VideoStreamAdapter::GetAdaptationDown() {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  VideoStreamInputState input_state = input_state_provider_->InputState();
  ++adaptation_validation_id_;
  // Per the configured degradation preference, compute the downgraded
  // resolution and frame rate.
  RestrictionsOrState restrictions_or_state =
      GetAdaptationDownStep(input_state, current_restrictions_);
  if (MinPixelLimitReached(input_state)) {
    encoder_stats_observer_->OnMinPixelLimitReached();
  }
  // Check for min_fps
  if (degradation_preference_ == DegradationPreference::BALANCED &&
      absl::holds_alternative<RestrictionsWithCounters>(
          restrictions_or_state)) {
    restrictions_or_state = AdaptIfFpsDiffInsufficient(
        input_state,
        absl::get<RestrictionsWithCounters>(restrictions_or_state));
  }
  return RestrictionsOrStateToAdaptation(restrictions_or_state, input_state);
}
4. Deriving resolution and frame rate from the degradation preference

VideoStreamAdapter::RestrictionsOrState
VideoStreamAdapter::GetAdaptationDownStep(
    const VideoStreamInputState& input_state,
    const RestrictionsWithCounters& current_restrictions) const {
  switch (degradation_preference_) {
    case DegradationPreference::BALANCED: {
      // Try scale down framerate, if lower.
      RestrictionsOrState decrease_frame_rate =
          DecreaseFramerate(input_state, current_restrictions);
      if (absl::holds_alternative<RestrictionsWithCounters>(
              decrease_frame_rate)) {
        return decrease_frame_rate;
      }
      // else, decrease resolution.
      // Fall through to the next case.
      [[fallthrough]];
    }
    case DegradationPreference::MAINTAIN_FRAMERATE: {
      return DecreaseResolution(input_state, current_restrictions);
    }
    case DegradationPreference::MAINTAIN_RESOLUTION: {
      return DecreaseFramerate(input_state, current_restrictions);
    }
    case DegradationPreference::DISABLED:
      return Adaptation::Status::kAdaptationDisabled;
  }
}
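Which branch runs here is decided by the application. As a minimal sketch (assuming WebRTC's native RtpSenderInterface API), the preference can be set per sender through RtpParameters:

// Sketch: ask this sender to keep frame rate and drop resolution first.
void PreferMaintainFramerate(webrtc::RtpSenderInterface* sender) {
  webrtc::RtpParameters parameters = sender->GetParameters();
  parameters.degradation_preference =
      webrtc::DegradationPreference::MAINTAIN_FRAMERATE;
  sender->SetParameters(parameters);
}

If no preference is set, the default typically depends on content type (BALANCED for camera video, MAINTAIN_RESOLUTION for screenshare).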
5. Computing the reduced resolution

WebRTC does not compute the new resolution directly; it treats resolution as a total pixel count, and the reduced count is 3/5 of the previous one. For example, a 1280x720 input (921,600 pixels) yields a target of 552,960 pixels.

VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution(
    const VideoStreamInputState& input_state,
    const RestrictionsWithCounters& current_restrictions) {
  // The target pixel count is 3/5 of the current one.
  int target_pixels =
      GetLowerResolutionThan(input_state.frame_size_pixels().value());
  // Use single active stream if set, this stream could be lower than the input.
  int target_pixels_min =
      GetLowerResolutionThan(input_state.single_active_stream_pixels().value_or(
          input_state.frame_size_pixels().value()));
  if (!CanDecreaseResolutionTo(target_pixels, target_pixels_min, input_state,
                               current_restrictions.restrictions)) {
    return Adaptation::Status::kLimitReached;
  }
  RestrictionsWithCounters new_restrictions = current_restrictions;
  // Set the desired maximum pixel count.
  new_restrictions.restrictions.set_max_pixels_per_frame(
      target_pixels != std::numeric_limits<int>::max()
          ? absl::optional<size_t>(target_pixels)
          : absl::nullopt);
  new_restrictions.restrictions.set_target_pixels_per_frame(absl::nullopt);
  ++new_restrictions.counters.resolution_adaptations;
  return new_restrictions;
}
​
int GetLowerResolutionThan(int pixel_count) {
  RTC_DCHECK(pixel_count != std::numeric_limits<int>::max());
  return (pixel_count * 3) / 5;
}
6. Propagating the new pixel limit to the frame adapter

void VideoStreamAdapter::ApplyAdaptation(
    const Adaptation& adaptation,
    rtc::scoped_refptr<Resource> resource) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_);
  if (adaptation.status() != Adaptation::Status::kValid)
    return;
  // Record whether this adaptation changes the resolution (a frame size
  // change is then awaited).
  if (DidIncreaseResolution(current_restrictions_.restrictions,
                            adaptation.restrictions())) {
    awaiting_frame_size_change_.emplace(
        true, adaptation.input_state().frame_size_pixels().value());
  } else if (DidDecreaseResolution(current_restrictions_.restrictions,
                                   adaptation.restrictions())) {
    awaiting_frame_size_change_.emplace(
        false, adaptation.input_state().frame_size_pixels().value());
  } else {
    awaiting_frame_size_change_ = absl::nullopt;
  }
  current_restrictions_ = {adaptation.restrictions(), adaptation.counters()};
  BroadcastVideoRestrictionsUpdate(adaptation.input_state(), resource);
}
​
void VideoStreamAdapter::BroadcastVideoRestrictionsUpdate(
    const VideoStreamInputState& input_state,
    const rtc::scoped_refptr<Resource>& resource) {
  // Per the degradation preference, filter out restrictions on the dimension
  // that must be preserved (resolution or frame rate).
  VideoSourceRestrictions filtered = FilterRestrictionsByDegradationPreference(
      source_restrictions(), degradation_preference_);
  if (last_filtered_restrictions_ == filtered) {
    return;
  }
  for (auto* restrictions_listener : restrictions_listeners_) {
    restrictions_listener->OnVideoSourceRestrictionsUpdated(
        filtered, current_restrictions_.counters, resource,
        source_restrictions());
  }
  last_video_source_restrictions_ = current_restrictions_.restrictions;
  last_filtered_restrictions_ = filtered;
}
​
void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated(
    VideoSourceRestrictions restrictions,
    const VideoAdaptationCounters& adaptation_counters,
    rtc::scoped_refptr<Resource> reason,
    const VideoSourceRestrictions& unfiltered_restrictions) {
  latest_restrictions_ = restrictions;
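  // The new restrictions are applied to the source on the worker queue below.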
​
  worker_queue_->PostTask(SafeTask(
      task_safety_.flag(), [this, restrictions = std::move(restrictions)]() {
        RTC_DCHECK_RUN_ON(worker_queue_);
        video_source_sink_controller_.SetRestrictions(std::move(restrictions));
        video_source_sink_controller_.PushSourceSinkSettings();
      }));
}
​
void VideoSourceSinkController::PushSourceSinkSettings() {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  if (!source_)
    return;
  // Push the desired resolution and frame rate to the source.
  rtc::VideoSinkWants wants = CurrentSettingsToSinkWants();
  source_->AddOrUpdateSink(sink_, wants);
}
​
rtc::VideoSinkWants VideoSourceSinkController::CurrentSettingsToSinkWants()
    const {
  rtc::VideoSinkWants wants;
  wants.rotation_applied = rotation_applied_;
  // `wants.black_frames` is not used, it always has its default value false.
  wants.max_pixel_count =
      rtc::dchecked_cast<int>(restrictions_.max_pixels_per_frame().value_or(
          std::numeric_limits<int>::max()));
  wants.target_pixel_count =
      restrictions_.target_pixels_per_frame().has_value()
          ? absl::optional<int>(rtc::dchecked_cast<int>(
                restrictions_.target_pixels_per_frame().value()))
          : absl::nullopt;
  wants.max_framerate_fps =
      restrictions_.max_frame_rate().has_value()
          ? static_cast<int>(restrictions_.max_frame_rate().value())
          : std::numeric_limits<int>::max();
  wants.resolution_alignment = resolution_alignment_;
  wants.max_pixel_count =
      std::min(wants.max_pixel_count,
               rtc::dchecked_cast<int>(pixels_per_frame_upper_limit_.value_or(
                   std::numeric_limits<int>::max())));
  wants.max_framerate_fps =
      std::min(wants.max_framerate_fps,
               frame_rate_upper_limit_.has_value()
                   ? static_cast<int>(frame_rate_upper_limit_.value())
                   : std::numeric_limits<int>::max());
  wants.resolutions = resolutions_;
  wants.is_active = active_;
  wants.requested_resolution = requested_resolution_;
  return wants;
}
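The encoder is only one of a source's sinks; any sink can use VideoSinkWants to ask its source for less data. A minimal sketch (ThumbnailSink and the 640x360 budget are hypothetical):

// Hypothetical sink that asks its source for at most ~360p worth of pixels.
class ThumbnailSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame& frame) override {
    // Consume the already-adapted frame.
  }
};

void AttachThumbnailSink(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
                         ThumbnailSink* sink) {
  rtc::VideoSinkWants wants;
  wants.max_pixel_count = 640 * 360;  // A pixel budget, not an exact size.
  source->AddOrUpdateSink(sink, wants);
}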
​
void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
                                 const rtc::VideoSinkWants& wants) {
  RTC_DCHECK_RUN_ON(worker_thread_);
  VideoSourceBaseGuarded::AddOrUpdateSink(sink, wants);
  rtc::VideoSinkWants modified_wants = wants;
  modified_wants.black_frames = !enabled_w_;
  video_source_->internal()->AddOrUpdateSink(sink, modified_wants);
}
​
void AdaptedVideoTrackSource::AddOrUpdateSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  broadcaster_.AddOrUpdateSink(sink, wants);
  OnSinkWantsChanged(broadcaster_.wants());
}
​
void AdaptedVideoTrackSource::OnSinkWantsChanged(
    const rtc::VideoSinkWants& wants) {
  video_adapter_.OnSinkWants(wants);
}
​
void VideoAdapter::OnSinkWants(const rtc::VideoSinkWants& sink_wants) {
  webrtc::MutexLock lock(&mutex_);
  // Save these values; they are used later when adapting frames.
  resolution_request_max_pixel_count_ = sink_wants.max_pixel_count;
  resolution_request_target_pixel_count_ =
      sink_wants.target_pixel_count.value_or(
          resolution_request_max_pixel_count_);
  max_framerate_request_ = sink_wants.max_framerate_fps;
  resolution_alignment_ = cricket::LeastCommonMultiple(
      source_resolution_alignment_, sink_wants.resolution_alignment);
  // ...
}
​
void VideoBroadcaster::AddOrUpdateSink(
    VideoSinkInterface<webrtc::VideoFrame>* sink,
    const VideoSinkWants& wants) {
  RTC_DCHECK(sink != nullptr);
  webrtc::MutexLock lock(&sinks_and_wants_lock_);
  if (!FindSinkPair(sink)) {
    // `Sink` is a new sink, which didn't receive previous frame.
    previous_frame_sent_to_all_sinks_ = false;
​
    if (last_constraints_.has_value()) {
      RTC_LOG(LS_INFO) << __func__ << " forwarding stored constraints min_fps "
                       << last_constraints_->min_fps.value_or(-1) << " max_fps "
                       << last_constraints_->max_fps.value_or(-1);
      sink->OnConstraintsChanged(*last_constraints_);
    }
  }
  VideoSourceBase::AddOrUpdateSink(sink, wants);
  // Re-aggregate the wants across all sinks.
  UpdateWants();
}
​
void VideoBroadcaster::UpdateWants() {
  VideoSinkWants wants;
  wants.rotation_applied = false;
  wants.resolution_alignment = 1;
  wants.aggregates.emplace(VideoSinkWants::Aggregates());
  wants.is_active = false;
​
  bool ignore_inactive_encoders_old_api = false;
  for (auto& sink : sink_pairs()) {
    if (sink.wants.is_active && sink.wants.requested_resolution.has_value()) {
      ignore_inactive_encoders_old_api = true;
      break;
    }
  }
​
  for (auto& sink : sink_pairs()) {
    if (!sink.wants.is_active &&
        (sink.wants.requested_resolution || ignore_inactive_encoders_old_api)) {
      continue;
    }
    // wants.rotation_applied == ANY(sink.wants.rotation_applied)
    if (sink.wants.rotation_applied) {
      wants.rotation_applied = true;
    }
    // wants.max_pixel_count == MIN(sink.wants.max_pixel_count)
    if (sink.wants.max_pixel_count < wants.max_pixel_count) {
      wants.max_pixel_count = sink.wants.max_pixel_count;
    }
    // Select the minimum requested target_pixel_count, if any, of all sinks so
    // that we don't over utilize the resources for any one.
    // TODO(sprang): Consider using the median instead, since the limit can be
    // expressed by max_pixel_count.
    // Update target_pixel_count.
    if (sink.wants.target_pixel_count &&
        (!wants.target_pixel_count ||
         (*sink.wants.target_pixel_count < *wants.target_pixel_count))) {
      wants.target_pixel_count = sink.wants.target_pixel_count;
    }
    // Select the minimum for the requested max framerates.
    if (sink.wants.max_framerate_fps < wants.max_framerate_fps) {
      wants.max_framerate_fps = sink.wants.max_framerate_fps;
    }
    wants.resolution_alignment = cricket::LeastCommonMultiple(
        wants.resolution_alignment, sink.wants.resolution_alignment);
​
    // Pick MAX(requested_resolution) since the actual can be downscaled
    // in encoder instead.
    if (sink.wants.requested_resolution) {
      if (!wants.requested_resolution) {
        wants.requested_resolution = sink.wants.requested_resolution;
      } else {
        wants.requested_resolution->width =
            std::max(wants.requested_resolution->width,
                     sink.wants.requested_resolution->width);
        wants.requested_resolution->height =
            std::max(wants.requested_resolution->height,
                     sink.wants.requested_resolution->height);
      }
    } else if (sink.wants.is_active) {
      wants.aggregates->any_active_without_requested_resolution = true;
    }
​
    wants.is_active |= sink.wants.is_active;
  }
​
  if (wants.target_pixel_count &&
      *wants.target_pixel_count >= wants.max_pixel_count) {
    wants.target_pixel_count.emplace(wants.max_pixel_count);
  }
  current_wants_ = wants;
}
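To make the merge rules concrete, a hypothetical trace (FakeSink stands in for any sink implementation): two sinks attach with different budgets, and the merged wants take the minimum of each limit.

class FakeSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame&) override {}
};

void MergeExample() {
  rtc::VideoBroadcaster broadcaster;
  FakeSink sink_a;
  FakeSink sink_b;

  rtc::VideoSinkWants wants_a;
  wants_a.max_pixel_count = 1280 * 720;  // 921600
  wants_a.max_framerate_fps = 30;
  broadcaster.AddOrUpdateSink(&sink_a, wants_a);

  rtc::VideoSinkWants wants_b;
  wants_b.max_pixel_count = 960 * 540;  // 518400
  wants_b.max_framerate_fps = 15;
  broadcaster.AddOrUpdateSink(&sink_b, wants_b);

  // Merged: broadcaster.wants().max_pixel_count == 518400,
  // broadcaster.wants().max_framerate_fps == 15 (minima across sinks).
}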

Performing the Resolution Switch

After WebRTC captures a frame, adaptFrame is called to adapt it: from the capture resolution and the maximum pixel count, a suitable transmission resolution is derived.

1. Adapting each captured frame

  public void onFrameCaptured(VideoFrame frame) {
    final VideoProcessor.FrameAdaptationParameters parameters =
        nativeAndroidVideoTrackSource.adaptFrame(frame);
    synchronized (videoProcessorLock) {
      if (videoProcessor != null) {
        videoProcessor.onFrameCaptured(frame, parameters);
        return;
      }
    }
​
    VideoFrame adaptedFrame = VideoProcessor.applyFrameAdaptationParameters(frame, parameters);
    if (adaptedFrame != null) {
      nativeAndroidVideoTrackSource.onFrameCaptured(adaptedFrame);
      adaptedFrame.release();
    }
  }
};
​
JNI_GENERATOR_EXPORT jobject Java_org_webrtc_NativeAndroidVideoTrackSource_nativeAdaptFrame(
    JNIEnv* env,
    jclass jcaller,
    jlong nativeAndroidVideoTrackSource,
    jint width,
    jint height,
    jint rotation,
    jlong timestampNs) {
  AndroidVideoTrackSource* native =
      reinterpret_cast<AndroidVideoTrackSource*>(nativeAndroidVideoTrackSource);
  CHECK_NATIVE_PTR(env, jcaller, native, "AdaptFrame", NULL);
  return native->AdaptFrame(env, width, height, rotation, timestampNs).Release();
}
​
ScopedJavaLocalRef<jobject> AndroidVideoTrackSource::AdaptFrame(
    JNIEnv* env,
    jint j_width,
    jint j_height,
    jint j_rotation,
    jlong j_timestamp_ns) {
  const VideoRotation rotation = jintToVideoRotation(j_rotation);
​
  const int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  const int64_t aligned_timestamp_ns =
      align_timestamps_ ? rtc::kNumNanosecsPerMicrosec *
                              timestamp_aligner_.TranslateTimestamp(
                                  camera_time_us, rtc::TimeMicros())
                        : j_timestamp_ns;
​
  int adapted_width = 0;
  int adapted_height = 0;
  int crop_width = 0;
  int crop_height = 0;
  int crop_x = 0;
  int crop_y = 0;
  bool drop;
​
  if (rotation % 180 == 0) {
    drop = !rtc::AdaptedVideoTrackSource::AdaptFrame(
        j_width, j_height, camera_time_us, &adapted_width, &adapted_height,
        &crop_width, &crop_height, &crop_x, &crop_y);
  } else {
    // Swap all width/height and x/y.
    drop = !rtc::AdaptedVideoTrackSource::AdaptFrame(
        j_height, j_width, camera_time_us, &adapted_height, &adapted_width,
        &crop_height, &crop_width, &crop_y, &crop_x);
  }
​
  return Java_NativeAndroidVideoTrackSource_createFrameAdaptationParameters(
      env, crop_x, crop_y, crop_width, crop_height, adapted_width,
      adapted_height, aligned_timestamp_ns, drop);
}
2. Running the adaptation to compute the output resolution

bool AdaptedVideoTrackSource::AdaptFrame(int width,
                                         int height,
                                         int64_t time_us,
                                         int* out_width,
                                         int* out_height,
                                         int* crop_width,
                                         int* crop_height,
                                         int* crop_x,
                                         int* crop_y) {
  {
    webrtc::MutexLock lock(&stats_mutex_);
    stats_ = Stats{width, height};
  }
  // No sink wants frames; nothing to do.
  if (!broadcaster_.frame_wanted()) {
    return false;
  }
​
  if (!video_adapter_.AdaptFrameResolution(
          width, height, time_us * rtc::kNumNanosecsPerMicrosec, crop_width,
          crop_height, out_width, out_height)) {
    broadcaster_.OnDiscardedFrame();
    // VideoAdapter dropped the frame.
    return false;
  }
​
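  // Center the crop region within the input frame.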
  *crop_x = (width - *crop_width) / 2;
  *crop_y = (height - *crop_height) / 2;
  return true;
}
​
bool VideoAdapter::AdaptFrameResolution(int in_width,
                                        int in_height,
                                        int64_t in_timestamp_ns,
                                        int* cropped_width,
                                        int* cropped_height,
                                        int* out_width,
                                        int* out_height) {
  webrtc::MutexLock lock(&mutex_);
  ++frames_in_;
  // The max output pixel count is the minimum of the requests from
  // OnOutputFormatRequest and OnResolutionFramerateRequest.
  int max_pixel_count = resolution_request_max_pixel_count_;
​
  // Select target aspect ratio and max pixel count depending on input frame
  // orientation.
  absl::optional<std::pair<int, int>> target_aspect_ratio;
  if (in_width > in_height) {
    target_aspect_ratio = output_format_request_.target_landscape_aspect_ratio;
    if (output_format_request_.max_landscape_pixel_count)
      max_pixel_count = std::min(
          max_pixel_count, *output_format_request_.max_landscape_pixel_count);
  } else {
    target_aspect_ratio = output_format_request_.target_portrait_aspect_ratio;
    if (output_format_request_.max_portrait_pixel_count)
      max_pixel_count = std::min(
          max_pixel_count, *output_format_request_.max_portrait_pixel_count);
  }
  // The target pixel count, capped by the sinks' updated maximum.
  int target_pixel_count =
      std::min(resolution_request_target_pixel_count_, max_pixel_count);
​
  // Drop the input frame if necessary.
  if (max_pixel_count <= 0 || DropFrame(in_timestamp_ns)) {
    // Show VAdapt log every 90 frames dropped. (3 seconds)
    if ((frames_in_ - frames_out_) % 90 == 0) {
      // TODO(fbarchard): Reduce to LS_VERBOSE when adapter info is not needed
      // in default calls.
      RTC_LOG(LS_INFO) << "VAdapt Drop Frame: scaled " << frames_scaled_
                       << " / out " << frames_out_ << " / in " << frames_in_
                       << " Changes: " << adaption_changes_
                       << " Input: " << in_width << "x" << in_height
                       << " timestamp: " << in_timestamp_ns
                       << " Output fps: " << max_framerate_request_ << "/"
                       << output_format_request_.max_fps.value_or(-1)
                       << " alignment: " << resolution_alignment_;
    }
​
    // Drop frame.
    return false;
  }
​
  // Calculate how the input should be cropped.
  if (!target_aspect_ratio || target_aspect_ratio->first <= 0 ||
      target_aspect_ratio->second <= 0) {
    *cropped_width = in_width;
    *cropped_height = in_height;
  } else {
    const float requested_aspect =
        target_aspect_ratio->first /
        static_cast<float>(target_aspect_ratio->second);
    *cropped_width =
        std::min(in_width, static_cast<int>(in_height * requested_aspect));
    *cropped_height =
        std::min(in_height, static_cast<int>(in_width / requested_aspect));
  }
  // Scale width/height according to the change in pixel budget.
  const Fraction scale =
      FindScale(*cropped_width, *cropped_height, target_pixel_count,
                max_pixel_count, variable_start_scale_factor_);
  // Adjust cropping slightly to get correctly aligned output size and a perfect
  // scale factor.
  // Round to keep the resolution alignment the encoder requires.
  *cropped_width = roundUp(*cropped_width,
                           scale.denominator * resolution_alignment_, in_width);
  *cropped_height = roundUp(
      *cropped_height, scale.denominator * resolution_alignment_, in_height);
  RTC_DCHECK_EQ(0, *cropped_width % scale.denominator);
  RTC_DCHECK_EQ(0, *cropped_height % scale.denominator);
​
  // Calculate final output size.
  *out_width = *cropped_width / scale.denominator * scale.numerator;
  *out_height = *cropped_height / scale.denominator * scale.numerator;
  RTC_DCHECK_EQ(0, *out_width % resolution_alignment_);
  RTC_DCHECK_EQ(0, *out_height % resolution_alignment_);
​
  ++frames_out_;
  if (scale.numerator != scale.denominator)
    ++frames_scaled_;
​
  previous_width_ = *out_width;
  previous_height_ = *out_height;
​
  return true;
}
3. Finding the resolution scale factor

FindScale does not simply return the largest pixel count below target_pixels. The choice is driven by min_pixel_diff: the ladder step whose total pixel count is closest to target_pixels wins, and that count may be larger than target_pixels (though never above max_pixels).


Fraction FindScale(int input_width,
                   int input_height,
                   int target_pixels,
                   int max_pixels,
                   bool variable_start_scale_factor) {
​
  const int input_pixels = input_width * input_height;
​
  // Don't scale up original.
  // If the target is at least the input pixel count, don't scale (never upscale).
  if (target_pixels >= input_pixels)
    return Fraction{1, 1};
​
  Fraction current_scale = Fraction{1, 1};
  Fraction best_scale = Fraction{1, 1};
​
  if (variable_start_scale_factor) {
    // Start scaling down by 2/3 depending on `input_width` and `input_height`.
    if (input_width % 3 == 0 && input_height % 3 == 0) {
      // 2/3 (then alternates 3/4, 2/3, 3/4,...).
      current_scale = Fraction{6, 6};
    }
    if (input_width % 9 == 0 && input_height % 9 == 0) {
      // 2/3, 2/3 (then alternates 3/4, 2/3, 3/4,...).
      current_scale = Fraction{36, 36};
    }
  }
​
  // The minimum (absolute) difference between the number of output pixels and
  // the target pixel count.
  int min_pixel_diff = std::numeric_limits<int>::max();
  if (input_pixels <= max_pixels) {
    // Start condition for 1/1 case, if it is less than max.
    min_pixel_diff = std::abs(input_pixels - target_pixels);
  }
​
  // The selected scale is not necessarily the first step below target_pixels;
  // min_pixel_diff picks whichever step's pixel count is closest to it.
  while (current_scale.scale_pixel_count(input_pixels) > target_pixels) {
    if (current_scale.numerator % 3 == 0 &&
        current_scale.denominator % 2 == 0) {
      // Multiply by 2/3.
      current_scale.numerator /= 3;
      current_scale.denominator /= 2;
    } else {
      // Multiply by 3/4.
      current_scale.numerator *= 3;
      current_scale.denominator *= 4;
    }
​
    int output_pixels = current_scale.scale_pixel_count(input_pixels);
    if (output_pixels <= max_pixels) {
      int diff = std::abs(target_pixels - output_pixels);
      // If this step is closer to the target, keep it as the best scale.
      if (diff < min_pixel_diff) {
        min_pixel_diff = diff;
        best_scale = current_scale;
      }
    }
  }
  best_scale.DivideByGcd();
​
  return best_scale;
}
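A concrete trace of this ladder, as a standalone sketch: Frac and ScalePixels are simplified stand-ins for the real Fraction type, assuming scale_pixel_count multiplies the pixel count by (numerator/denominator)^2, and ignoring variable_start_scale_factor. The input is 1280x720 with the 3/5 budget from DecreaseResolution.

#include <cstdio>
#include <cstdlib>
#include <limits>

struct Frac { int num; int den; };

int ScalePixels(Frac f, int pixels) {
  // Scaling both dimensions by num/den scales the pixel count by (num/den)^2.
  return pixels * f.num / f.den * f.num / f.den;
}

int main() {
  const int input = 1280 * 720;      // 921600 pixels
  const int target = input * 3 / 5;  // 552960, from GetLowerResolutionThan
  const int max_pixels = target;     // max_pixels_per_frame was set to target
  Frac cur{1, 1};
  Frac best{1, 1};
  int min_diff = std::numeric_limits<int>::max();
  while (ScalePixels(cur, input) > target) {
    if (cur.num % 3 == 0 && cur.den % 2 == 0) {
      cur.num /= 3;  // multiply by 2/3
      cur.den /= 2;
    } else {
      cur.num *= 3;  // multiply by 3/4
      cur.den *= 4;
    }
    const int out = ScalePixels(cur, input);
    if (out <= max_pixels && std::abs(target - out) < min_diff) {
      min_diff = std::abs(target - out);
      best = cur;
    }
  }
  // One step of 3/4 gives 960x540 (518400 pixels), the ladder step closest
  // to the 552960 budget without exceeding it.
  std::printf("best scale = %d/%d\n", best.num, best.den);
  return 0;
}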
​

At this point, we have adapted frame data ready for encoding.

To summarize the overall flow:

1. When ResourceAdaptationProcessor receives a resource usage state change, it adjusts the adaptation according to whether the state is kOveruse or kUnderuse.

2. On kOveruse, the configured degradation preference determines the downgraded pixel count and/or frame rate; under MAINTAIN_FRAMERATE, the total pixel count drops to 3/5 of its previous value.

3. The frame adapter (VideoAdapter) is notified and updates its maximum total pixel count.

4. When the Android layer captures a frame, VideoAdapter adapts it: from the input pixel count and the pixel budget of step 3 it derives a scale factor, yielding the width and height of the frame to encode.

The above is my personal understanding; corrections and alternative views are welcome.