// #include "jetson_nv_encoder.h"
// #include <limits>
// #include <string>
// #include "third_party/openh264/src/codec/api/svc/codec_api.h"
#include "third_party/openh264/src/codec/api/svc/codec_app_def.h"// #include "third_party/openh264/src/codec/api/svc/codec_def.h"// #include "third_party/openh264/src/codec/api/svc/codec_ver.h"// #include "absl/strings/match.h"// #include "common_video/h264/h264_common.h"// #include "common_video/libyuv/include/webrtc_libyuv.h"// #include "modules/video_coding/utility/simulcast_rate_allocator.h"// #include "modules/video_coding/utility/simulcast_utility.h"// #include "rtc_base/checks.h"// #include "rtc_base/logging.h"// #include "rtc_base/time_utils.h"// #include "system_wrappers/include/metrics.h"// #include "third_party/libyuv/include/libyuv/convert.h"// #include "third_party/libyuv/include/libyuv/scale.h"// namespace webrtc {// namespace {// const bool kOpenH264EncoderDetailedLogging = false;// // QP scaling thresholds.// static const int kLowH264QpThreshold = 24;// static const int kHighH264QpThreshold = 37;// // Used by histograms. Values of entries should not be changed.// enum NvVideoEncoderEvent// {// 	kH264EncoderEventInit = 0,// 	kH264EncoderEventError = 1,// 	kH264EncoderEventMax = 16,// };// int NumberOfThreads(int width, int height, int number_of_cores) // {// 	// TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac,// 	// see crbug.com/583348. Until further investigated, only use one thread.// 	//  if (width * height >= 1920 * 1080 && number_of_cores > 8) {// 	//    return 8;  // 8 threads for 1080p on high perf machines.// 	//  } else if (width * height > 1280 * 960 && number_of_cores >= 6) {// 	//    return 3;  // 3 threads for 1080p.// 	//  } else if (width * height > 640 * 480 && number_of_cores >= 3) {// 	//    return 2;  // 2 threads for qHD/HD.// 	//  } else {// 	//    return 1;  // 1 thread for VGA or less.// 	//  }// 	// TODO(sprang): Also check sSliceArgument.uiSliceNum om GetEncoderPrams(),// 	//               before enabling multithreading here.//   return 1;// }// VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) {// 	switch (type) {// 	case videoFrameTypeIDR:// 		return VideoFrameType::kVideoFrameKey;// 	case videoFrameTypeSkip:// 	case videoFrameTypeI:// 	case videoFrameTypeP:// 	case videoFrameTypeIPMixed:// 		return VideoFrameType::kVideoFrameDelta;// 	case videoFrameTypeInvalid:// 		break;// 	}// 	RTC_NOTREACHED() << "Unexpected/invalid frame type: " << type;// 	return VideoFrameType::kEmptyFrame;// }// }  // namespace// static void RtpFragmentize(EncodedImage* encoded_image,//                            const VideoFrameBuffer& frame_buffer,//                            std::vector<uint8_t>& frame_packet,//                            RTPFragmentationHeader* frag_header) // {// 	size_t required_capacity = 0;// 	encoded_image->set_size(0);// 	required_capacity = frame_packet.size();// 	encoded_image->SetEncodedData(EncodedImageBuffer::Create(required_capacity));// 	// TODO(nisse): Use a cache or buffer pool to avoid allocation?// 	encoded_image->SetEncodedData(EncodedImageBuffer::Create(required_capacity));// 	memcpy(encoded_image->data(), &frame_packet[0], frame_packet.size());// 	std::vector<webrtc::H264::NaluIndex> nalus = webrtc::H264::FindNaluIndices(// 		encoded_image->data(), encoded_image->size());// 	size_t fragments_count = nalus.size();// 	frag_header->VerifyAndAllocateFragmentationHeader(fragments_count);// 	for (size_t i = 0; i < nalus.size(); i++) {// 		frag_header->fragmentationOffset[i] = nalus[i].payload_start_offset;// 		frag_header->fragmentationLength[i] = nalus[i].payload_size;// 	}// }// 
// NvEncoder::NvEncoder(const cricket::VideoCodec& codec)
//     : packetization_mode_(H264PacketizationMode::SingleNalUnit),
//       max_payload_size_(0),
//       number_of_cores_(0),
//       encoded_image_callback_(nullptr),
//       has_reported_init_(false),
//       has_reported_error_(false),
//       num_temporal_layers_(1),
//       tl0sync_limit_(0)
// {
//   RTC_CHECK(absl::EqualsIgnoreCase(codec.name, cricket::kH264CodecName));
//   std::string packetization_mode_string;
//   if (codec.GetParam(cricket::kH264FmtpPacketizationMode, &packetization_mode_string)
//       && packetization_mode_string == "1") {
//     packetization_mode_ = H264PacketizationMode::NonInterleaved;
//   }
//   encoded_images_.reserve(kMaxSimulcastStreams);
//   nv_encoders_.reserve(kMaxSimulcastStreams);
//   configurations_.reserve(kMaxSimulcastStreams);
//   image_buffer_ = nullptr;
// }
//
// NvEncoder::~NvEncoder()
// {
//   Release();
// }
//
// int32_t NvEncoder::InitEncode(const VideoCodec* inst,
//                               int32_t number_of_cores,
//                               size_t max_payload_size)
// {
//   ReportInit();
//   if (!inst || inst->codecType != kVideoCodecH264) {
//     ReportError();
//     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
//   }
//   if (inst->maxFramerate == 0) {
//     ReportError();
//     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
//   }
//   if (inst->width < 1 || inst->height < 1) {
//     ReportError();
//     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
//   }
//   int32_t release_ret = Release();
//   if (release_ret != WEBRTC_VIDEO_CODEC_OK) {
//     ReportError();
//     return release_ret;
//   }
//
//   int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst);
//   bool doing_simulcast = (number_of_streams > 1);
//   if (doing_simulcast &&
//       !SimulcastUtility::ValidSimulcastParameters(*inst, number_of_streams)) {
//     return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
//   }
//   assert(number_of_streams == 1);
//
//   encoded_images_.resize(number_of_streams);
//   nv_encoders_.resize(number_of_streams);
//   configurations_.resize(number_of_streams);
//
//   number_of_cores_ = number_of_cores;
//   max_payload_size_ = max_payload_size;
//   codec_ = *inst;
//
//   // Code expects simulcastStream resolutions to be correct, make sure they are
//   // filled even when there are no simulcast layers.
//   if (codec_.numberOfSimulcastStreams == 0) {
//     codec_.simulcastStream[0].width = codec_.width;
//     codec_.simulcastStream[0].height = codec_.height;
//   }
//   num_temporal_layers_ = codec_.H264()->numberOfTemporalLayers;
//
//   for (int i = 0, idx = number_of_streams - 1; i < number_of_streams; ++i, --idx) {
//     // Store nvidia encoder.
//     xop::NvidiaD3D11Encoder* nv_encoder = new xop::NvidiaD3D11Encoder();
//     nv_encoders_[i] = nv_encoder;
//
//     // Set internal settings from codec_settings
//     configurations_[i].simulcast_idx = idx;
//     configurations_[i].sending = false;
//     configurations_[i].width = codec_.simulcastStream[idx].width;
//     configurations_[i].height = codec_.simulcastStream[idx].height;
//     configurations_[i].max_frame_rate = static_cast<float>(codec_.maxFramerate);
//     configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn;
//     configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval;
//
//     // Codec_settings uses kbits/second; encoder uses bits/second.
//     configurations_[i].max_bps = codec_.maxBitrate * 1000;
//     configurations_[i].target_bps = codec_.maxBitrate * 1000 / 2;
//
//     nv_encoder->SetOption(xop::VE_OPT_WIDTH, configurations_[i].width);
//     nv_encoder->SetOption(xop::VE_OPT_HEIGHT, configurations_[i].height);
//     nv_encoder->SetOption(xop::VE_OPT_FRAME_RATE, static_cast<int>(configurations_[i].max_frame_rate));
//     nv_encoder->SetOption(xop::VE_OPT_GOP, configurations_[i].key_frame_interval);
//     nv_encoder->SetOption(xop::VE_OPT_CODEC, xop::VE_OPT_CODEC_H264);
//     nv_encoder->SetOption(xop::VE_OPT_BITRATE_KBPS, configurations_[i].target_bps / 1000);
//     nv_encoder->SetOption(xop::VE_OPT_TEXTURE_FORMAT, xop::VE_OPT_FORMAT_B8G8R8A8);
//     if (!nv_encoder->Init()) {
//       Release();
//       ReportError();
//       return WEBRTC_VIDEO_CODEC_ERROR;
//     }
//     image_buffer_.reset(new uint8_t[configurations_[i].width * configurations_[i].height * 10]);
//
//     // TODO(pbos): Base init params on these values before submitting.
//     video_format_ = EVideoFormatType::videoFormatI420;
//
//     // Initialize encoded image. Default buffer size: size of unencoded data.
//     const size_t new_capacity = CalcBufferSize(VideoType::kI420,
//         codec_.simulcastStream[idx].width, codec_.simulcastStream[idx].height);
//     encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity));
//     encoded_images_[i]._completeFrame = true;
//     encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width;
//     encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height;
//     encoded_images_[i].set_size(0);
//   }
//
//   SimulcastRateAllocator init_allocator(codec_);
//   VideoBitrateAllocation allocation = init_allocator.GetAllocation(
//       codec_.maxBitrate * 1000 / 2, codec_.maxFramerate);
//   SetRates(RateControlParameters(allocation, codec_.maxFramerate));
//   return WEBRTC_VIDEO_CODEC_OK;
// }
//
// int32_t NvEncoder::Release()
// {
//   while (!nv_encoders_.empty()) {
//     xop::NvidiaD3D11Encoder* nv_encoder = reinterpret_cast<xop::NvidiaD3D11Encoder*>(nv_encoders_.back());
//     if (nv_encoder) {
//       nv_encoder->Destroy();
//       delete nv_encoder;
//     }
//     nv_encoders_.pop_back();
//   }
//   configurations_.clear();
//   encoded_images_.clear();
//   return WEBRTC_VIDEO_CODEC_OK;
// }
//
// int32_t NvEncoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback)
// {
//   encoded_image_callback_ = callback;
//   return WEBRTC_VIDEO_CODEC_OK;
// }
//
// void NvEncoder::SetRates(const RateControlParameters& parameters)
// {
//   if (parameters.bitrate.get_sum_bps() == 0) {
//     // Encoder paused, turn off all encoding.
//     for (size_t i = 0; i < configurations_.size(); ++i)
//       configurations_[i].SetStreamState(false);
//     return;
//   }
//
//   // At this point, bitrate allocation should already match codec settings.
//   if (codec_.maxBitrate > 0)
//     RTC_DCHECK_LE(parameters.bitrate.get_sum_kbps(), codec_.maxBitrate);
//   RTC_DCHECK_GE(parameters.bitrate.get_sum_kbps(), codec_.minBitrate);
//   if (codec_.numberOfSimulcastStreams > 0)
//     RTC_DCHECK_GE(parameters.bitrate.get_sum_kbps(), codec_.simulcastStream[0].minBitrate);
//
//   codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps);
//
//   size_t stream_idx = nv_encoders_.size() - 1;
//   for (size_t i = 0; i < nv_encoders_.size(); ++i, --stream_idx) {
//     configurations_[i].target_bps = parameters.bitrate.GetSpatialLayerSum(stream_idx);
//     configurations_[i].max_frame_rate = static_cast<float>(parameters.framerate_fps);
//     if (configurations_[i].target_bps) {
//       configurations_[i].SetStreamState(true);
//       if (nv_encoders_[i]) {
//         xop::NvidiaD3D11Encoder* nv_encoder = reinterpret_cast<xop::NvidiaD3D11Encoder*>(nv_encoders_[i]);
//         nv_encoder->SetEvent(xop::VE_EVENT_RESET_BITRATE_KBPS, configurations_[i].target_bps / 1000);
//         nv_encoder->SetEvent(xop::VE_EVENT_RESET_FRAME_RATE, static_cast<int>(configurations_[i].max_frame_rate));
//       } else {
//         configurations_[i].SetStreamState(false);
//       }
//     }
//   }
// }
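// Editor's note (added for clarity): the xop options and events take kilobits
// (VE_OPT_BITRATE_KBPS, VE_EVENT_RESET_BITRATE_KBPS) while |target_bps| is
// tracked in bits/second, hence the "/ 1000" conversions above.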
// int32_t NvEncoder::Encode(const VideoFrame& input_frame,
//                           const std::vector<VideoFrameType>* frame_types)
// {
//   if (nv_encoders_.empty()) {
//     ReportError();
//     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
//   }
//   if (!encoded_image_callback_) {
//     RTC_LOG(LS_WARNING)
//         << "InitEncode() has been called, but a callback function "
//         << "has not been set with RegisterEncodeCompleteCallback()";
//     ReportError();
//     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
//   }
//
//   rtc::scoped_refptr<const I420BufferInterface> frame_buffer =
//       input_frame.video_frame_buffer()->ToI420();
//
//   bool send_key_frame = false;
//   for (size_t i = 0; i < configurations_.size(); ++i) {
//     if (configurations_[i].key_frame_request && configurations_[i].sending) {
//       send_key_frame = true;
//       break;
//     }
//   }
//   if (!send_key_frame && frame_types) {
//     for (size_t i = 0; i < frame_types->size() && i < configurations_.size(); ++i) {
//       if ((*frame_types)[i] == VideoFrameType::kVideoFrameKey && configurations_[i].sending) {
//         send_key_frame = true;
//         break;
//       }
//     }
//   }
//
//   RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width());
//   RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height());
//
//   // Encode image for each layer.
//   for (size_t i = 0; i < nv_encoders_.size(); ++i) {
//     if (!configurations_[i].sending) {
//       continue;
//     }
//     if (frame_types != nullptr) {
//       // Skip frame?
//       if ((*frame_types)[i] == VideoFrameType::kEmptyFrame) {
//         continue;
//       }
//     }
//     if (send_key_frame) {
//       if (!nv_encoders_.empty() && nv_encoders_[i]) {
//         xop::NvidiaD3D11Encoder* nv_encoder = reinterpret_cast<xop::NvidiaD3D11Encoder*>(nv_encoders_[i]);
//         nv_encoder->SetEvent(xop::VE_EVENT_FORCE_IDR, 1);
//       }
//       configurations_[i].key_frame_request = false;
//     }
//
//     // EncodeFrame output.
//     SFrameBSInfo info;
//     memset(&info, 0, sizeof(SFrameBSInfo));
//
//     std::vector<uint8_t> frame_packet;
//     EncodeFrame((int)i, input_frame, frame_packet);
//     if (frame_packet.size() == 0) {
//       return WEBRTC_VIDEO_CODEC_OK;
//     } else {
//       // Peek at the first NAL unit type (low five bits of the byte after the
//       // four-byte start code): 7 = SPS, which leads an IDR access unit;
//       // 1 = non-IDR slice.
//       if ((frame_packet[4] & 0x1f) == 0x07) {
//         info.eFrameType = videoFrameTypeIDR;
//       } else if ((frame_packet[4] & 0x1f) == 0x01) {
//         info.eFrameType = videoFrameTypeP;
//       } else {
//         return WEBRTC_VIDEO_CODEC_OK;
//       }
//     }
//
//     encoded_images_[i]._encodedWidth = configurations_[i].width;
//     encoded_images_[i]._encodedHeight = configurations_[i].height;
//     encoded_images_[i].SetTimestamp(input_frame.timestamp());
//     encoded_images_[i].ntp_time_ms_ = input_frame.ntp_time_ms();
//     encoded_images_[i].capture_time_ms_ = input_frame.render_time_ms();
//     encoded_images_[i].rotation_ = input_frame.rotation();
//     encoded_images_[i].SetColorSpace(input_frame.color_space());
//     encoded_images_[i].content_type_ = (codec_.mode == VideoCodecMode::kScreensharing)
//                                            ? VideoContentType::SCREENSHARE
//                                            : VideoContentType::UNSPECIFIED;
//     encoded_images_[i].timing_.flags = VideoSendTiming::kInvalid;
//     encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
//     encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx);
//
//     // Split encoded image up into fragments. This also updates
//     // |encoded_image_|.
//     RTPFragmentationHeader frag_header;
//     RtpFragmentize(&encoded_images_[i], *frame_buffer, frame_packet, &frag_header);
//
//     // Encoder can skip frames to save bandwidth in which case
//     // |encoded_images_[i]._length| == 0.
//     if (encoded_images_[i].size() > 0) {
//       // Parse QP.
//       h264_bitstream_parser_.ParseBitstream(encoded_images_[i].data(),
//                                             encoded_images_[i].size());
//       h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_);
//
//       // Deliver encoded image.
//       CodecSpecificInfo codec_specific;
//       codec_specific.codecType = kVideoCodecH264;
//       codec_specific.codecSpecific.H264.packetization_mode = packetization_mode_;
//       codec_specific.codecSpecific.H264.temporal_idx = kNoTemporalIdx;
//       codec_specific.codecSpecific.H264.idr_frame = (info.eFrameType == videoFrameTypeIDR);
//       codec_specific.codecSpecific.H264.base_layer_sync = false;
//       encoded_image_callback_->OnEncodedImage(encoded_images_[i], &codec_specific, &frag_header);
//     }
//   }
//   return WEBRTC_VIDEO_CODEC_OK;
// }
//
// void NvEncoder::ReportInit()
// {
//   if (has_reported_init_)
//     return;
//   RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.NvEncoder.Event",
//                             kH264EncoderEventInit, kH264EncoderEventMax);
//   has_reported_init_ = true;
// }
//
// void NvEncoder::ReportError()
// {
//   if (has_reported_error_)
//     return;
//   RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.NvEncoder.Event",
//                             kH264EncoderEventError, kH264EncoderEventMax);
//   has_reported_error_ = true;
// }
//
// VideoEncoder::EncoderInfo NvEncoder::GetEncoderInfo() const
// {
//   EncoderInfo info;
//   info.supports_native_handle = false;
//   info.implementation_name = "NvEncoder";
//   info.scaling_settings = VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
//   info.is_hardware_accelerated = true;
//   info.has_internal_source = false;
//   return info;
// }
//
// void NvEncoder::LayerConfig::SetStreamState(bool send_stream)
// {
//   if (send_stream && !sending) {
//     // Need a key frame if we have not sent this stream before.
//     key_frame_request = true;
//   }
//   sending = send_stream;
// }
//
// bool NvEncoder::EncodeFrame(int index, const VideoFrame& input_frame,
//                             std::vector<uint8_t>& frame_packet)
// {
//   frame_packet.clear();
//   if (nv_encoders_.empty() || !nv_encoders_[index]) {
//     return false;
//   }
//
//   if (video_format_ == EVideoFormatType::videoFormatI420) {
//     if (image_buffer_ != nullptr) {
//       if (webrtc::ConvertFromI420(input_frame, webrtc::VideoType::kARGB, 0,
//                                   image_buffer_.get()) < 0) {
//         return false;
//       }
//     } else {
//       return false;
//     }
//   }
//
//   int width = input_frame.width();
//   int height = input_frame.height();
//   int image_size = width * height * 4;  // argb
//
//   xop::NvidiaD3D11Encoder* nv_encoder = reinterpret_cast<xop::NvidiaD3D11Encoder*>(nv_encoders_[index]);
//   if (nv_encoder) {
//     int frame_size = nv_encoder->Encode(
//         std::vector<uint8_t>(image_buffer_.get(), image_buffer_.get() + image_size), frame_packet);
//     if (frame_size < 0) {
//       return false;
//     }
//   }
//   return true;
// }
//
// }  // namespace webrtc
/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

// Everything declared/defined in this file is only required when WebRTC is
// built with H264 support, please do not move anything out of the
// #ifdef unless needed and tested.
// #ifdef WEBRTC_USE_H264

#include "modules/video_coding/codecs/h264/h264_encoder_impl.h"

#include <limits>
#include <string>

#include "absl/strings/match.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/utility/simulcast_utility.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
#include "system_wrappers/include/metrics.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"
#include "third_party/openh264/src/codec/api/svc/codec_api.h"
#include "third_party/openh264/src/codec/api/svc/codec_app_def.h"
#include "third_party/openh264/src/codec/api/svc/codec_def.h"
#include "third_party/openh264/src/codec/api/svc/codec_ver.h"
#include "jetson_nv_encoder.h"

namespace webrtc {

namespace {

const bool kOpenH264EncoderDetailedLogging = false;

// QP scaling thresholds.
static const int kLowH264QpThreshold = 24;
static const int kHighH264QpThreshold = 37;

// Used by histograms. Values of entries should not be changed.
enum H264EncoderImplEvent {
  kH264EncoderEventInit = 0,
  kH264EncoderEventError = 1,
  kH264EncoderEventMax = 16,
};

int NumberOfThreads(int width, int height, int number_of_cores) {
  // TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac,
  // see crbug.com/583348. Until further investigated, only use one thread.
  //  if (width * height >= 1920 * 1080 && number_of_cores > 8) {
  //    return 8;  // 8 threads for 1080p on high perf machines.
  //  } else if (width * height > 1280 * 960 && number_of_cores >= 6) {
  //    return 3;  // 3 threads for 1080p.
  //  } else if (width * height > 640 * 480 && number_of_cores >= 3) {
  //    return 2;  // 2 threads for qHD/HD.
  //  } else {
  //    return 1;  // 1 thread for VGA or less.
  //  }
  // TODO(sprang): Also check sSliceArgument.uiSliceNum in GetEncoderParams(),
  //               before enabling multithreading here.
  return 1;
}

VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) {
  switch (type) {
    case videoFrameTypeIDR:
      return VideoFrameType::kVideoFrameKey;
    case videoFrameTypeSkip:
    case videoFrameTypeI:
    case videoFrameTypeP:
    case videoFrameTypeIPMixed:
      return VideoFrameType::kVideoFrameDelta;
    case videoFrameTypeInvalid:
      break;
  }
  RTC_NOTREACHED() << "Unexpected/invalid frame type: " << type;
  return VideoFrameType::kEmptyFrame;
}

}  // namespace

// Helper method used by H264EncoderImpl::Encode.
// Copies the encoded bytes from |info| to |encoded_image|. The
// |encoded_image->_buffer| may be deleted and reallocated if a bigger buffer
// is required.
//
// After OpenH264 encoding, the encoded bytes are stored in |info| spread out
// over a number of layers and "NAL units". Each NAL unit is a fragment
// starting with the four-byte start code {0,0,0,1}. All of this data
// (including the start codes) is copied to the |encoded_image->_buffer|.
static void RtpFragmentize(EncodedImage* encoded_image, SFrameBSInfo* info) {
  // Calculate minimum buffer size required to hold encoded data.
  size_t required_capacity = 0;
  size_t fragments_count = 0;
  for (int layer = 0; layer < info->iLayerNum; ++layer) {
    const SLayerBSInfo& layerInfo = info->sLayerInfo[layer];
    for (int nal = 0; nal < layerInfo.iNalCount; ++nal, ++fragments_count) {
      RTC_CHECK_GE(layerInfo.pNalLengthInByte[nal], 0);
      // Ensure |required_capacity| will not overflow.
      RTC_CHECK_LE(layerInfo.pNalLengthInByte[nal],
                   std::numeric_limits<size_t>::max() - required_capacity);
      required_capacity += layerInfo.pNalLengthInByte[nal];
    }
  }
  // TODO(nisse): Use a cache or buffer pool to avoid allocation?
  auto buffer = EncodedImageBuffer::Create(required_capacity);
  encoded_image->SetEncodedData(buffer);

  // Iterate layers and NAL units, note each NAL unit as a fragment and copy
  // the data to |encoded_image->_buffer|.
  const uint8_t start_code[4] = {0, 0, 0, 1};
  size_t frag = 0;
  encoded_image->set_size(0);
  for (int layer = 0; layer < info->iLayerNum; ++layer) {
    const SLayerBSInfo& layerInfo = info->sLayerInfo[layer];
    // Iterate NAL units making up this layer, noting fragments.
    size_t layer_len = 0;
    for (int nal = 0; nal < layerInfo.iNalCount; ++nal, ++frag) {
      // Because the sum of all layer lengths, |required_capacity|, fits in a
      // |size_t|, we know that any indices in-between will not overflow.
      RTC_DCHECK_GE(layerInfo.pNalLengthInByte[nal], 4);
      RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 0], start_code[0]);
      RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 1], start_code[1]);
      RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 2], start_code[2]);
      RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 3], start_code[3]);
      layer_len += layerInfo.pNalLengthInByte[nal];
    }
    // Copy the entire layer's data (including start codes).
    memcpy(buffer->data() + encoded_image->size(), layerInfo.pBsBuf, layer_len);
    encoded_image->set_size(encoded_image->size() + layer_len);
  }
}
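// Editor's sketch (a hypothetical helper, not used by this file): illustrates
// the Annex B layout that RtpFragmentize() produces, in which every NAL unit
// is prefixed with the four-byte start code {0, 0, 0, 1}. Counting start
// codes in the finished buffer should match the |fragments_count| computed
// above.
#if 0
static size_t CountAnnexBNalUnits(const uint8_t* data, size_t size) {
  size_t count = 0;
  for (size_t i = 0; i + 4 <= size; ++i) {
    // A 4-byte start code marks the beginning of each NAL unit.
    if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 &&
        data[i + 3] == 1) {
      ++count;
    }
  }
  return count;
}
#endif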
H264EncoderImpl_::H264EncoderImpl_(const cricket::VideoCodec& codec)
    : packetization_mode_(H264PacketizationMode::SingleNalUnit),
      max_payload_size_(0),
      number_of_cores_(0),
      encoded_image_callback_(nullptr),
      has_reported_init_(false),
      has_reported_error_(false) {
  RTC_CHECK(absl::EqualsIgnoreCase(codec.name, cricket::kH264CodecName));
  std::string packetization_mode_string;
  if (codec.GetParam(cricket::kH264FmtpPacketizationMode,
                     &packetization_mode_string) &&
      packetization_mode_string == "1") {
    packetization_mode_ = H264PacketizationMode::NonInterleaved;
  }
  downscaled_buffers_.reserve(kMaxSimulcastStreams - 1);
  encoded_images_.reserve(kMaxSimulcastStreams);
  encoders_.reserve(kMaxSimulcastStreams);
  configurations_.reserve(kMaxSimulcastStreams);
  tl0sync_limit_.reserve(kMaxSimulcastStreams);
}

H264EncoderImpl_::~H264EncoderImpl_() {
  Release();
}

int32_t H264EncoderImpl_::InitEncode(const VideoCodec* inst,
                                     const VideoEncoder::Settings& settings) {
  ReportInit();
  if (!inst || inst->codecType != kVideoCodecH264) {
    ReportError();
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate == 0) {
    ReportError();
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width < 1 || inst->height < 1) {
    ReportError();
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  int32_t release_ret = Release();
  if (release_ret != WEBRTC_VIDEO_CODEC_OK) {
    ReportError();
    return release_ret;
  }

  int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst);
  bool doing_simulcast = (number_of_streams > 1);
  if (doing_simulcast &&
      !SimulcastUtility::ValidSimulcastParameters(*inst, number_of_streams)) {
    return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
  }
  downscaled_buffers_.resize(number_of_streams - 1);
  encoded_images_.resize(number_of_streams);
  encoders_.resize(number_of_streams);
  pictures_.resize(number_of_streams);
  configurations_.resize(number_of_streams);
  tl0sync_limit_.resize(number_of_streams);

  number_of_cores_ = settings.number_of_cores;
  max_payload_size_ = settings.max_payload_size;
  codec_ = *inst;

  // Code expects simulcastStream resolutions to be correct, make sure they are
  // filled even when there are no simulcast layers.
  if (codec_.numberOfSimulcastStreams == 0) {
    codec_.simulcastStream[0].width = codec_.width;
    codec_.simulcastStream[0].height = codec_.height;
  }

  for (int i = 0, idx = number_of_streams - 1; i < number_of_streams;
       ++i, --idx) {
    ISVCEncoder* openh264_encoder;
    // Create encoder.
    if (WelsCreateSVCEncoder(&openh264_encoder) != 0) {
      // Failed to create encoder.
      RTC_LOG(LS_ERROR) << "Failed to create OpenH264 encoder";
      RTC_DCHECK(!openh264_encoder);
      Release();
      ReportError();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    RTC_DCHECK(openh264_encoder);
    if (kOpenH264EncoderDetailedLogging) {
      int trace_level = WELS_LOG_DETAIL;
      openh264_encoder->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
    }
    // else WELS_LOG_DEFAULT is used by default.

    // Store h264 encoder.
    encoders_[i] = openh264_encoder;

    // Set internal settings from codec_settings
    configurations_[i].simulcast_idx = idx;
    configurations_[i].sending = false;
    configurations_[i].width = codec_.simulcastStream[idx].width;
    configurations_[i].height = codec_.simulcastStream[idx].height;
    configurations_[i].max_frame_rate = static_cast<float>(codec_.maxFramerate);
    configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn;
    configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval;
    configurations_[i].num_temporal_layers =
        codec_.simulcastStream[idx].numberOfTemporalLayers;

    // Create downscaled image buffers.
    if (i > 0) {
      downscaled_buffers_[i - 1] = I420Buffer::Create(
          configurations_[i].width, configurations_[i].height,
          configurations_[i].width, configurations_[i].width / 2,
          configurations_[i].width / 2);
    }

    // Codec_settings uses kbits/second; encoder uses bits/second.
    configurations_[i].max_bps = codec_.maxBitrate * 1000;
    configurations_[i].target_bps = codec_.startBitrate * 1000;

    // Create encoder parameters based on the layer configuration.
    SEncParamExt encoder_params = CreateEncoderParams(i);

    // Initialize.
    if (openh264_encoder->InitializeExt(&encoder_params) != 0) {
      RTC_LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder";
      Release();
      ReportError();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    // TODO(pbos): Base init params on these values before submitting.
    int video_format = EVideoFormatType::videoFormatI420;
    openh264_encoder->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);

    // Initialize encoded image. Default buffer size: size of unencoded data.
    const size_t new_capacity =
        CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width,
                       codec_.simulcastStream[idx].height);
    encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity));
    encoded_images_[i]._completeFrame = true;
    encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width;
    encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height;
    encoded_images_[i].set_size(0);

    tl0sync_limit_[i] = configurations_[i].num_temporal_layers;
  }

  SimulcastRateAllocator init_allocator(codec_);
  VideoBitrateAllocation allocation =
      init_allocator.Allocate(VideoBitrateAllocationParameters(
          DataRate::KilobitsPerSec(codec_.startBitrate), codec_.maxFramerate));
  SetRates(RateControlParameters(allocation, codec_.maxFramerate));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t H264EncoderImpl_::Release() {
  while (!encoders_.empty()) {
    ISVCEncoder* openh264_encoder = encoders_.back();
    if (openh264_encoder) {
      RTC_CHECK_EQ(0, openh264_encoder->Uninitialize());
      WelsDestroySVCEncoder(openh264_encoder);
    }
    encoders_.pop_back();
  }
  downscaled_buffers_.clear();
  configurations_.clear();
  encoded_images_.clear();
  pictures_.clear();
  tl0sync_limit_.clear();
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t H264EncoderImpl_::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
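// Editor's note on the rate-update path below (a description of the call
// contract as understood by the editor, not tied to a specific WebRTC
// revision): |parameters.bitrate| carries the allocation in bits/second, and
// GetSpatialLayerSum(idx) sums all temporal layers of spatial/simulcast layer
// |idx|. A zero sum for a layer pauses that stream via SetStreamState(false).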
void H264EncoderImpl_::SetRates(const RateControlParameters& parameters) {
  if (encoders_.empty()) {
    RTC_LOG(LS_WARNING) << "SetRates() while uninitialized.";
    return;
  }
  if (parameters.framerate_fps < 1.0) {
    RTC_LOG(LS_WARNING) << "Invalid frame rate: " << parameters.framerate_fps;
    return;
  }
  if (parameters.bitrate.get_sum_bps() == 0) {
    // Encoder paused, turn off all encoding.
    for (size_t i = 0; i < configurations_.size(); ++i) {
      configurations_[i].SetStreamState(false);
    }
    return;
  }

  codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps);

  size_t stream_idx = encoders_.size() - 1;
  for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
    // Update layer config.
    configurations_[i].target_bps =
        parameters.bitrate.GetSpatialLayerSum(stream_idx);
    configurations_[i].max_frame_rate = parameters.framerate_fps;

    if (configurations_[i].target_bps) {
      configurations_[i].SetStreamState(true);

      // Update h264 encoder.
      SBitrateInfo target_bitrate;
      memset(&target_bitrate, 0, sizeof(SBitrateInfo));
      target_bitrate.iLayer = SPATIAL_LAYER_ALL;
      target_bitrate.iBitrate = configurations_[i].target_bps;
      encoders_[i]->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
      encoders_[i]->SetOption(ENCODER_OPTION_FRAME_RATE,
                              &configurations_[i].max_frame_rate);
    } else {
      configurations_[i].SetStreamState(false);
    }
  }
}

int32_t H264EncoderImpl_::Encode(
    const VideoFrame& input_frame,
    const std::vector<VideoFrameType>* frame_types) {
  if (encoders_.empty()) {
    ReportError();
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (!encoded_image_callback_) {
    RTC_LOG(LS_WARNING)
        << "InitEncode() has been called, but a callback function "
           "has not been set with RegisterEncodeCompleteCallback()";
    ReportError();
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  rtc::scoped_refptr<const I420BufferInterface> frame_buffer =
      input_frame.video_frame_buffer()->ToI420();

  bool send_key_frame = false;
  for (size_t i = 0; i < configurations_.size(); ++i) {
    if (configurations_[i].key_frame_request && configurations_[i].sending) {
      send_key_frame = true;
      break;
    }
  }
  if (!send_key_frame && frame_types) {
    for (size_t i = 0; i < configurations_.size(); ++i) {
      const size_t simulcast_idx =
          static_cast<size_t>(configurations_[i].simulcast_idx);
      if (configurations_[i].sending && simulcast_idx < frame_types->size() &&
          (*frame_types)[simulcast_idx] == VideoFrameType::kVideoFrameKey) {
        send_key_frame = true;
        break;
      }
    }
  }

  RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width());
  RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height());

  // Encode image for each layer.
  for (size_t i = 0; i < encoders_.size(); ++i) {
    // EncodeFrame input.
    pictures_[i] = {0};
    pictures_[i].iPicWidth = configurations_[i].width;
    pictures_[i].iPicHeight = configurations_[i].height;
    pictures_[i].iColorFormat = EVideoFormatType::videoFormatI420;
    pictures_[i].uiTimeStamp = input_frame.ntp_time_ms();
    // Downscale images on second and ongoing layers.
    if (i == 0) {
      pictures_[i].iStride[0] = frame_buffer->StrideY();
      pictures_[i].iStride[1] = frame_buffer->StrideU();
      pictures_[i].iStride[2] = frame_buffer->StrideV();
      pictures_[i].pData[0] = const_cast<uint8_t*>(frame_buffer->DataY());
      pictures_[i].pData[1] = const_cast<uint8_t*>(frame_buffer->DataU());
      pictures_[i].pData[2] = const_cast<uint8_t*>(frame_buffer->DataV());
    } else {
      pictures_[i].iStride[0] = downscaled_buffers_[i - 1]->StrideY();
      pictures_[i].iStride[1] = downscaled_buffers_[i - 1]->StrideU();
      pictures_[i].iStride[2] = downscaled_buffers_[i - 1]->StrideV();
      pictures_[i].pData[0] =
          const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataY());
      pictures_[i].pData[1] =
          const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataU());
      pictures_[i].pData[2] =
          const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataV());
      // Scale the image down a number of times by downsampling factor.
      libyuv::I420Scale(pictures_[i - 1].pData[0], pictures_[i - 1].iStride[0],
                        pictures_[i - 1].pData[1], pictures_[i - 1].iStride[1],
                        pictures_[i - 1].pData[2], pictures_[i - 1].iStride[2],
                        configurations_[i - 1].width,
                        configurations_[i - 1].height, pictures_[i].pData[0],
                        pictures_[i].iStride[0], pictures_[i].pData[1],
                        pictures_[i].iStride[1], pictures_[i].pData[2],
                        pictures_[i].iStride[2], configurations_[i].width,
                        configurations_[i].height, libyuv::kFilterBilinear);
    }

    if (!configurations_[i].sending) {
      continue;
    }
    if (frame_types != nullptr) {
      // Skip frame?
      if ((*frame_types)[i] == VideoFrameType::kEmptyFrame) {
        continue;
      }
    }
    if (send_key_frame) {
      // API doc says ForceIntraFrame(false) does nothing, but calling this
      // function forces a key frame regardless of the |bIDR| argument's value.
      // (If every frame is a key frame we get lag/delays.)
      encoders_[i]->ForceIntraFrame(true);
      configurations_[i].key_frame_request = false;
    }
    // EncodeFrame output.
    SFrameBSInfo info;
    memset(&info, 0, sizeof(SFrameBSInfo));

    // Encode!
    int enc_ret = encoders_[i]->EncodeFrame(&pictures_[i], &info);
    if (enc_ret != 0) {
      RTC_LOG(LS_ERROR)
          << "OpenH264 frame encoding failed, EncodeFrame returned " << enc_ret
          << ".";
      ReportError();
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    encoded_images_[i]._encodedWidth = configurations_[i].width;
    encoded_images_[i]._encodedHeight = configurations_[i].height;
    encoded_images_[i].SetTimestamp(input_frame.timestamp());
    encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
    encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx);

    // Split encoded image up into fragments. This also updates
    // |encoded_image_|.
    RtpFragmentize(&encoded_images_[i], &info);

    // Encoder can skip frames to save bandwidth in which case
    // |encoded_images_[i]._length| == 0.
    if (encoded_images_[i].size() > 0) {
      // Parse QP.
      h264_bitstream_parser_.ParseBitstream(encoded_images_[i].data(),
                                            encoded_images_[i].size());
      h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_);

      // Deliver encoded image.
      CodecSpecificInfo codec_specific;
      codec_specific.codecType = kVideoCodecH264;
      codec_specific.codecSpecific.H264.packetization_mode =
          packetization_mode_;
      codec_specific.codecSpecific.H264.temporal_idx = kNoTemporalIdx;
      codec_specific.codecSpecific.H264.idr_frame =
          info.eFrameType == videoFrameTypeIDR;
      codec_specific.codecSpecific.H264.base_layer_sync = false;
      if (configurations_[i].num_temporal_layers > 1) {
        const uint8_t tid = info.sLayerInfo[0].uiTemporalId;
        codec_specific.codecSpecific.H264.temporal_idx = tid;
        codec_specific.codecSpecific.H264.base_layer_sync =
            tid > 0 && tid < tl0sync_limit_[i];
        if (codec_specific.codecSpecific.H264.base_layer_sync) {
          tl0sync_limit_[i] = tid;
        }
        if (tid == 0) {
          tl0sync_limit_[i] = configurations_[i].num_temporal_layers;
        }
      }
      encoded_image_callback_->OnEncodedImage(encoded_images_[i],
                                              &codec_specific);
    }
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
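// Editor's sketch (a hypothetical helper, not called anywhere): the simpler
// SEncParamBase/Initialize() path described in the comment below, shown for
// contrast with the SEncParamExt/InitializeExt() route this file uses. The
// field values are illustrative assumptions, not tuned settings.
#if 0
static int InitWithBaseParams(ISVCEncoder* encoder, int width, int height) {
  SEncParamBase base;
  // SEncParamBase is cleared with memset (unlike SEncParamExt, which is
  // populated via GetDefaultParams()).
  memset(&base, 0, sizeof(SEncParamBase));
  base.iUsageType = CAMERA_VIDEO_REAL_TIME;
  base.iPicWidth = width;
  base.iPicHeight = height;
  base.iTargetBitrate = 500000;  // bits/second; illustrative only.
  base.iRCMode = RC_BITRATE_MODE;
  base.fMaxFrameRate = 30.0f;
  // Initialize() takes SEncParamBase; InitializeExt() takes SEncParamExt.
  return encoder->Initialize(&base);
}
#endif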
// Initialization parameters.
// There are two ways to initialize. There is SEncParamBase (cleared with
// memset(&p, 0, sizeof(SEncParamBase))) used in Initialize, and SEncParamExt
// which is a superset of SEncParamBase (cleared with GetDefaultParams) used
// in InitializeExt.
SEncParamExt H264EncoderImpl_::CreateEncoderParams(size_t i) const {
  SEncParamExt encoder_params;
  encoders_[i]->GetDefaultParams(&encoder_params);
  if (codec_.mode == VideoCodecMode::kRealtimeVideo) {
    encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
  } else if (codec_.mode == VideoCodecMode::kScreensharing) {
    encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
  } else {
    RTC_NOTREACHED();
  }
  encoder_params.iPicWidth = configurations_[i].width;
  encoder_params.iPicHeight = configurations_[i].height;
  encoder_params.iTargetBitrate = configurations_[i].target_bps;
  // Keep unspecified. WebRTC's max codec bitrate is not the same setting
  // as OpenH264's iMaxBitrate. More details in https://crbug.com/webrtc/11543
  encoder_params.iMaxBitrate = UNSPECIFIED_BIT_RATE;
  // Rate Control mode
  encoder_params.iRCMode = RC_BITRATE_MODE;
  encoder_params.fMaxFrameRate = configurations_[i].max_frame_rate;

  // The following parameters are extension parameters (they're in
  // SEncParamExt, not in SEncParamBase).
  encoder_params.bEnableFrameSkip = configurations_[i].frame_dropping_on;
  // |uiIntraPeriod|    - multiple of GOP size
  // |keyFrameInterval| - number of frames
  encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval;
  encoder_params.uiMaxNalSize = 0;
  // Threading model: use auto.
  //  0: auto (dynamic imp. internal encoder)
  //  1: single thread (default value)
  // >1: number of threads
  encoder_params.iMultipleThreadIdc = NumberOfThreads(
      encoder_params.iPicWidth, encoder_params.iPicHeight, number_of_cores_);
  // The base spatial layer 0 is the only one we use.
  encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth;
  encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight;
  encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate;
  encoder_params.sSpatialLayers[0].iSpatialBitrate =
      encoder_params.iTargetBitrate;
  encoder_params.sSpatialLayers[0].iMaxSpatialBitrate =
      encoder_params.iMaxBitrate;
  encoder_params.iTemporalLayerNum = configurations_[i].num_temporal_layers;
  if (encoder_params.iTemporalLayerNum > 1) {
    encoder_params.iNumRefFrame = 1;
  }
  RTC_LOG(INFO) << "OpenH264 version is " << OPENH264_MAJOR << "."
                << OPENH264_MINOR;
  switch (packetization_mode_) {
    case H264PacketizationMode::SingleNalUnit:
      // Limit the size of the packets produced.
      encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
      encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
          SM_SIZELIMITED_SLICE;
      encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint =
          static_cast<unsigned int>(max_payload_size_);
      RTC_LOG(INFO) << "Encoder is configured with NALU constraint: "
                    << max_payload_size_ << " bytes";
      break;
    case H264PacketizationMode::NonInterleaved:
      // When uiSliceMode = SM_FIXEDSLCNUM_SLICE, uiSliceNum = 0 means auto
      // design it with cpu core number.
      // TODO(sprang): Set to 0 when we understand why the rate controller
      //               borks when uiSliceNum > 1.
      encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
      encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
          SM_FIXEDSLCNUM_SLICE;
      break;
  }
  return encoder_params;
}

void H264EncoderImpl_::ReportInit() {
  if (has_reported_init_)
    return;
  RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event",
                            kH264EncoderEventInit, kH264EncoderEventMax);
  has_reported_init_ = true;
}

void H264EncoderImpl_::ReportError() {
  if (has_reported_error_)
    return;
  RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event",
                            kH264EncoderEventError, kH264EncoderEventMax);
  has_reported_error_ = true;
}

VideoEncoder::EncoderInfo H264EncoderImpl_::GetEncoderInfo() const {
  EncoderInfo info;
  info.supports_native_handle = false;
  info.implementation_name = "OpenH264";
  info.scaling_settings =
      VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
  info.is_hardware_accelerated = false;
  info.has_internal_source = false;
  info.supports_simulcast = true;
  return info;
}

void H264EncoderImpl_::LayerConfig::SetStreamState(bool send_stream) {
  if (send_stream && !sending) {
    // Need a key frame if we have not sent this stream before.
    key_frame_request = true;
  }
  sending = send_stream;
}

}  // namespace webrtc

// #endif  // WEBRTC_USE_H264
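// Editor's sketch (hypothetical driver code, assuming a WebRTC API vintage
// matching this file; the settings values are illustrative): how an
// application might wire up this encoder.
#if 0
#include "api/video_codecs/video_encoder.h"
#include "media/base/media_constants.h"

void ExampleUsage(webrtc::EncodedImageCallback* sink) {
  // Request packetization-mode=1 (NonInterleaved), as negotiated via SDP.
  cricket::VideoCodec codec_param(cricket::kH264CodecName);
  codec_param.SetParam(cricket::kH264FmtpPacketizationMode, "1");
  webrtc::H264EncoderImpl_ encoder(codec_param);

  webrtc::VideoCodec settings;
  settings.codecType = webrtc::kVideoCodecH264;
  settings.width = 1280;
  settings.height = 720;
  settings.maxFramerate = 30;
  settings.startBitrate = 1000;  // kbps.
  settings.maxBitrate = 2500;    // kbps.
  *settings.H264() = webrtc::VideoEncoder::GetDefaultH264Settings();

  encoder.RegisterEncodeCompleteCallback(sink);
  encoder.InitEncode(&settings,
                     webrtc::VideoEncoder::Settings(
                         webrtc::VideoEncoder::Capabilities(false),
                         /*number_of_cores=*/1, /*max_payload_size=*/1200));
  // ... feed webrtc::VideoFrame objects to encoder.Encode() ...
  encoder.Release();
}
#endif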