// jetson_nv_encoder.cpp
  1. #include <limits>
  2. #include <string>
  3. // WebRTC
  4. #include <common_video/libyuv/include/webrtc_libyuv.h>
  5. #include <modules/video_coding/codecs/h264/include/h264.h>
  6. #include <modules/video_coding/include/video_codec_interface.h>
  7. #include <modules/video_coding/include/video_error_codes.h>
  8. // #include <modules/video_coding/svc/create_scalability_structure.h>
  9. #include <rtc_base/checks.h>
  10. #include <rtc_base/logging.h>
  11. #include <rtc_base/time_utils.h>
  12. #include <system_wrappers/include/metrics.h>
  13. // libyuv
  14. #include <libyuv/convert.h>
  15. #include <libyuv/convert_from.h>
  16. #include <libyuv/video_common.h>
  17. // L4T Multimedia API
  18. #include "NvBufSurface.h"
  19. #include "NvVideoEncoder.h"
  20. #include "nvbufsurface.h"
  21. #include "nvbufsurftransform.h"
  22. #include "jetson_nv_encoder.h"
  23. #define H264HWENC_HEADER_DEBUG 0
  24. #define INIT_ERROR(cond, desc) \
  25. if (cond) { \
  26. RTC_LOG(LS_ERROR) << __FUNCTION__ << desc; \
  27. Release(); \
  28. return WEBRTC_VIDEO_CODEC_ERROR; \
  29. }
  30. namespace webrtc {
// Constructs the encoder wrapper. The underlying NvVideoEncoder is created
// lazily in JetsonConfigure() on the first Encode() call; |codec| is unused
// here because InitEncode() later receives the full codec settings.
JetsonVideoEncoder::JetsonVideoEncoder(const cricket::VideoCodec& codec)
    : callback_(nullptr),
      encoder_(nullptr),
      configured_framerate_(30),
      use_native_(true),
      use_dmabuff_(true) {}
// Tears down the hardware encoder and frees all owned buffers.
JetsonVideoEncoder::~JetsonVideoEncoder() {
  Release();
}
  40. // bool JetsonVideoEncoder::IsSupported(webrtc::VideoCodecType codec) {
  41. // //SuppressErrors sup;
  42. // printf("----------------------------------------------------------------------------------issupported\n");
  43. // auto encoder = NvVideoEncoder::createVideoEncoder("enc0");
  44. // // auto ret = encoder->setCapturePlaneFormat(VideoCodecToV4L2Format(codec), 1024,
  45. // // 768, 2 * 1024 * 1024);
  46. // auto ret = encoder->setCapturePlaneFormat(V4L2_PIX_FMT_H264, 1280,
  47. // 720, 2 * 1024 * 1024);
  48. // delete encoder;
  49. // return ret >= 0;
  50. // }
  51. int32_t JetsonVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
  52. int32_t number_of_cores,
  53. size_t max_payload_size) {
  54. RTC_DCHECK(codec_settings);
  55. int32_t release_ret = Release();
  56. if (release_ret != WEBRTC_VIDEO_CODEC_OK) {
  57. return release_ret;
  58. }
  59. if (&codec_ != codec_settings) {
  60. codec_ = *codec_settings;
  61. }
  62. width_ = codec_settings->width;
  63. height_ = codec_settings->height;
  64. target_bitrate_bps_ = codec_settings->startBitrate * 1000;
  65. // std::cout << "g=heig " << width_ << height_ << target_bitrate_bps_ << std::endl;
  66. key_frame_interval_ = codec_settings->H264().keyFrameInterval;
  67. framerate_ = codec_settings->maxFramerate;
  68. // std::cout << "---------------------------------------------framerate_" << framerate_ << std::endl;
  69. RTC_LOG(LS_INFO) << "InitEncode " << framerate_ << "fps "
  70. << target_bitrate_bps_ << "bit/sec "
  71. << codec_settings->maxBitrate << "kbit/sec ";
  72. // Initialize encoded image.
  73. encoded_image_.timing_.flags =
  74. webrtc::VideoSendTiming::TimingFrameFlags::kInvalid;
  75. encoded_image_.content_type_ =
  76. (codec_settings->mode == webrtc::VideoCodecMode::kScreensharing)
  77. ? webrtc::VideoContentType::SCREENSHARE
  78. : webrtc::VideoContentType::UNSPECIFIED;
  79. gof_idx_ = 0;
  80. RTC_LOG(LS_INFO) << __FUNCTION__ << " End";
  81. return WEBRTC_VIDEO_CODEC_OK;
  82. }
// Frees all encoder resources. Always reports success because
// JetsonRelease() is a no-op when the encoder was never configured.
int32_t JetsonVideoEncoder::Release() {
  JetsonRelease();
  return WEBRTC_VIDEO_CODEC_OK;
}
  87. int32_t JetsonVideoEncoder::JetsonConfigure() {
  88. int ret = 0;
  89. bool use_converter =
  90. use_native_ && (width_ != raw_width_ || height_ != raw_height_ ||
  91. decode_pixfmt_ != V4L2_PIX_FMT_YUV420M);
  92. encoder_ = NvVideoEncoder::createVideoEncoder("enc0");
  93. INIT_ERROR(!encoder_, "Failed to createVideoEncoder");
  94. ret =encoder_->setCapturePlaneFormat(V4L2_PIX_FMT_H264,width_, height_, 2 * 1024 * 1024);
  95. printf("width_;%d, height_:%d\n",width_,height_);
  96. INIT_ERROR(ret < 0, "Failed to encoder setCapturePlaneFormat");
  97. ret = encoder_->setOutputPlaneFormat(V4L2_PIX_FMT_YUV420M, width_, height_);
  98. INIT_ERROR(ret < 0, "Failed to encoder setOutputPlaneFormat");
  99. // ret = encoder_->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_HIGH);
  100. ret = encoder_->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_MAIN);
  101. // ret = encoder_->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10);
  102. // ret = encoder_->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE);
  103. INIT_ERROR(ret < 0, "Failed to setProfile");
  104. // ret = encoder_->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_5_1);
  105. ret = encoder_->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_2_1);
  106. // INIT_ERROR(ret < 0, "Failed to setLevel");
  107. ret = encoder_->setNumBFrames(0);
  108. // ret = encoder_->setNumBFrames(1);
  109. INIT_ERROR(ret < 0, "Failed to setNumBFrames");
  110. ret = encoder_->setInsertSpsPpsAtIdrEnabled(true);
  111. // ret = encoder_->setInsertSpsPpsAtIdrEnabled(false);
  112. INIT_ERROR(ret < 0, "Failed to setInsertSpsPpsAtIdrEnabled");
  113. ret = encoder_->setInsertVuiEnabled(true);
  114. // ret = encoder_->setInsertVuiEnabled(false);
  115. INIT_ERROR(ret < 0, "Failed to setInsertSpsPpsAtIdrEnabled");
  116. // ret = encoder_->setHWPresetType(V4L2_ENC_HW_PRESET_FAST);
  117. ret = encoder_->setHWPresetType(V4L2_ENC_HW_PRESET_ULTRAFAST);
  118. // ret = encoder_->setHWPresetType(V4L2_ENC_HW_PRESET_SLOW);
  119. INIT_ERROR(ret < 0, "Failed to setHWPresetType");
  120. bitrate_adjuster_.reset(new webrtc::BitrateAdjuster(.5, .95));
  121. bitrate_adjuster_->SetTargetBitrateBps(target_bitrate_bps_);
  122. SetBitrateBps(target_bitrate_bps_);
  123. ret = encoder_->setIDRInterval(key_frame_interval_);
  124. INIT_ERROR(ret < 0, "Failed to setIDRInterval");
  125. // ret = encoder_->setIFrameInterval(key_frame_interval_);
  126. ret = encoder_->setIFrameInterval(0);
  127. INIT_ERROR(ret < 0, "Failed to setIFrameInterval");
  128. //
  129. // ret = encoder_->setFrameRate(framerate_, 1);
  130. ret = encoder_->setFrameRate(30, 1);
  131. INIT_ERROR(ret < 0, "Failed to setFrameRate");
  132. if (use_native_) {
  133. std::cout << "use native -------------------------------------------\n" << std::endl;
  134. if (use_dmabuff_ || use_converter) {
  135. std::cout << "use use_converter -------------------------------------------\n" << std::endl;
  136. ret = encoder_->output_plane.reqbufs(V4L2_MEMORY_DMABUF, 10);
  137. INIT_ERROR(ret < 0, "Failed to reqbufs at encoder output_plane");
  138. int fd;
  139. NvBufSurf::NvCommonAllocateParams cParams;
  140. cParams.width = width_;
  141. cParams.height = height_;
  142. cParams.layout = NVBUF_LAYOUT_PITCH;
  143. cParams.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
  144. cParams.memtag = NvBufSurfaceTag_VIDEO_ENC;
  145. cParams.memType = NVBUF_MEM_SURFACE_ARRAY;
  146. for (uint32_t i = 0; i < encoder_->output_plane.getNumBuffers(); i++) {
  147. ret = NvBufSurf::NvAllocate(&cParams, 1, &fd);
  148. INIT_ERROR(ret, "Failed to create NvBuffer");
  149. RTC_LOG(LS_ERROR) << "NvBufferCreateEx i:" << i << " fd:" << fd;
  150. output_plane_fd_[i] = fd;
  151. }
  152. }
  153. else {
  154. std::cout << "V4L2_MEMORY_USERPTR-------------------\n" << std::endl;
  155. ret = encoder_->output_plane.setupPlane(V4L2_MEMORY_USERPTR, 1, false,
  156. false);
  157. INIT_ERROR(ret < 0, "Failed to setupPlane at encoder output_plane");
  158. }
  159. } else {
  160. std::cout << "V4L2_MEMORY_MMAP-------------------\n" << std::endl;
  161. ret = encoder_->output_plane.setupPlane(V4L2_MEMORY_MMAP, 1, true, false);
  162. INIT_ERROR(ret < 0, "Failed to setupPlane at encoder output_plane");
  163. }
  164. ret = encoder_->capture_plane.setupPlane(V4L2_MEMORY_MMAP, 1, true, false);
  165. INIT_ERROR(ret < 0, "Failed to setupPlane at capture_plane");
  166. ret = encoder_->subscribeEvent(V4L2_EVENT_EOS, 0, 0);
  167. INIT_ERROR(ret < 0, "Failed to subscribeEvent V4L2_EVENT_EOS");
  168. ret = encoder_->output_plane.setStreamStatus(true);
  169. INIT_ERROR(ret < 0, "Failed to setStreamStatus at encoder output_plane");
  170. ret = encoder_->capture_plane.setStreamStatus(true);
  171. INIT_ERROR(ret < 0, "Failed to setStreamStatus at encoder capture_plane");
  172. encoder_->capture_plane.setDQThreadCallback(EncodeFinishedCallbackFunction);
  173. encoder_->capture_plane.startDQThread(this);
  174. for (uint32_t i = 0; i < encoder_->capture_plane.getNumBuffers(); i++) {
  175. struct v4l2_buffer v4l2_buf;
  176. struct v4l2_plane planes[MAX_PLANES];
  177. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  178. memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
  179. v4l2_buf.index = i;
  180. v4l2_buf.m.planes = planes;
  181. ret = encoder_->capture_plane.qBuffer(v4l2_buf, NULL);
  182. INIT_ERROR(ret < 0, "Failed to qBuffer at encoder capture_plane");
  183. }
  184. configured_framerate_ = framerate_;
  185. return WEBRTC_VIDEO_CODEC_OK;
  186. }
// Stops the encoder and frees every V4L2/NvBuf resource it owns.
// No-op when the encoder was never configured, so it is safe to call
// repeatedly (Release(), the destructor and re-InitEncode all funnel here).
void JetsonVideoEncoder::JetsonRelease() {
  if (!encoder_)
    return;
  // Queue a zero-byte (EOS) buffer so the capture-plane DQ thread drains.
  SendEOS();
  // Give the dequeue thread up to 2 s to exit before tearing planes down.
  encoder_->capture_plane.waitForDQThread(2000);
  encoder_->capture_plane.deinitPlane();
  if (use_dmabuff_) {
    // DMA path: unmap and destroy each buffer allocated in JetsonConfigure().
    for (uint32_t i = 0; i < encoder_->output_plane.getNumBuffers(); i++) {
      if (encoder_->output_plane.unmapOutputBuffers(i, output_plane_fd_[i]) <
          0) {
        RTC_LOG(LS_ERROR)
            << "Failed to unmapOutputBuffers at encoder output_plane";
      }
      if (NvBufSurf::NvDestroy(output_plane_fd_[i]) < 0) {
        RTC_LOG(LS_ERROR)
            << "Failed to NvBufferDestroy at encoder output_plane";
      }
    }
  } else {
    encoder_->output_plane.deinitPlane();
  }
  delete encoder_;
  encoder_ = nullptr;
}
  211. void JetsonVideoEncoder::SendEOS() {
  212. if (encoder_->output_plane.getStreamStatus()) {
  213. struct v4l2_buffer v4l2_buf;
  214. struct v4l2_plane planes[MAX_PLANES];
  215. NvBuffer* buffer;
  216. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  217. memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
  218. v4l2_buf.m.planes = planes;
  219. if (encoder_->output_plane.getNumQueuedBuffers() ==
  220. encoder_->output_plane.getNumBuffers()) {
  221. if (encoder_->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0) {
  222. RTC_LOG(LS_ERROR) << "Failed to dqBuffer at encoder output_plane";
  223. }
  224. }
  225. planes[0].bytesused = 0;
  226. for (int i = 0; i < buffer->n_planes; i++) {
  227. buffer->planes[i].bytesused = 0;
  228. }
  229. if (encoder_->output_plane.qBuffer(v4l2_buf, NULL) < 0) {
  230. RTC_LOG(LS_ERROR) << "Failed to qBuffer at encoder output_plane";
  231. }
  232. }
  233. }
  234. bool JetsonVideoEncoder::EncodeFinishedCallbackFunction(
  235. struct v4l2_buffer* v4l2_buf,
  236. NvBuffer* buffer,
  237. NvBuffer* shared_buffer,
  238. void* data) {
  239. return ((JetsonVideoEncoder*)data)
  240. ->EncodeFinishedCallback(v4l2_buf, buffer, shared_buffer);
  241. }
// Runs on the capture-plane DQ thread for every encoded buffer. Matches the
// buffer back to the FrameParams queued by Encode() (keyed on the
// microsecond timestamp), fetches encoder metadata, hands the bitstream to
// SendFrame() and re-queues the capture buffer. Returning false stops the
// DQ thread; returning true on a skipped frame keeps it running.
bool JetsonVideoEncoder::EncodeFinishedCallback(struct v4l2_buffer* v4l2_buf,
                                                NvBuffer* buffer,
                                                NvBuffer* shared_buffer) {
  if (!v4l2_buf) {
    RTC_LOG(LS_INFO) << __FUNCTION__ << " v4l2_buf is null";
    return false;
  }
  if (buffer->planes[0].bytesused == 0) {
    // Zero bytes signals EOS (see SendEOS()); stop the thread.
    RTC_LOG(LS_INFO) << __FUNCTION__ << " buffer size is zero";
    return false;
  }
  // Reconstruct the microsecond timestamp stamped onto the output buffer in
  // Encode(); it identifies which input frame produced this bitstream.
  uint64_t timestamp = v4l2_buf->timestamp.tv_sec * rtc::kNumMicrosecsPerSec +
                       v4l2_buf->timestamp.tv_usec;
  std::unique_ptr<FrameParams> params;
  {
    webrtc::MutexLock lock(&frame_params_lock_);
    // Discard params older than this buffer (frames the encoder dropped),
    // then require an exact timestamp match.
    do {
      if (frame_params_.empty()) {
        RTC_LOG(LS_WARNING)
            << __FUNCTION__
            << "Frame parameter is not found. SkipFrame timestamp:"
            << timestamp;
        return true;
      }
      params = std::move(frame_params_.front());
      frame_params_.pop();
    } while (params->timestamp_us < timestamp);
    if (params->timestamp_us != timestamp) {
      RTC_LOG(LS_WARNING)
          << __FUNCTION__
          << "Frame parameter is not found. SkipFrame timestamp:" << timestamp;
      return true;
    }
  }
  // Per-buffer metadata (keyframe flag, average QP) from the driver.
  v4l2_ctrl_videoenc_outputbuf_metadata enc_metadata;
  if (encoder_->getMetadata(v4l2_buf->index, enc_metadata) != 0) {
    RTC_LOG(LS_WARNING) << __FUNCTION__
                        << "getMetadata failed. SkipFrame timestamp:"
                        << timestamp;
    return true;
  }
  SendFrame(buffer->planes[0].data, buffer->planes[0].bytesused,
            std::move(params), &enc_metadata);
  // Return the capture buffer to the encoder for reuse.
  if (encoder_->capture_plane.qBuffer(*v4l2_buf, NULL) < 0) {
    RTC_LOG(LS_ERROR) << __FUNCTION__ << "Failed to qBuffer at capture_plane";
    return false;
  }
  return true;
}
// Stores the sink that SendFrame() delivers encoded images to.
int32_t JetsonVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
  296. void JetsonVideoEncoder::SetRates(const RateControlParameters& parameters) {
  297. // printf("SetRatesSetRatesSetRatesSetRatesSetRatesSetRatesSetRates\n");
  298. if (encoder_ == nullptr)
  299. return;
  300. if (parameters.bitrate.get_sum_bps() <= 0 || parameters.framerate_fps <= 0)
  301. return;
  302. RTC_LOG(LS_INFO) << __FUNCTION__ << " framerate:" << parameters.framerate_fps
  303. << " bitrate:" << parameters.bitrate.ToString();
  304. // if (svc_controller_) {
  305. // svc_controller_->OnRatesUpdated(parameters.bitrate);
  306. // }
  307. framerate_ = parameters.framerate_fps;
  308. target_bitrate_bps_ = parameters.bitrate.get_sum_bps();
  309. bitrate_adjuster_->SetTargetBitrateBps(target_bitrate_bps_);
  310. return;
  311. }
  312. void JetsonVideoEncoder::SetFramerate(uint32_t framerate) {
  313. if (configured_framerate_ == framerate) {
  314. return;
  315. }
  316. RTC_LOG(LS_INFO) << __FUNCTION__ << " " << framerate << "fps";
  317. if (encoder_->setFrameRate(framerate, 1) < 0) {
  318. RTC_LOG(LS_ERROR) << "Failed to set bitrate";
  319. return;
  320. }
  321. configured_framerate_ = framerate;
  322. }
  323. void JetsonVideoEncoder::SetBitrateBps(uint32_t bitrate_bps) {
  324. if (bitrate_bps < 300000 || (configured_bitrate_bps_ == bitrate_bps &&
  325. configured_framerate_ == framerate_)) {
  326. return;
  327. }
  328. configured_bitrate_bps_ = bitrate_bps;
  329. if (encoder_->setBitrate(bitrate_bps) < 0) {
  330. RTC_LOG(LS_ERROR) << "Failed to setBitrate";
  331. return;
  332. }
  333. }
  334. webrtc::VideoEncoder::EncoderInfo JetsonVideoEncoder::GetEncoderInfo() const {
  335. EncoderInfo info;
  336. info.supports_native_handle = true;
  337. info.implementation_name = "Jetson Video Encoder";
  338. static const int kLowH264QpThreshold = 24; //34
  339. static const int kHighH264QpThreshold = 37; //40
  340. info.scaling_settings = VideoEncoder::ScalingSettings(kLowH264QpThreshold,
  341. kHighH264QpThreshold);
  342. return info;
  343. }
// Encodes one frame. Lazily configures the hardware encoder on first use,
// queues per-frame bookkeeping for the DQ-thread callback, then either DMA
// transforms a native frame or memcpy's an I420 frame into a V4L2
// output-plane buffer and queues it for encoding.
int32_t JetsonVideoEncoder::Encode(
    const webrtc::VideoFrame& input_frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  if (!callback_) {
    RTC_LOG(LS_WARNING)
        << "InitEncode() has been called, but a callback function "
        << "has not been set with RegisterEncodeCompleteCallback()";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // NOTE(review): fd, video_type and native_data are never assigned before
  // they are read on the native path below — the code that filled them in
  // appears to have been removed. Reading them is undefined behavior;
  // confirm whether the native-frame branches are actually exercised.
  int fd = 0;
  webrtc::VideoType video_type;
  uint8_t* native_data;
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer =
      input_frame.video_frame_buffer();
  // Route by buffer type: kNative frames take the DMA/transform path, all
  // others are converted to I420 and copied.
  if (frame_buffer->type() == webrtc::VideoFrameBuffer::Type::kNative) {
    use_native_ = true;
  } else {
    use_native_ = false;
  }
  // First frame (or after Release): bring up the hardware encoder.
  if (encoder_ == nullptr) {
    if (JetsonConfigure() != WEBRTC_VIDEO_CODEC_OK) {
      RTC_LOG(LS_ERROR) << "Failed to JetsonConfigure";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  bool force_key_frame = false;
  if (frame_types != nullptr) {
    RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1));
    if ((*frame_types)[0] == webrtc::VideoFrameType::kEmptyFrame) {
      return WEBRTC_VIDEO_CODEC_OK;
    }
    if ((*frame_types)[0] == webrtc::VideoFrameType::kVideoFrameKey) {
      // WebRTC requested a keyframe; ask the hardware for an IDR.
      if (encoder_->forceIDR() < 0) {
        RTC_LOG(LS_ERROR) << "Failed to forceIDR";
      }
    }
  }
  // Apply any rate updates cached by SetRates().
  SetFramerate(framerate_);
  SetBitrateBps(bitrate_adjuster_->GetAdjustedBitrateBps());
  {
    // Queue the frame's metadata; EncodeFinishedCallback() matches it back
    // to the encoded buffer via timestamp_us.
    webrtc::MutexLock lock(&frame_params_lock_);
    frame_params_.push(absl::make_unique<FrameParams>(
        frame_buffer->width(), frame_buffer->height(),
        input_frame.render_time_ms(), input_frame.ntp_time_ms(),
        input_frame.timestamp_us(), input_frame.timestamp(),
        input_frame.rotation(), input_frame.color_space()));
  }
  struct v4l2_buffer v4l2_buf;
  struct v4l2_plane planes[MAX_PLANES];
  memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  memset(planes, 0, sizeof(planes));
  v4l2_buf.m.planes = planes;
  if (use_native_) {
    NvBuffer* buffer;
    // When every output buffer is queued, block (10 ms) for one to free up;
    // otherwise take the next never-queued buffer.
    if (encoder_->output_plane.getNumQueuedBuffers() ==
        encoder_->output_plane.getNumBuffers()) {
      if (encoder_->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0) {
        RTC_LOG(LS_ERROR) << "Failed to dqBuffer at encoder output_plane";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    } else {
      buffer = encoder_->output_plane.getNthBuffer(
          encoder_->output_plane.getNumQueuedBuffers());
      v4l2_buf.index = encoder_->output_plane.getNumQueuedBuffers();
    }
    int src_dma_fd = -1;
    if (use_dmabuff_) {
      // NOTE(review): fd is still 0 here (see above) — presumably it should
      // hold the dmabuf fd extracted from the native frame buffer; verify.
      src_dma_fd = fd;
    } else if (video_type == webrtc::VideoType::kYUY2 ||
               video_type == webrtc::VideoType::kUYVY) {
      // Packed-YUV single plane: point the plane at the raw frame data.
      buffer->planes[0].bytesused = buffer->planes[0].fmt.width *
                                    buffer->planes[0].fmt.bytesperpixel *
                                    buffer->planes[0].fmt.height;
      buffer->planes[0].data = native_data;
    } else if (video_type == webrtc::VideoType::kI420) {
      // Planar Y/U/V laid out consecutively in native_data.
      size_t offset = 0;
      for (int i = 0; i < buffer->n_planes; i++) {
        buffer->planes[i].bytesused = buffer->planes[i].fmt.width *
                                      buffer->planes[i].fmt.bytesperpixel *
                                      buffer->planes[i].fmt.height;
        buffer->planes[i].data = native_data + offset;
        offset += buffer->planes[i].bytesused;
      }
    } else if (video_type == webrtc::VideoType::kYV12) {
      // YV12 stores V before U, so planes 1 and 2 are swapped relative to
      // the source layout.
      size_t offset = 0;
      buffer->planes[0].bytesused = buffer->planes[0].fmt.width *
                                    buffer->planes[0].fmt.bytesperpixel *
                                    buffer->planes[0].fmt.height;
      buffer->planes[0].data = native_data;
      offset += buffer->planes[0].bytesused;
      buffer->planes[2].bytesused = buffer->planes[1].fmt.width *
                                    buffer->planes[1].fmt.bytesperpixel *
                                    buffer->planes[1].fmt.height;
      buffer->planes[2].data = native_data + offset;
      offset += buffer->planes[2].bytesused;
      buffer->planes[1].bytesused = buffer->planes[2].fmt.width *
                                    buffer->planes[2].fmt.bytesperpixel *
                                    buffer->planes[2].fmt.height;
      buffer->planes[1].data = native_data + offset;
    } else {
      RTC_LOG(LS_ERROR) << "Unsupported webrtc::VideoType";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    // Hardware scale/convert from the source surface into the encoder's
    // pre-allocated DMA buffer for this index.
    NvBufSurf::NvCommonTransformParams transform_params;
    /* Indicates which of the transform parameters are valid */
    memset(&transform_params, 0, sizeof(transform_params));
    transform_params.src_top = 0;
    transform_params.src_left = 0;
    transform_params.src_width = raw_width_;
    transform_params.src_height = raw_height_;
    transform_params.dst_top = 0;
    transform_params.dst_left = 0;
    transform_params.dst_width = width_;
    transform_params.dst_height = height_;
    transform_params.flag =
        (NvBufSurfTransform_Transform_Flag)(NVBUFSURF_TRANSFORM_FILTER |
                                            NVBUFSURF_TRANSFORM_CROP_SRC);
    transform_params.flip = NvBufSurfTransform_None;
    transform_params.filter = NvBufSurfTransformInter_Bilinear;
    if (NvBufSurf::NvTransform(&transform_params, src_dma_fd,
                               output_plane_fd_[v4l2_buf.index])) {
      RTC_LOG(LS_ERROR) << "Failed to NvBufferTransform";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    planes[0].m.fd = output_plane_fd_[v4l2_buf.index];
    // NOTE(review): 1234 looks like a non-zero placeholder (the driver reads
    // the DMA fd, not this size, and zero would mean EOS) — confirm.
    planes[0].bytesused = 1234;
    v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    v4l2_buf.memory = V4L2_MEMORY_DMABUF;
    // Stamp the frame's microsecond timestamp; EncodeFinishedCallback()
    // uses it to find the matching FrameParams.
    v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
    v4l2_buf.timestamp.tv_sec =
        input_frame.timestamp_us() / rtc::kNumMicrosecsPerSec;
    v4l2_buf.timestamp.tv_usec =
        input_frame.timestamp_us() % rtc::kNumMicrosecsPerSec;
    if (encoder_->output_plane.qBuffer(v4l2_buf, nullptr) < 0) {
      RTC_LOG(LS_ERROR) << "Failed to qBuffer at converter output_plane";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  } else {
    // Software path: convert the frame to I420 and copy plane by plane into
    // the encoder's buffer, honoring each plane's hardware stride.
    NvBuffer* buffer;
    RTC_LOG(LS_VERBOSE) << __FUNCTION__ << " output_plane.getNumBuffers: "
                        << encoder_->output_plane.getNumBuffers()
                        << " output_plane.getNumQueuedBuffers: "
                        << encoder_->output_plane.getNumQueuedBuffers();
    if (encoder_->output_plane.getNumQueuedBuffers() ==
        encoder_->output_plane.getNumBuffers()) {
      if (encoder_->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0) {
        RTC_LOG(LS_ERROR) << "Failed to dqBuffer at encoder output_plane";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    } else {
      buffer = encoder_->output_plane.getNthBuffer(
          encoder_->output_plane.getNumQueuedBuffers());
      v4l2_buf.index = encoder_->output_plane.getNumQueuedBuffers();
    }
    rtc::scoped_refptr<const webrtc::I420BufferInterface> i420_buffer =
        frame_buffer->ToI420();
    for (uint32_t i = 0; i < buffer->n_planes; i++) {
      const uint8_t* source_data;
      int source_stride;
      if (i == 0) {
        source_data = i420_buffer->DataY();
        source_stride = i420_buffer->StrideY();
      } else if (i == 1) {
        source_data = i420_buffer->DataU();
        source_stride = i420_buffer->StrideU();
      } else if (i == 2) {
        source_data = i420_buffer->DataV();
        source_stride = i420_buffer->StrideV();
      } else {
        break;
      }
      NvBuffer::NvBufferPlane& plane = buffer->planes[i];
      std::streamsize bytes_to_read = plane.fmt.bytesperpixel * plane.fmt.width;
      uint8_t* input_data = plane.data;
      plane.bytesused = 0;
      // Row-by-row copy because source stride and hardware stride differ.
      for (uint32_t j = 0; j < plane.fmt.height; j++) {
        memcpy(input_data, source_data + (source_stride * j), bytes_to_read);
        input_data += plane.fmt.stride;
      }
      plane.bytesused = plane.fmt.stride * plane.fmt.height;
    }
    // Stamp the frame's microsecond timestamp for callback matching.
    v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
    v4l2_buf.timestamp.tv_sec =
        input_frame.timestamp_us() / rtc::kNumMicrosecsPerSec;
    v4l2_buf.timestamp.tv_usec =
        input_frame.timestamp_us() % rtc::kNumMicrosecsPerSec;
    // Flush CPU writes so the encoder (device) sees the pixel data.
    for (int i = 0; i < MAX_PLANES; i++) {
      NvBufSurface* surf = 0;
      if (NvBufSurfaceFromFd(buffer->planes[i].fd, (void**)(&surf)) == -1) {
        RTC_LOG(LS_ERROR) << __FUNCTION__ << "Failed to NvBufSurfaceFromFd";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
      if (NvBufSurfaceSyncForDevice(surf, 0, i) == -1) {
        RTC_LOG(LS_ERROR) << "Failed to NvBufSurfaceSyncForDevice";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    }
    if (encoder_->output_plane.qBuffer(v4l2_buf, nullptr) < 0) {
      RTC_LOG(LS_ERROR) << "Failed to qBuffer at encoder output_plane";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
  551. int32_t JetsonVideoEncoder::SendFrame(
  552. unsigned char* buffer,
  553. size_t size,
  554. std::unique_ptr<FrameParams> params,
  555. v4l2_ctrl_videoenc_outputbuf_metadata* enc_metadata) {
  556. if (!callback_) {
  557. RTC_LOG(LS_WARNING)
  558. << "InitEncode() has been called, but a callback function "
  559. << "has not been set with RegisterEncodeCompleteCallback()";
  560. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  561. }
  562. // encoded_image_.SetRtpTimestamp(params->timestamp_rtp);
  563. encoded_image_.SetTimestamp(params->timestamp_rtp);
  564. encoded_image_.SetColorSpace(params->color_space);
  565. encoded_image_._encodedWidth = params->width;
  566. encoded_image_._encodedHeight = params->height;
  567. encoded_image_.capture_time_ms_ = params->render_time_ms;
  568. encoded_image_.ntp_time_ms_ = params->ntp_time_ms;
  569. encoded_image_.rotation_ = params->rotation;
  570. encoded_image_.qp_ = enc_metadata->AvgQP;
  571. // if (enc_metadata->KeyFrame) {
  572. // encoded_image_.SetFrameType(webrtc::VideoFrameType::kVideoFrameKey);
  573. // } else {
  574. // encoded_image_.SetFrameType(webrtc::VideoFrameType::kVideoFrameDelta);
  575. // }
  576. if (enc_metadata->KeyFrame) {
  577. encoded_image_._frameType= webrtc::VideoFrameType::kVideoFrameKey;
  578. } else {
  579. encoded_image_._frameType= webrtc::VideoFrameType::kVideoFrameDelta;
  580. }
  581. webrtc::CodecSpecificInfo codec_specific;
  582. codec_specific.codecType = codec_.codecType;
  583. auto encoded_image_buffer =
  584. webrtc::EncodedImageBuffer::Create(buffer, size);
  585. encoded_image_.SetEncodedData(encoded_image_buffer);
  586. codec_specific.codecSpecific.H264.packetization_mode =
  587. webrtc::H264PacketizationMode::NonInterleaved;
  588. // webrtc::H264PacketizationMode::SingleNalUnit;
  589. RTC_LOG(LS_VERBOSE) << "key_frame=" << enc_metadata->KeyFrame
  590. << " size=" << size << " qp=" << encoded_image_.qp_;
  591. webrtc::EncodedImageCallback::Result result =
  592. callback_->OnEncodedImage(encoded_image_, &codec_specific);
  593. if (result.error != webrtc::EncodedImageCallback::Result::OK) {
  594. RTC_LOG(LS_ERROR) << __FUNCTION__
  595. << " OnEncodedImage failed error:" << result.error;
  596. return WEBRTC_VIDEO_CODEC_ERROR;
  597. }
  598. bitrate_adjuster_->Update(size);
  599. return WEBRTC_VIDEO_CODEC_OK;
  600. }
  601. } // namespace webrtc