// jetson_nv_encoder.cpp
  1. #include <limits>
  2. #include <string>
  3. // WebRTC
  4. #include <common_video/libyuv/include/webrtc_libyuv.h>
  5. #include <modules/video_coding/codecs/h264/include/h264.h>
  6. #include <modules/video_coding/include/video_codec_interface.h>
  7. #include <modules/video_coding/include/video_error_codes.h>
  8. // #include <modules/video_coding/svc/create_scalability_structure.h>
  9. #include <rtc_base/checks.h>
  10. #include <rtc_base/logging.h>
  11. #include <rtc_base/time_utils.h>
  12. #include <system_wrappers/include/metrics.h>
  13. // libyuv
  14. #include <libyuv/convert.h>
  15. #include <libyuv/convert_from.h>
  16. #include <libyuv/video_common.h>
  17. // L4T Multimedia API
  18. #include "NvBufSurface.h"
  19. #include "NvVideoEncoder.h"
  20. #include "nvbufsurface.h"
  21. #include "nvbufsurftransform.h"
  22. #include "jetson_nv_encoder.h"
  23. #define H264HWENC_HEADER_DEBUG 0
  24. #define INIT_ERROR(cond, desc) \
  25. if (cond) { \
  26. RTC_LOG(LS_ERROR) << __FUNCTION__ << desc; \
  27. Release(); \
  28. return WEBRTC_VIDEO_CODEC_ERROR; \
  29. }
  30. namespace webrtc {
// Constructs the encoder wrapper without touching the hardware; the actual
// NvVideoEncoder instance is created lazily by JetsonConfigure() on the
// first Encode() call.
// NOTE(review): the |codec| parameter is currently unused.
JetsonVideoEncoder::JetsonVideoEncoder(const cricket::VideoCodec& codec)
    : callback_(nullptr),
      encoder_(nullptr),
      configured_framerate_(30),
      use_native_(false),
      use_dmabuff_(false) {}
// Releases the hardware encoder (if one was configured) before destruction.
JetsonVideoEncoder::~JetsonVideoEncoder() {
  Release();
}
  40. // bool JetsonVideoEncoder::IsSupported(webrtc::VideoCodecType codec) {
  41. // //SuppressErrors sup;
  42. // printf("----------------------------------------------------------------------------------issupported\n");
  43. // auto encoder = NvVideoEncoder::createVideoEncoder("enc0");
  44. // // auto ret = encoder->setCapturePlaneFormat(VideoCodecToV4L2Format(codec), 1024,
  45. // // 768, 2 * 1024 * 1024);
  46. // auto ret = encoder->setCapturePlaneFormat(V4L2_PIX_FMT_H264, 1280,
  47. // 720, 2 * 1024 * 1024);
  48. // delete encoder;
  49. // return ret >= 0;
  50. // }
// Captures the codec settings (resolution, start bitrate, max framerate,
// H.264 key-frame interval) and initializes the reusable EncodedImage
// metadata. The hardware encoder itself is NOT created here; configuration
// is deferred to JetsonConfigure() on the first Encode() call.
// Returns WEBRTC_VIDEO_CODEC_OK, or the error from Release() if tearing
// down a previous session failed.
int32_t JetsonVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                       int32_t number_of_cores,
                                       size_t max_payload_size) {
  RTC_DCHECK(codec_settings);
  // Re-initialization: drop any previously configured encoder first.
  int32_t release_ret = Release();
  if (release_ret != WEBRTC_VIDEO_CODEC_OK) {
    return release_ret;
  }
  // Guard against self-copy when the caller passes our own codec_ back in.
  if (&codec_ != codec_settings) {
    codec_ = *codec_settings;
  }
  width_ = codec_settings->width;
  height_ = codec_settings->height;
  // startBitrate is in kbit/s; we keep bit/s internally.
  target_bitrate_bps_ = codec_settings->startBitrate * 1000;
  key_frame_interval_ = codec_settings->H264().keyFrameInterval;
  framerate_ = codec_settings->maxFramerate;
  RTC_LOG(LS_INFO) << "InitEncode " << framerate_ << "fps "
                   << target_bitrate_bps_ << "bit/sec "
                   << codec_settings->maxBitrate << "kbit/sec ";
  // Initialize encoded image. Timing flags are invalid until frames flow.
  encoded_image_.timing_.flags =
      webrtc::VideoSendTiming::TimingFrameFlags::kInvalid;
  encoded_image_.content_type_ =
      (codec_settings->mode == webrtc::VideoCodecMode::kScreensharing)
          ? webrtc::VideoContentType::SCREENSHARE
          : webrtc::VideoContentType::UNSPECIFIED;
  gof_idx_ = 0;
  RTC_LOG(LS_INFO) << __FUNCTION__ << " End";
  return WEBRTC_VIDEO_CODEC_OK;
}
// Releases the hardware encoder. Safe to call when nothing is configured.
// Always reports success because JetsonRelease() has no failure path.
int32_t JetsonVideoEncoder::Release() {
  JetsonRelease();
  return WEBRTC_VIDEO_CODEC_OK;
}
  86. int32_t JetsonVideoEncoder::JetsonConfigure() {
  87. printf("int32_t JetsonVideoEncoder::JetsonConfigure()\n");
  88. int ret = 0;
  89. bool use_converter =
  90. use_native_ && (width_ != raw_width_ || height_ != raw_height_ ||
  91. decode_pixfmt_ != V4L2_PIX_FMT_YUV420M);
  92. std::cout << "use native" << " "<< use_converter << std::endl;
  93. encoder_ = NvVideoEncoder::createVideoEncoder("enc0");
  94. INIT_ERROR(!encoder_, "Failed to createVideoEncoder");
  95. ret =encoder_->setCapturePlaneFormat(V4L2_PIX_FMT_H264,width_, height_, 2 * 1024 * 1024);
  96. // printf("width_;%d, height_:%d\n",width_,height_);
  97. INIT_ERROR(ret < 0, "Failed to encoder setCapturePlaneFormat");
  98. ret = encoder_->setOutputPlaneFormat(V4L2_PIX_FMT_YUV420M, width_, height_);
  99. INIT_ERROR(ret < 0, "Failed to encoder setOutputPlaneFormat");
  100. if (codec_.codecType == webrtc::kVideoCodecH264) {
  101. // printf("000000000000000000000000000000000000000000000009876542345678992534567890789657463656789065456789087654345678909456780\n");
  102. // ret = encoder_->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_HIGH);
  103. ret = encoder_->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE);
  104. INIT_ERROR(ret < 0, "Failed to setProfile");
  105. // ret = encoder_->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_5_1);
  106. ret = encoder_->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_2_1);
  107. // INIT_ERROR(ret < 0, "Failed to setLevel");
  108. ret = encoder_->setNumBFrames(0);
  109. INIT_ERROR(ret < 0, "Failed to setNumBFrames");
  110. ret = encoder_->setInsertSpsPpsAtIdrEnabled(true);
  111. INIT_ERROR(ret < 0, "Failed to setInsertSpsPpsAtIdrEnabled");
  112. ret = encoder_->setInsertVuiEnabled(true);
  113. INIT_ERROR(ret < 0, "Failed to setInsertSpsPpsAtIdrEnabled");
  114. ret = encoder_->setHWPresetType(V4L2_ENC_HW_PRESET_FAST);
  115. // ret = encoder_->setHWPresetType(V4L2_ENC_HW_PRESET_ULTRAFAST);
  116. INIT_ERROR(ret < 0, "Failed to setHWPresetType");
  117. }
  118. ret = encoder_->setRateControlMode(V4L2_MPEG_VIDEO_BITRATE_MODE_CBR);
  119. INIT_ERROR(ret < 0, "Failed to setRateControlMode");
  120. bitrate_adjuster_.reset(new webrtc::BitrateAdjuster(.5, .95));
  121. bitrate_adjuster_->SetTargetBitrateBps(target_bitrate_bps_);
  122. SetBitrateBps(target_bitrate_bps_);
  123. ret = encoder_->setIDRInterval(key_frame_interval_);
  124. INIT_ERROR(ret < 0, "Failed to setIDRInterval");
  125. // ret = encoder_->setIFrameInterval(0);
  126. ret = encoder_->setIFrameInterval(0);
  127. INIT_ERROR(ret < 0, "Failed to setIFrameInterval");
  128. // ret = encoder_->setFrameRate(framerate_, 1);
  129. ret = encoder_->setFrameRate(30, 1);
  130. INIT_ERROR(ret < 0, "Failed to setFrameRate");
  131. // if (use_native_) {
  132. // std::cout << "use native -------------------------------------------\n" << std::endl;
  133. // if (use_dmabuff_ || use_converter) {
  134. // std::cout << "use use_converter -------------------------------------------\n" << std::endl;
  135. // ret = encoder_->output_plane.reqbufs(V4L2_MEMORY_DMABUF, 10);
  136. // INIT_ERROR(ret < 0, "Failed to reqbufs at encoder output_plane");
  137. // int fd;
  138. // NvBufSurf::NvCommonAllocateParams cParams;
  139. // cParams.width = width_;
  140. // cParams.height = height_;
  141. // cParams.layout = NVBUF_LAYOUT_PITCH;
  142. // cParams.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
  143. // cParams.memtag = NvBufSurfaceTag_VIDEO_ENC;
  144. // cParams.memType = NVBUF_MEM_SURFACE_ARRAY;
  145. // for (uint32_t i = 0; i < encoder_->output_plane.getNumBuffers(); i++) {
  146. // ret = NvBufSurf::NvAllocate(&cParams, 1, &fd);
  147. // INIT_ERROR(ret, "Failed to create NvBuffer");
  148. // RTC_LOG(LS_ERROR) << "NvBufferCreateEx i:" << i << " fd:" << fd;
  149. // output_plane_fd_[i] = fd;
  150. // }
  151. // } else {
  152. // ret = encoder_->output_plane.setupPlane(V4L2_MEMORY_USERPTR, 1, false,
  153. // false);
  154. // INIT_ERROR(ret < 0, "Failed to setupPlane at encoder output_plane");
  155. // }
  156. // } else {
  157. ret = encoder_->output_plane.setupPlane(V4L2_MEMORY_MMAP, 1, true, false);
  158. INIT_ERROR(ret < 0, "Failed to setupPlane at encoder output_plane");
  159. // }
  160. ret = encoder_->capture_plane.setupPlane(V4L2_MEMORY_MMAP, 1, true, false);
  161. INIT_ERROR(ret < 0, "Failed to setupPlane at capture_plane");
  162. ret = encoder_->subscribeEvent(V4L2_EVENT_EOS, 0, 0);
  163. INIT_ERROR(ret < 0, "Failed to subscribeEvent V4L2_EVENT_EOS");
  164. ret = encoder_->output_plane.setStreamStatus(true);
  165. INIT_ERROR(ret < 0, "Failed to setStreamStatus at encoder output_plane");
  166. ret = encoder_->capture_plane.setStreamStatus(true);
  167. INIT_ERROR(ret < 0, "Failed to setStreamStatus at encoder capture_plane");
  168. encoder_->capture_plane.setDQThreadCallback(EncodeFinishedCallbackFunction);
  169. encoder_->capture_plane.startDQThread(this);
  170. for (uint32_t i = 0; i < encoder_->capture_plane.getNumBuffers(); i++) {
  171. struct v4l2_buffer v4l2_buf;
  172. struct v4l2_plane planes[MAX_PLANES];
  173. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  174. memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
  175. v4l2_buf.index = i;
  176. v4l2_buf.m.planes = planes;
  177. ret = encoder_->capture_plane.qBuffer(v4l2_buf, NULL);
  178. INIT_ERROR(ret < 0, "Failed to qBuffer at encoder capture_plane");
  179. }
  180. configured_framerate_ = framerate_;
  181. return WEBRTC_VIDEO_CODEC_OK;
  182. }
// Stops the encoder and frees all of its resources. No-op when the encoder
// has not been configured yet.
void JetsonVideoEncoder::JetsonRelease() {
  if (!encoder_)
    return;
  // Queue a zero-length buffer so the capture-plane DQ thread sees EOS.
  SendEOS();
  // Give the DQ thread up to 2 seconds to drain and exit.
  encoder_->capture_plane.waitForDQThread(2000);
  encoder_->capture_plane.deinitPlane();
  if (use_dmabuff_) {
    // DMABUF path: unmap and destroy each NvBuffer fd we allocated.
    for (uint32_t i = 0; i < encoder_->output_plane.getNumBuffers(); i++) {
      if (encoder_->output_plane.unmapOutputBuffers(i, output_plane_fd_[i]) <
          0) {
        RTC_LOG(LS_ERROR)
            << "Failed to unmapOutputBuffers at encoder output_plane";
      }
      if (NvBufSurf::NvDestroy(output_plane_fd_[i]) < 0) {
        RTC_LOG(LS_ERROR)
            << "Failed to NvBufferDestroy at encoder output_plane";
      }
    }
  } else {
    encoder_->output_plane.deinitPlane();
  }
  delete encoder_;
  encoder_ = nullptr;
}
  207. void JetsonVideoEncoder::SendEOS() {
  208. if (encoder_->output_plane.getStreamStatus()) {
  209. struct v4l2_buffer v4l2_buf;
  210. struct v4l2_plane planes[MAX_PLANES];
  211. NvBuffer* buffer;
  212. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  213. memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
  214. v4l2_buf.m.planes = planes;
  215. if (encoder_->output_plane.getNumQueuedBuffers() ==
  216. encoder_->output_plane.getNumBuffers()) {
  217. if (encoder_->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0) {
  218. RTC_LOG(LS_ERROR) << "Failed to dqBuffer at encoder output_plane";
  219. }
  220. }
  221. planes[0].bytesused = 0;
  222. for (int i = 0; i < buffer->n_planes; i++) {
  223. buffer->planes[i].bytesused = 0;
  224. }
  225. if (encoder_->output_plane.qBuffer(v4l2_buf, NULL) < 0) {
  226. RTC_LOG(LS_ERROR) << "Failed to qBuffer at encoder output_plane";
  227. }
  228. }
  229. }
  230. bool JetsonVideoEncoder::EncodeFinishedCallbackFunction(
  231. struct v4l2_buffer* v4l2_buf,
  232. NvBuffer* buffer,
  233. NvBuffer* shared_buffer,
  234. void* data) {
  235. return ((JetsonVideoEncoder*)data)
  236. ->EncodeFinishedCallback(v4l2_buf, buffer, shared_buffer);
  237. }
// Runs on the capture-plane DQ thread for every encoded buffer. Matches the
// buffer (by microsecond timestamp) to the FrameParams queued in Encode(),
// fetches the per-frame metadata (QP, key-frame flag) and forwards the
// bitstream via SendFrame(). Returning false stops the DQ thread; skipped
// frames return true so encoding continues.
bool JetsonVideoEncoder::EncodeFinishedCallback(struct v4l2_buffer* v4l2_buf,
                                                NvBuffer* buffer,
                                                NvBuffer* shared_buffer) {
  if (!v4l2_buf) {
    RTC_LOG(LS_INFO) << __FUNCTION__ << " v4l2_buf is null";
    return false;
  }
  if (buffer->planes[0].bytesused == 0) {
    // Zero-length buffer is the EOS marker injected by SendEOS().
    RTC_LOG(LS_INFO) << __FUNCTION__ << " buffer size is zero";
    return false;
  }
  // Reconstruct the microsecond timestamp stamped onto the buffer in
  // Encode() (V4L2_BUF_FLAG_TIMESTAMP_COPY carries it through the encoder).
  uint64_t timestamp = v4l2_buf->timestamp.tv_sec * rtc::kNumMicrosecsPerSec +
                       v4l2_buf->timestamp.tv_usec;
  std::unique_ptr<FrameParams> params;
  {
    webrtc::MutexLock lock(&frame_params_lock_);
    // Pop entries until we reach one at or past this buffer's timestamp;
    // earlier entries belong to frames the encoder dropped.
    do {
      if (frame_params_.empty()) {
        RTC_LOG(LS_WARNING)
            << __FUNCTION__
            << "Frame parameter is not found. SkipFrame timestamp:"
            << timestamp;
        return true;
      }
      params = std::move(frame_params_.front());
      frame_params_.pop();
    } while (params->timestamp_us < timestamp);
    // An exact match is required; otherwise skip this encoded frame.
    if (params->timestamp_us != timestamp) {
      RTC_LOG(LS_WARNING)
          << __FUNCTION__
          << "Frame parameter is not found. SkipFrame timestamp:" << timestamp;
      return true;
    }
  }
  // Per-buffer encoder metadata: average QP and key-frame flag.
  v4l2_ctrl_videoenc_outputbuf_metadata enc_metadata;
  if (encoder_->getMetadata(v4l2_buf->index, enc_metadata) != 0) {
    RTC_LOG(LS_WARNING) << __FUNCTION__
                        << "getMetadata failed. SkipFrame timestamp:"
                        << timestamp;
    return true;
  }
  // Hand the H.264 bitstream to WebRTC.
  SendFrame(buffer->planes[0].data, buffer->planes[0].bytesused,
            std::move(params), &enc_metadata);
  // Re-queue the buffer so the encoder can reuse it.
  if (encoder_->capture_plane.qBuffer(*v4l2_buf, NULL) < 0) {
    RTC_LOG(LS_ERROR) << __FUNCTION__ << "Failed to qBuffer at capture_plane";
    return false;
  }
  return true;
}
// Registers the sink that receives encoded frames from SendFrame().
int32_t JetsonVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
  292. void JetsonVideoEncoder::SetRates(const RateControlParameters& parameters) {
  293. // printf("SetRatesSetRatesSetRatesSetRatesSetRatesSetRatesSetRates\n");
  294. if (encoder_ == nullptr)
  295. return;
  296. if (parameters.bitrate.get_sum_bps() <= 0 || parameters.framerate_fps <= 0)
  297. return;
  298. RTC_LOG(LS_INFO) << __FUNCTION__ << " framerate:" << parameters.framerate_fps
  299. << " bitrate:" << parameters.bitrate.ToString();
  300. // if (svc_controller_) {
  301. // svc_controller_->OnRatesUpdated(parameters.bitrate);
  302. // }
  303. framerate_ = parameters.framerate_fps;
  304. target_bitrate_bps_ = parameters.bitrate.get_sum_bps();
  305. bitrate_adjuster_->SetTargetBitrateBps(target_bitrate_bps_);
  306. return;
  307. }
  308. void JetsonVideoEncoder::SetFramerate(uint32_t framerate) {
  309. if (configured_framerate_ == framerate) {
  310. return;
  311. }
  312. RTC_LOG(LS_INFO) << __FUNCTION__ << " " << framerate << "fps";
  313. if (encoder_->setFrameRate(framerate, 1) < 0) {
  314. RTC_LOG(LS_ERROR) << "Failed to set bitrate";
  315. return;
  316. }
  317. configured_framerate_ = framerate;
  318. }
  319. void JetsonVideoEncoder::SetBitrateBps(uint32_t bitrate_bps) {
  320. if (bitrate_bps < 300000 || (configured_bitrate_bps_ == bitrate_bps &&
  321. configured_framerate_ == framerate_)) {
  322. return;
  323. }
  324. configured_bitrate_bps_ = bitrate_bps;
  325. if (encoder_->setBitrate(bitrate_bps) < 0) {
  326. RTC_LOG(LS_ERROR) << "Failed to setBitrate";
  327. return;
  328. }
  329. }
  330. webrtc::VideoEncoder::EncoderInfo JetsonVideoEncoder::GetEncoderInfo() const {
  331. EncoderInfo info;
  332. info.supports_native_handle = true;
  333. info.implementation_name = "Jetson Video Encoder";
  334. static const int kLowH264QpThreshold = 24; //34
  335. static const int kHighH264QpThreshold = 37; //40
  336. info.scaling_settings = VideoEncoder::ScalingSettings(kLowH264QpThreshold,
  337. kHighH264QpThreshold);
  338. return info;
  339. }
// Encodes one frame. Lazily configures the hardware encoder on first use,
// honours key-frame requests, records per-frame metadata on frame_params_
// (consumed later by EncodeFinishedCallback) and queues the raw frame on
// the encoder's output plane. The encoded result is delivered
// asynchronously on the DQ thread.
int32_t JetsonVideoEncoder::Encode(
    const webrtc::VideoFrame& input_frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  if (!callback_) {
    RTC_LOG(LS_WARNING)
        << "InitEncode() has been called, but a callback function "
        << "has not been set with RegisterEncodeCompleteCallback()";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // NOTE(review): |fd|, |video_type| and |native_data| are never assigned
  // anywhere in this function, yet the kNative branch below reads them. The
  // code that used to populate them (native/JPEG decoder path) appears to
  // have been removed — confirm the kNative path is unreachable in this
  // build before relying on it.
  int fd = 0;
  webrtc::VideoType video_type;
  uint8_t* native_data;
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer =
      input_frame.video_frame_buffer();
  if (frame_buffer->type() == webrtc::VideoFrameBuffer::Type::kNative) {
    use_native_ = true;
  } else {
    use_native_ = false;
  }
  // First frame (or first after Release()): bring up the hardware encoder.
  if (encoder_ == nullptr) {
    if (JetsonConfigure() != WEBRTC_VIDEO_CODEC_OK) {
      RTC_LOG(LS_ERROR) << "Failed to JetsonConfigure";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  bool force_key_frame = false;
  if (frame_types != nullptr) {
    // Single stream only (no simulcast layers).
    RTC_DCHECK_EQ(frame_types->size(), static_cast<size_t>(1));
    if ((*frame_types)[0] == webrtc::VideoFrameType::kEmptyFrame) {
      return WEBRTC_VIDEO_CODEC_OK;
    }
    if ((*frame_types)[0] == webrtc::VideoFrameType::kVideoFrameKey) {
      // Ask the hardware for an IDR on the next frame.
      if (encoder_->forceIDR() < 0) {
        RTC_LOG(LS_ERROR) << "Failed to forceIDR";
      }
    }
  }
  // Push the latest rate-control targets to the hardware.
  SetFramerate(framerate_);
  SetBitrateBps(bitrate_adjuster_->GetAdjustedBitrateBps());
  {
    // Remember this frame's metadata; it is matched back up by timestamp in
    // EncodeFinishedCallback().
    webrtc::MutexLock lock(&frame_params_lock_);
    frame_params_.push(absl::make_unique<FrameParams>(
        frame_buffer->width(), frame_buffer->height(),
        input_frame.render_time_ms(), input_frame.ntp_time_ms(),
        input_frame.timestamp_us(), input_frame.timestamp(),
        input_frame.rotation(), input_frame.color_space()));
  }
  struct v4l2_buffer v4l2_buf;
  struct v4l2_plane planes[MAX_PLANES];
  memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  memset(planes, 0, sizeof(planes));
  v4l2_buf.m.planes = planes;
  if (use_native_) {
    // Native-buffer path: hand raw plane data (or a DMABUF fd) to the
    // encoder and run a hardware transform into the output-plane buffer.
    NvBuffer* buffer;
    // When every output buffer is queued, dequeue one to reuse; otherwise
    // take the next never-queued buffer.
    if (encoder_->output_plane.getNumQueuedBuffers() ==
        encoder_->output_plane.getNumBuffers()) {
      if (encoder_->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0) {
        RTC_LOG(LS_ERROR) << "Failed to dqBuffer at encoder output_plane";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    } else {
      buffer = encoder_->output_plane.getNthBuffer(
          encoder_->output_plane.getNumQueuedBuffers());
      v4l2_buf.index = encoder_->output_plane.getNumQueuedBuffers();
    }
    int src_dma_fd = -1;
    if (use_dmabuff_) {
      src_dma_fd = fd;
    } else if (video_type == webrtc::VideoType::kYUY2 ||
               video_type == webrtc::VideoType::kUYVY) {
      // Packed formats: a single interleaved plane.
      buffer->planes[0].bytesused = buffer->planes[0].fmt.width *
                                    buffer->planes[0].fmt.bytesperpixel *
                                    buffer->planes[0].fmt.height;
      buffer->planes[0].data = native_data;
    } else if (video_type == webrtc::VideoType::kI420) {
      // Planar Y, U, V laid out consecutively in native_data.
      size_t offset = 0;
      for (int i = 0; i < buffer->n_planes; i++) {
        buffer->planes[i].bytesused = buffer->planes[i].fmt.width *
                                      buffer->planes[i].fmt.bytesperpixel *
                                      buffer->planes[i].fmt.height;
        buffer->planes[i].data = native_data + offset;
        offset += buffer->planes[i].bytesused;
      }
    } else if (video_type == webrtc::VideoType::kYV12) {
      // YV12 stores V before U, so planes 1 and 2 are swapped relative to
      // the I420 case above.
      size_t offset = 0;
      buffer->planes[0].bytesused = buffer->planes[0].fmt.width *
                                    buffer->planes[0].fmt.bytesperpixel *
                                    buffer->planes[0].fmt.height;
      buffer->planes[0].data = native_data;
      offset += buffer->planes[0].bytesused;
      buffer->planes[2].bytesused = buffer->planes[1].fmt.width *
                                    buffer->planes[1].fmt.bytesperpixel *
                                    buffer->planes[1].fmt.height;
      buffer->planes[2].data = native_data + offset;
      offset += buffer->planes[2].bytesused;
      buffer->planes[1].bytesused = buffer->planes[2].fmt.width *
                                    buffer->planes[2].fmt.bytesperpixel *
                                    buffer->planes[2].fmt.height;
      buffer->planes[1].data = native_data + offset;
    } else {
      RTC_LOG(LS_ERROR) << "Unsupported webrtc::VideoType";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    // Crop/scale the source into the encoder's input buffer on the VIC.
    NvBufSurf::NvCommonTransformParams transform_params;
    /* Indicates which of the transform parameters are valid */
    memset(&transform_params, 0, sizeof(transform_params));
    transform_params.src_top = 0;
    transform_params.src_left = 0;
    transform_params.src_width = raw_width_;
    transform_params.src_height = raw_height_;
    transform_params.dst_top = 0;
    transform_params.dst_left = 0;
    transform_params.dst_width = width_;
    transform_params.dst_height = height_;
    transform_params.flag =
        (NvBufSurfTransform_Transform_Flag)(NVBUFSURF_TRANSFORM_FILTER |
                                            NVBUFSURF_TRANSFORM_CROP_SRC);
    transform_params.flip = NvBufSurfTransform_None;
    transform_params.filter = NvBufSurfTransformInter_Bilinear;
    if (NvBufSurf::NvTransform(&transform_params, src_dma_fd,
                               output_plane_fd_[v4l2_buf.index])) {
      RTC_LOG(LS_ERROR) << "Failed to NvBufferTransform";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    planes[0].m.fd = output_plane_fd_[v4l2_buf.index];
    // NOTE(review): 1234 is a placeholder byte count — for DMABUF the
    // driver reads from the fd, but confirm this value is truly ignored.
    planes[0].bytesused = 1234;
    v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    v4l2_buf.memory = V4L2_MEMORY_DMABUF;
    // TIMESTAMP_COPY propagates this timestamp to the encoded buffer so
    // EncodeFinishedCallback() can match it to the FrameParams above.
    v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
    v4l2_buf.timestamp.tv_sec =
        input_frame.timestamp_us() / rtc::kNumMicrosecsPerSec;
    v4l2_buf.timestamp.tv_usec =
        input_frame.timestamp_us() % rtc::kNumMicrosecsPerSec;
    if (encoder_->output_plane.qBuffer(v4l2_buf, nullptr) < 0) {
      RTC_LOG(LS_ERROR) << "Failed to qBuffer at converter output_plane";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  } else {
    // Software path: convert to I420 and memcpy each plane row-by-row into
    // the MMAP'ed encoder input buffer (destination rows are fmt.stride
    // apart, which may exceed the packed source stride).
    NvBuffer* buffer;
    RTC_LOG(LS_VERBOSE) << __FUNCTION__ << " output_plane.getNumBuffers: "
                        << encoder_->output_plane.getNumBuffers()
                        << " output_plane.getNumQueuedBuffers: "
                        << encoder_->output_plane.getNumQueuedBuffers();
    if (encoder_->output_plane.getNumQueuedBuffers() ==
        encoder_->output_plane.getNumBuffers()) {
      if (encoder_->output_plane.dqBuffer(v4l2_buf, &buffer, NULL, 10) < 0) {
        RTC_LOG(LS_ERROR) << "Failed to dqBuffer at encoder output_plane";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    } else {
      buffer = encoder_->output_plane.getNthBuffer(
          encoder_->output_plane.getNumQueuedBuffers());
      v4l2_buf.index = encoder_->output_plane.getNumQueuedBuffers();
    }
    rtc::scoped_refptr<const webrtc::I420BufferInterface> i420_buffer =
        frame_buffer->ToI420();
    for (uint32_t i = 0; i < buffer->n_planes; i++) {
      const uint8_t* source_data;
      int source_stride;
      if (i == 0) {
        source_data = i420_buffer->DataY();
        source_stride = i420_buffer->StrideY();
      } else if (i == 1) {
        source_data = i420_buffer->DataU();
        source_stride = i420_buffer->StrideU();
      } else if (i == 2) {
        source_data = i420_buffer->DataV();
        source_stride = i420_buffer->StrideV();
      } else {
        break;
      }
      NvBuffer::NvBufferPlane& plane = buffer->planes[i];
      std::streamsize bytes_to_read = plane.fmt.bytesperpixel * plane.fmt.width;
      uint8_t* input_data = plane.data;
      plane.bytesused = 0;
      for (uint32_t j = 0; j < plane.fmt.height; j++) {
        memcpy(input_data, source_data + (source_stride * j), bytes_to_read);
        input_data += plane.fmt.stride;
      }
      plane.bytesused = plane.fmt.stride * plane.fmt.height;
    }
    // TIMESTAMP_COPY propagates this timestamp to the encoded buffer so
    // EncodeFinishedCallback() can match it to the FrameParams above.
    v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
    v4l2_buf.timestamp.tv_sec =
        input_frame.timestamp_us() / rtc::kNumMicrosecsPerSec;
    v4l2_buf.timestamp.tv_usec =
        input_frame.timestamp_us() % rtc::kNumMicrosecsPerSec;
    // Flush CPU writes so the encoder (device) sees the plane data.
    for (int i = 0; i < MAX_PLANES; i++) {
      NvBufSurface* surf = 0;
      if (NvBufSurfaceFromFd(buffer->planes[i].fd, (void**)(&surf)) == -1) {
        RTC_LOG(LS_ERROR) << __FUNCTION__ << "Failed to NvBufSurfaceFromFd";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
      if (NvBufSurfaceSyncForDevice(surf, 0, i) == -1) {
        RTC_LOG(LS_ERROR) << "Failed to NvBufSurfaceSyncForDevice";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    }
    if (encoder_->output_plane.qBuffer(v4l2_buf, nullptr) < 0) {
      RTC_LOG(LS_ERROR) << "Failed to qBuffer at encoder output_plane";
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
// Delivers one encoded H.264 access unit to WebRTC. Called from the
// capture-plane DQ thread with the bitstream |buffer|/|size|, the matching
// per-frame metadata captured in Encode(), and the encoder's output
// metadata (key-frame flag, average QP). Also feeds the actual frame size
// back into the bitrate adjuster.
int32_t JetsonVideoEncoder::SendFrame(
    unsigned char* buffer,
    size_t size,
    std::unique_ptr<FrameParams> params,
    v4l2_ctrl_videoenc_outputbuf_metadata* enc_metadata) {
  if (!callback_) {
    RTC_LOG(LS_WARNING)
        << "InitEncode() has been called, but a callback function "
        << "has not been set with RegisterEncodeCompleteCallback()";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // Newer WebRTC renames this to SetRtpTimestamp (see commented call).
  // encoded_image_.SetRtpTimestamp(params->timestamp_rtp);
  encoded_image_.SetTimestamp(params->timestamp_rtp);
  encoded_image_.SetColorSpace(params->color_space);
  encoded_image_._encodedWidth = params->width;
  encoded_image_._encodedHeight = params->height;
  encoded_image_.capture_time_ms_ = params->render_time_ms;
  encoded_image_.ntp_time_ms_ = params->ntp_time_ms;
  encoded_image_.rotation_ = params->rotation;
  encoded_image_.qp_ = enc_metadata->AvgQP;
  if (enc_metadata->KeyFrame) {
    encoded_image_._frameType = webrtc::VideoFrameType::kVideoFrameKey;
  } else {
    encoded_image_._frameType = webrtc::VideoFrameType::kVideoFrameDelta;
  }
  webrtc::CodecSpecificInfo codec_specific;
  codec_specific.codecType = codec_.codecType;
  // Copies the bitstream; the NvBuffer is re-queued right after we return.
  auto encoded_image_buffer =
      webrtc::EncodedImageBuffer::Create(buffer, size);
  encoded_image_.SetEncodedData(encoded_image_buffer);
  codec_specific.codecSpecific.H264.packetization_mode =
      webrtc::H264PacketizationMode::NonInterleaved;
  RTC_LOG(LS_VERBOSE) << "key_frame=" << enc_metadata->KeyFrame
                      << " size=" << size << " qp=" << encoded_image_.qp_;
  webrtc::EncodedImageCallback::Result result =
      callback_->OnEncodedImage(encoded_image_, &codec_specific);
  if (result.error != webrtc::EncodedImageCallback::Result::OK) {
    RTC_LOG(LS_ERROR) << __FUNCTION__
                      << " OnEncodedImage failed error:" << result.error;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // Report the realized frame size so GetAdjustedBitrateBps() can adapt.
  bitrate_adjuster_->Update(size);
  return WEBRTC_VIDEO_CODEC_OK;
}
  597. } // namespace webrtc