// jetson_h264_encode.cpp
  1. /*
  2. * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. *
  10. */
  11. // Everything declared/defined in this header is only required when WebRTC is
  12. // build with H264 support, please do not move anything out of the
  13. // #ifdef unless needed and tested.
  14. #include <limits>
  15. #include <string>
  16. #include "absl/strings/match.h"
  17. #include "common_video/libyuv/include/webrtc_libyuv.h"
  18. #include "modules/video_coding/utility/simulcast_rate_allocator.h"
  19. #include "modules/video_coding/utility/simulcast_utility.h"
  20. #include "rtc_base/checks.h"
  21. #include "rtc_base/logging.h"
  22. #include "rtc_base/time_utils.h"
  23. #include "system_wrappers/include/metrics.h"
  24. #include "third_party/libyuv/include/libyuv/convert.h"
  25. #include "third_party/libyuv/include/libyuv/scale.h"
  26. #include "third_party/openh264/src/codec/api/svc/codec_api.h"
  27. #include "third_party/openh264/src/codec/api/svc/codec_app_def.h"
  28. #include "third_party/openh264/src/codec/api/svc/codec_def.h"
  29. #include "third_party/openh264/src/codec/api/svc/codec_ver.h"
  30. #include "/home/nvidia/devdata/ZJ_PRO_JET/webrtcinterop/jetson_h264_encode.h"
  31. #include "/home/nvidia/devdata/ZJ_PRO_JET/webrtcinterop/include/NvUtils.h"
  32. using namespace std;
  33. #define IS_DIGIT(c) (c >= '0' && c <= '9')
  34. #define MICROSECOND_UNIT 1000000
  35. namespace webrtc {
  36. namespace {
// Verbosity switch for the encoder implementation; currently disabled.
// NOTE(review): not referenced in the visible portion of this file — confirm
// it is still consumed before relying on it.
const bool kOpenH264EncoderDetailedLogging = false;
// QP scaling thresholds.
// NOTE(review): presumably fed to WebRTC's quality scaler (low/high QP bounds)
// — confirm against this encoder's GetEncoderInfo()/ScalingSettings.
static const int kLowH264QpThreshold = 24;
static const int kHighH264QpThreshold = 37;
// Used by histograms. Values of entries should not be changed.
enum H264EncoderImplEvent {
  kH264EncoderEventInit = 0,
  kH264EncoderEventError = 1,
  // NOTE(review): appears to be the histogram bucket bound, not a real event.
  kH264EncoderEventMax = 16,
};
  47. //
  48. // void copyFrame(AVFrame *frame, const webrtc::I420BufferInterface *buffer) {
  49. // frame->width = buffer->width();
  50. // frame->height = buffer->height();
  51. // frame->format = AV_PIX_FMT_YUV420P;
  52. // frame->data[kYPlaneIndex] = const_cast<uint8_t *>(buffer->DataY());
  53. // frame->data[kUPlaneIndex] = const_cast<uint8_t *>(buffer->DataU());
  54. // frame->data[kVPlaneIndex] = const_cast<uint8_t *>(buffer->DataV());
  55. // }
  56. int NumberOfThreads(int width, int height, int number_of_cores) {
  57. // TODO(hbos): In Chromium, multiple threads do not work with sandbox on Mac,
  58. // see crbug.com/583348. Until further investigated, only use one thread.
  59. // if (width * height >= 1920 * 1080 && number_of_cores > 8) {
  60. // return 8; // 8 threads for 1080p on high perf machines.
  61. // } else if (width * height > 1280 * 960 && number_of_cores >= 6) {
  62. // return 3; // 3 threads for 1080p.
  63. // } else if (width * height > 640 * 480 && number_of_cores >= 3) {
  64. // return 2; // 2 threads for qHD/HD.
  65. // } else {
  66. // return 1; // 1 thread for VGA or less.
  67. // }
  68. // TODO(sprang): Also check sSliceArgument.uiSliceNum om GetEncoderPrams(),
  69. // before enabling multithreading here.
  70. return 1;
  71. }
/**
 * Abort on error.
 *
 * Flags the context as errored and asks the V4L2 encoder to abort its work.
 * NOTE(review): this file-local overload shadows the C library abort(); every
 * call site in this file passes a ctx, so the libc version is never hit here.
 *
 * @param ctx : Encoder context
 */
static void
abort(context_enc_t *ctx)
{
    ctx->got_error = true;
    ctx->enc->abort();
}
  83. static
  84. Crc* InitCrc(unsigned int CrcPolynomial)
  85. {
  86. unsigned short int i;
  87. unsigned short int j;
  88. unsigned int tempcrc;
  89. Crc *phCrc;
  90. phCrc = (Crc*) malloc (sizeof(Crc));
  91. if (phCrc == NULL)
  92. {
  93. cerr << "Mem allocation failed for Init CRC" <<endl;
  94. return NULL;
  95. }
  96. memset (phCrc, 0, sizeof(Crc));
  97. for (i = 0; i <= 255; i++)
  98. {
  99. tempcrc = i;
  100. for (j = 8; j > 0; j--)
  101. {
  102. if (tempcrc & 1)
  103. {
  104. tempcrc = (tempcrc >> 1) ^ CrcPolynomial;
  105. }
  106. else
  107. {
  108. tempcrc >>= 1;
  109. }
  110. }
  111. phCrc->CRCTable[i] = tempcrc;
  112. }
  113. phCrc->CrcValue = 0;
  114. return phCrc;
  115. }
  116. // static void set_defaults(context_enc_t & ctx)
  117. // {
  118. // memset(ctx, 0, sizeof(context_enc_t));
  119. // // ctx->in_file_path = "/home/nvidia/Desktop/env_enc/jetson_enc/build/output.yuv";
  120. // // ctx->out_file_path = "test.h264";
  121. // // ctx->width = 1280;
  122. // // ctx->height = 720;
  123. // ctx->encoder_pixfmt = V4L2_PIX_FMT_H264;
  124. // ctx->raw_pixfmt = V4L2_PIX_FMT_YUV420M;
  125. // ctx->bitrate = 4 * 1024 * 1024;
  126. // ctx->peak_bitrate = 0;
  127. // ctx->profile = V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
  128. // ctx->ratecontrol = V4L2_MPEG_VIDEO_BITRATE_MODE_CBR;
  129. // ctx->iframe_interval = 30;
  130. // ctx->externalRPS = false;
  131. // ctx->enableGDR = false;
  132. // ctx->enableROI = false;
  133. // ctx->bnoIframe = false;
  134. // ctx->bGapsInFrameNumAllowed = false;
  135. // ctx->bReconCrc = false;
  136. // ctx->enableLossless = false;
  137. // ctx->nH264FrameNumBits = 0;
  138. // ctx->nH265PocLsbBits = 0;
  139. // ctx->idr_interval = 256;
  140. // ctx->level = -1;
  141. // ctx->fps_n = 30;
  142. // ctx->fps_d = 1;
  143. // ctx->gdr_start_frame_number = 0xffffffff;
  144. // ctx->gdr_num_frames = 0xffffffff;
  145. // ctx->gdr_out_frame_number = 0xffffffff;
  146. // ctx->num_b_frames = (uint32_t) -1;
  147. // ctx->nMinQpI = (uint32_t)QP_RETAIN_VAL;
  148. // ctx->nMaxQpI = (uint32_t)QP_RETAIN_VAL;
  149. // ctx->nMinQpP = (uint32_t)QP_RETAIN_VAL;
  150. // ctx->nMaxQpP = (uint32_t)QP_RETAIN_VAL;
  151. // ctx->nMinQpB = (uint32_t)QP_RETAIN_VAL;
  152. // ctx->nMaxQpB = (uint32_t)QP_RETAIN_VAL;
  153. // ctx->use_gold_crc = false;
  154. // ctx->pBitStreamCrc = NULL;
  155. // ctx->externalRCHints = false;
  156. // ctx->input_metadata = false;
  157. // ctx->sMaxQp = 51;
  158. // ctx->stats = false;
  159. // ctx->stress_test = 1;
  160. // ctx->output_memory_type = V4L2_MEMORY_DMABUF;
  161. // ctx->capture_memory_type = V4L2_MEMORY_MMAP;
  162. // ctx->cs = V4L2_COLORSPACE_SMPTE170M;
  163. // ctx->copy_timestamp = false;
  164. // ctx->sar_width = 0;
  165. // ctx->sar_height = 0;
  166. // ctx->start_ts = 0;
  167. // ctx->max_perf = 0;
  168. // ctx->blocking_mode = 1;
  169. // ctx->startf = 0;
  170. // ctx->endf = 0;
  171. // ctx->num_output_buffers = 6;
  172. // ctx->num_frames_to_encode = -1;
  173. // ctx->poc_type = 0;
  174. // ctx->chroma_format_idc = -1;
  175. // ctx->bit_depth = 8;
  176. // ctx->is_semiplanar = false;
  177. // ctx->enable_initQP = false;
  178. // ctx->IinitQP = 0;
  179. // ctx->PinitQP = 0;
  180. // ctx->BinitQP = 0;
  181. // ctx->enable_ratecontrol = true;
  182. // ctx->enable_av1tile = false;
  183. // ctx->log2_num_av1rows = 0;
  184. // ctx->log2_num_av1cols = 0;
  185. // ctx->enable_av1ssimrdo = (uint8_t)-1;
  186. // ctx->disable_av1cdfupdate = (uint8_t)-1;
  187. // ctx->ppe_init_params.enable_ppe = false;
  188. // ctx->ppe_init_params.wait_time_ms = -1;
  189. // ctx->ppe_init_params.feature_flags = V4L2_PPE_FEATURE_NONE;
  190. // ctx->ppe_init_params.enable_profiler = 0;
  191. // ctx->ppe_init_params.taq_max_qp_delta = 5;
  192. // /* TAQ for B-frames is enabled by default */
  193. // ctx->ppe_init_params.taq_b_frame_mode = 1;
  194. // }
  195. static
  196. void CalculateCrc(Crc *phCrc, unsigned char *buffer, uint32_t count)
  197. {
  198. unsigned char *p;
  199. unsigned int temp1;
  200. unsigned int temp2;
  201. unsigned int crc = phCrc->CrcValue;
  202. unsigned int *CRCTable = phCrc->CRCTable;
  203. if(!count)
  204. return;
  205. p = (unsigned char *) buffer;
  206. while (count-- != 0)
  207. {
  208. temp1 = (crc >> 8) & 0x00FFFFFFL;
  209. temp2 = CRCTable[((unsigned int) crc ^ *p++) & 0xFF];
  210. crc = temp1 ^ temp2;
  211. }
  212. phCrc->CrcValue = crc;
  213. }
/**
 * Encoder polling thread loop function.
 *
 * Runs until an error is flagged or EOS is seen. Each iteration blocks on
 * pollthread_sema until the encoder thread requests a poll, issues a blocking
 * DevicePoll on the V4L2 encoder device, then posts encoderthread_sema so the
 * encoder thread can continue.
 *
 * @param arg : context_enc_t pointer for this encode session (passed as void*)
 */
static void *encoder_pollthread_fcn(void *arg)
{
    context_enc_t *ctx = (context_enc_t *) arg;
    v4l2_ctrl_video_device_poll devicepoll;
    cout << "Starting Device Poll Thread " << endl;
    memset(&devicepoll, 0, sizeof(v4l2_ctrl_video_device_poll));
    /* wait here until signalled to issue the Poll call.
       Check if the abort status is set , if so exit
       Else issue the Poll on the encoder and block.
       When the Poll returns, signal the encoder thread to continue. */
    while (!ctx->got_error && !ctx->enc->isInError())
    {
        sem_wait(&ctx->pollthread_sema);
        if (ctx->got_eos)
        {
            cout << "Got eos, exiting poll thread \n";
            return NULL;
        }
        /* Wake on readable/writable/error/priority events. */
        devicepoll.req_events = POLLIN | POLLOUT | POLLERR | POLLPRI;
        /* This call shall wait in the v4l2 encoder library */
        ctx->enc->DevicePoll(&devicepoll);
        /* Can check the devicepoll.resp_events bitmask to see which events are set. */
        sem_post(&ctx->encoderthread_sema);
    }
    return NULL;
}
  245. static int
  246. write_encoder_output_frame(ofstream * stream, NvBuffer * buffer)
  247. {
  248. stream->write((char *) buffer->planes[0].data, buffer->planes[0].bytesused);
  249. return 0;
  250. }
/* Capture-plane dequeue callback: invoked once per encoded buffer.
 *
 * Writes the bitstream to the output file (plus optional bitstream CRC, GDR
 * output, metadata and motion-vector reporting) and re-queues the buffer.
 * Returning false stops the dq thread (EOS or error); true keeps it running.
 */
static bool
encoder_capture_plane_dq_callback(struct v4l2_buffer *v4l2_buf, NvBuffer * buffer,
                                  NvBuffer * shared_buffer, void *arg)
{
    /* NOTE(review): debug separator left from bring-up; consider removing. */
    printf("------------------------------------\n");
    context_enc_t *ctx = (context_enc_t *) arg;
    NvVideoEncoder *enc = ctx->enc;
    pthread_setname_np(pthread_self(), "EncCapPlane");
    /* -1 because the buffer just dequeued has already been counted. */
    uint32_t frame_num = ctx->enc->capture_plane.getTotalDequeuedBuffers() - 1;
    uint32_t ReconRef_Y_CRC = 0;
    uint32_t ReconRef_U_CRC = 0;
    uint32_t ReconRef_V_CRC = 0;
    /* Persists across callbacks; starts at 1 (see sps+pps note below). */
    static uint32_t num_encoded_frames = 1;
    struct v4l2_event ev;
    int ret = 0;
    if (v4l2_buf == NULL)
    {
        cout << "Error while dequeing buffer from output plane" << endl;
        abort(ctx);
        return false;
    }
    /* With encoder commands, a LAST-flagged buffer plus a dequeued EOS event
       marks the end of the stream. */
    if (ctx->b_use_enc_cmd)
    {
        if(v4l2_buf->flags & V4L2_BUF_FLAG_LAST)
        {
            memset(&ev,0,sizeof(struct v4l2_event));
            ret = ctx->enc->dqEvent(ev,1000);
            if (ret < 0)
                cout << "Error in dqEvent" << endl;
            if(ev.type == V4L2_EVENT_EOS)
                return false;
        }
    }
    /* Received EOS from encoder. Stop dqthread. */
    if (buffer->planes[0].bytesused == 0)
    {
        cout << "Got 0 size buffer in capture \n";
        return false;
    }
    /* Computing CRC with each frame */
    if(ctx->pBitStreamCrc)
        CalculateCrc (ctx->pBitStreamCrc, buffer->planes[0].data, buffer->planes[0].bytesused);
    if (!ctx->stats)
        write_encoder_output_frame(ctx->out_file, buffer);
    /* Accounting for the first frame as it is only sps+pps */
    if (ctx->gdr_out_frame_number != 0xFFFFFFFF)
        if ( (ctx->enableGDR) && (ctx->GDR_out_file_path) && (num_encoded_frames >= ctx->gdr_out_frame_number+1))
            write_encoder_output_frame(ctx->gdr_out_file, buffer);
    num_encoded_frames++;
    if (ctx->report_metadata)
    {
        v4l2_ctrl_videoenc_outputbuf_metadata enc_metadata;
        if (ctx->enc->getMetadata(v4l2_buf->index, enc_metadata) == 0)
        {
            if (ctx->bReconCrc && enc_metadata.bValidReconCRC) {
                /* CRC for Recon frame */
                cout << "Frame: " << frame_num << endl;
                cout << "ReconFrame_Y_CRC " << enc_metadata.ReconFrame_Y_CRC <<
                    " ReconFrame_U_CRC " << enc_metadata.ReconFrame_U_CRC <<
                    " ReconFrame_V_CRC " << enc_metadata.ReconFrame_V_CRC <<
                    endl;
                /* Read the per-frame reference CRCs from the CSV file, then
                   compare against the encoder-reported reconstruction CRCs. */
                if (!ctx->recon_Ref_file->eof())
                {
                    string recon_ref_YUV_data[4];
                    parse_csv_recon_file(ctx->recon_Ref_file, recon_ref_YUV_data);
                    ReconRef_Y_CRC = stoul(recon_ref_YUV_data[0]);
                    ReconRef_U_CRC = stoul(recon_ref_YUV_data[1]);
                    ReconRef_V_CRC = stoul(recon_ref_YUV_data[2]);
                }
                if ((ReconRef_Y_CRC != enc_metadata.ReconFrame_Y_CRC) ||
                    (ReconRef_U_CRC != enc_metadata.ReconFrame_U_CRC) ||
                    (ReconRef_V_CRC != enc_metadata.ReconFrame_V_CRC))
                {
                    cout << "Recon CRC FAIL" << endl;
                    cout << "ReconRef_Y_CRC " << ReconRef_Y_CRC <<
                        " ReconRef_U_CRC " << ReconRef_U_CRC <<
                        " ReconRef_V_CRC " << ReconRef_V_CRC <<
                        endl;
                    abort(ctx);
                    return false;
                }
                cout << "Recon CRC PASS for frame : " << frame_num << endl;
            } else if (ctx->externalRPS && enc_metadata.bRPSFeedback_status) {
                /* RPS Feedback */
                ctx->rps_par.nActiveRefFrames = enc_metadata.nActiveRefFrames;
                cout << "Frame: " << frame_num << endl;
                cout << "nCurrentRefFrameId " << enc_metadata.nCurrentRefFrameId <<
                    " nActiveRefFrames " << enc_metadata.nActiveRefFrames << endl;
                for (uint32_t i = 0; i < enc_metadata.nActiveRefFrames; i++)
                {
                    /* Update RPS List */
                    ctx->rps_par.rps_list[i].nFrameId = enc_metadata.RPSList[i].nFrameId;
                    ctx->rps_par.rps_list[i].bLTRefFrame = enc_metadata.RPSList[i].bLTRefFrame;
                    cout << "FrameId " << enc_metadata.RPSList[i].nFrameId <<
                        " IdrFrame " << (int) enc_metadata.RPSList[i].bIdrFrame <<
                        " LTRefFrame " << (int) enc_metadata.RPSList[i].bLTRefFrame <<
                        " PictureOrderCnt " << enc_metadata.RPSList[i].nPictureOrderCnt <<
                        " FrameNum " << enc_metadata.RPSList[i].nFrameNum <<
                        " LTFrameIdx " << enc_metadata.RPSList[i].nLTRFrameIdx << endl;
                }
            } else if (ctx->externalRCHints) {
                /* Rate Control Feedback */
                cout << "Frame: " << frame_num << endl;
                cout << "EncodedBits " << enc_metadata.EncodedFrameBits <<
                    " MinQP " << enc_metadata.FrameMinQP <<
                    " MaxQP " << enc_metadata.FrameMaxQP <<
                    endl;
            } else {
                /* Default per-frame statistics line. */
                cout << "Frame " << frame_num <<
                    ": isKeyFrame=" << (int) enc_metadata.KeyFrame <<
                    " AvgQP=" << enc_metadata.AvgQP <<
                    " MinQP=" << enc_metadata.FrameMinQP <<
                    " MaxQP=" << enc_metadata.FrameMaxQP <<
                    " EncodedBits=" << enc_metadata.EncodedFrameBits <<
                    endl;
            }
        }
    }
    if (ctx->dump_mv)
    {
        /* Get motion vector parameters of the frames from encoder */
        v4l2_ctrl_videoenc_outputbuf_metadata_MV enc_mv_metadata;
        if (ctx->enc->getMotionVectors(v4l2_buf->index, enc_mv_metadata) == 0)
        {
            uint32_t numMVs = enc_mv_metadata.bufSize / sizeof(MVInfo);
            MVInfo *pInfo = enc_mv_metadata.pMVInfo;
            cout << "Frame " << frame_num << ": Num MVs=" << numMVs << endl;
            for (uint32_t i = 0; i < numMVs; i++, pInfo++)
            {
                cout << i << ": mv_x=" << pInfo->mv_x <<
                    " mv_y=" << pInfo->mv_y <<
                    " weight=" << pInfo->weight <<
                    endl;
            }
        }
    }
    /* In blocking mode with three-layer-SVC RPS, wake the waiting thread. */
    if (ctx->blocking_mode && ctx->RPS_threeLayerSvc)
    {
        sem_post(&ctx->rps_par.sema);
    }
    /* encoder qbuffer for capture plane */
    if (enc->capture_plane.qBuffer(*v4l2_buf, NULL) < 0)
    {
        cerr << "Error while Qing buffer at capture plane" << endl;
        abort(ctx);
        return false;
    }
    return true;
}
  400. static int
  401. setup_output_dmabuf(context_enc_t *ctx, uint32_t num_buffers )
  402. {
  403. int ret=0;
  404. NvBufSurf::NvCommonAllocateParams cParams;
  405. int fd;
  406. ret = ctx->enc->output_plane.reqbufs(V4L2_MEMORY_DMABUF,num_buffers);
  407. if(ret)
  408. {
  409. cerr << "reqbufs failed for output plane V4L2_MEMORY_DMABUF" << endl;
  410. return ret;
  411. }
  412. for (uint32_t i = 0; i < ctx->enc->output_plane.getNumBuffers(); i++)
  413. {
  414. cParams.width = ctx->width;
  415. cParams.height = ctx->height;
  416. cParams.layout = NVBUF_LAYOUT_PITCH;
  417. switch (ctx->cs)
  418. {
  419. case V4L2_COLORSPACE_REC709:
  420. cParams.colorFormat = ctx->enable_extended_colorformat ?
  421. NVBUF_COLOR_FORMAT_YUV420_709_ER : NVBUF_COLOR_FORMAT_YUV420_709;
  422. break;
  423. case V4L2_COLORSPACE_SMPTE170M:
  424. default:
  425. cParams.colorFormat = ctx->enable_extended_colorformat ?
  426. NVBUF_COLOR_FORMAT_YUV420_ER : NVBUF_COLOR_FORMAT_YUV420;
  427. }
  428. if (ctx->is_semiplanar)
  429. {
  430. cParams.colorFormat = NVBUF_COLOR_FORMAT_NV12;
  431. }
  432. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H264)
  433. {
  434. if (ctx->enableLossless)
  435. {
  436. if (ctx->is_semiplanar)
  437. cParams.colorFormat = NVBUF_COLOR_FORMAT_NV24;
  438. else
  439. cParams.colorFormat = NVBUF_COLOR_FORMAT_YUV444;
  440. }
  441. }
  442. else if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H265)
  443. {
  444. if (ctx->chroma_format_idc == 3)
  445. {
  446. if (ctx->is_semiplanar)
  447. cParams.colorFormat = NVBUF_COLOR_FORMAT_NV24;
  448. else
  449. cParams.colorFormat = NVBUF_COLOR_FORMAT_YUV444;
  450. if (ctx->bit_depth == 10)
  451. cParams.colorFormat = NVBUF_COLOR_FORMAT_NV24_10LE;
  452. }
  453. if (ctx->profile == V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10 && (ctx->bit_depth == 10))
  454. {
  455. cParams.colorFormat = NVBUF_COLOR_FORMAT_NV12_10LE;
  456. }
  457. }
  458. cParams.memtag = NvBufSurfaceTag_VIDEO_ENC;
  459. cParams.memType = NVBUF_MEM_SURFACE_ARRAY;
  460. /* Create output plane fd for DMABUF io-mode */
  461. ret = NvBufSurf::NvAllocate(&cParams, 1, &fd);
  462. if(ret < 0)
  463. {
  464. cerr << "Failed to create NvBuffer" << endl;
  465. return ret;
  466. }
  467. ctx->output_plane_fd[i]=fd;
  468. }
  469. return ret;
  470. }
  471. static int
  472. setup_capture_dmabuf(context_enc_t *ctx, uint32_t num_buffers )
  473. {
  474. NvBufSurfaceAllocateParams cParams = {{0}};
  475. NvBufSurface *surface = 0;
  476. int ret=0;
  477. ret = ctx->enc->capture_plane.reqbufs(V4L2_MEMORY_DMABUF,num_buffers);
  478. if(ret)
  479. {
  480. cerr << "reqbufs failed for capture plane V4L2_MEMORY_DMABUF" << endl;
  481. return ret;
  482. }
  483. for (uint32_t i = 0; i < ctx->enc->capture_plane.getNumBuffers(); i++)
  484. {
  485. ret = ctx->enc->capture_plane.queryBuffer(i);
  486. if (ret)
  487. {
  488. cerr << "Error in querying for " << i << "th buffer plane" << endl;
  489. return ret;
  490. }
  491. NvBuffer *buffer = ctx->enc->capture_plane.getNthBuffer(i);
  492. cParams.params.memType = NVBUF_MEM_HANDLE;
  493. cParams.params.size = buffer->planes[0].length;
  494. cParams.memtag = NvBufSurfaceTag_VIDEO_ENC;
  495. ret = NvBufSurfaceAllocate(&surface, 1, &cParams);
  496. if(ret < 0)
  497. {
  498. cerr << "Failed to create NvBuffer" << endl;
  499. return ret;
  500. }
  501. surface->numFilled = 1;
  502. ctx->capture_plane_fd[i] = surface->surfaceList[0].bufferDesc;
  503. }
  504. return ret;
  505. }
/* Reads one "<id><value><delimiter>" triple from the runtime-parameter
 * string stream (ctx->runtime_params_str).
 *
 * @param ctx   : Encoder context holding the runtime-parameter stream
 * @param id    : out — the single identifier character read
 * @param value : out — the unsigned value following the identifier
 * @return -1 if the stream is exhausted after the identifier or the
 *         identifier is not followed by a digit; 0 if the pair was read and
 *         the stream then ended; otherwise the delimiter character that
 *         followed the value (caller checks for ',' / ';').
 */
static int
get_next_parsed_pair(context_enc_t *ctx, char *id, uint32_t *value)
{
    char charval;
    *ctx->runtime_params_str >> *id;
    if (ctx->runtime_params_str->eof())
    {
        return -1;
    }
    /* Peek so a malformed (non-numeric) value is rejected without consuming. */
    charval = ctx->runtime_params_str->peek();
    if (!IS_DIGIT(charval))
    {
        return -1;
    }
    *ctx->runtime_params_str >> *value;
    /* Consume the trailing delimiter, if the stream has one. */
    *ctx->runtime_params_str >> charval;
    if (ctx->runtime_params_str->eof())
    {
        return 0;
    }
    return charval;
}
  528. static int
  529. get_next_runtime_param_change_frame(context_enc_t *ctx)
  530. {
  531. char charval;
  532. int ret;
  533. ret = get_next_parsed_pair(ctx, &charval, &ctx->next_param_change_frame);
  534. if(ret == 0)
  535. {
  536. return 0;
  537. }
  538. // TEST_PARSE_ERROR((ret != ';' && ret != ',') || charval != 'f', err);
  539. return 0;
  540. // err:
  541. // cerr << "Skipping further runtime parameter changes" <<endl;
  542. // delete ctx->runtime_params_str;
  543. // ctx->runtime_params_str = NULL;
  544. // return -1;
  545. }
  546. VideoFrameType ConvertToVideoFrameType(EVideoFrameType type) {
  547. switch (type) {
  548. case videoFrameTypeIDR:
  549. return VideoFrameType::kVideoFrameKey;
  550. case videoFrameTypeSkip:
  551. case videoFrameTypeI:
  552. case videoFrameTypeP:
  553. case videoFrameTypeIPMixed:
  554. return VideoFrameType::kVideoFrameDelta;
  555. case videoFrameTypeInvalid:
  556. break;
  557. }
  558. RTC_NOTREACHED() << "Unexpected/invalid frame type: " << type;
  559. return VideoFrameType::kEmptyFrame;
  560. }
  561. } // namespace
  562. // Helper method used by H264EncoderImpl::Encode.
  563. // Copies the encoded bytes from |info| to |encoded_image|. The
  564. // |encoded_image->_buffer| may be deleted and reallocated if a bigger buffer is
  565. // required.
  566. //
  567. // After OpenH264 encoding, the encoded bytes are stored in |info| spread out
  568. // over a number of layers and "NAL units". Each NAL unit is a fragment starting
  569. // with the four-byte start code {0,0,0,1}. All of this data (including the
  570. // start codes) is copied to the |encoded_image->_buffer|.
  571. static void RtpFragmentize(EncodedImage* encoded_image, SFrameBSInfo* info) {
  572. // Calculate minimum buffer size required to hold encoded data.
  573. size_t required_capacity = 0;
  574. size_t fragments_count = 0;
  575. for (int layer = 0; layer < info->iLayerNum; ++layer) {
  576. const SLayerBSInfo& layerInfo = info->sLayerInfo[layer];
  577. for (int nal = 0; nal < layerInfo.iNalCount; ++nal, ++fragments_count) {
  578. RTC_CHECK_GE(layerInfo.pNalLengthInByte[nal], 0);
  579. // Ensure |required_capacity| will not overflow.
  580. RTC_CHECK_LE(layerInfo.pNalLengthInByte[nal],
  581. std::numeric_limits<size_t>::max() - required_capacity);
  582. required_capacity += layerInfo.pNalLengthInByte[nal];
  583. }
  584. }
  585. // TODO(nisse): Use a cache or buffer pool to avoid allocation?
  586. auto buffer = EncodedImageBuffer::Create(required_capacity);
  587. encoded_image->SetEncodedData(buffer);
  588. // Iterate layers and NAL units, note each NAL unit as a fragment and copy
  589. // the data to |encoded_image->_buffer|.
  590. const uint8_t start_code[4] = {0, 0, 0, 1};
  591. size_t frag = 0;
  592. encoded_image->set_size(0);
  593. for (int layer = 0; layer < info->iLayerNum; ++layer) {
  594. const SLayerBSInfo& layerInfo = info->sLayerInfo[layer];
  595. // Iterate NAL units making up this layer, noting fragments.
  596. size_t layer_len = 0;
  597. for (int nal = 0; nal < layerInfo.iNalCount; ++nal, ++frag) {
  598. // Because the sum of all layer lengths, |required_capacity|, fits in a
  599. // |size_t|, we know that any indices in-between will not overflow.
  600. RTC_DCHECK_GE(layerInfo.pNalLengthInByte[nal], 4);
  601. RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 0], start_code[0]);
  602. RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 1], start_code[1]);
  603. RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 2], start_code[2]);
  604. RTC_DCHECK_EQ(layerInfo.pBsBuf[layer_len + 3], start_code[3]);
  605. layer_len += layerInfo.pNalLengthInByte[nal];
  606. }
  607. // Copy the entire layer's data (including start codes).
  608. memcpy(buffer->data() + encoded_image->size(), layerInfo.pBsBuf, layer_len);
  609. encoded_image->set_size(encoded_image->size() + layer_len);
  610. }
  611. }
  612. JetH264Encoder::JetH264Encoder(const cricket::VideoCodec& codec)
  613. : packetization_mode_(H264PacketizationMode::SingleNalUnit),
  614. max_payload_size_(0),
  615. number_of_cores_(0),
  616. encoded_image_callback_(nullptr),
  617. has_reported_init_(false),
  618. has_reported_error_(false) {
  619. RTC_CHECK(absl::EqualsIgnoreCase(codec.name, cricket::kH264CodecName));
  620. std::string packetization_mode_string;
  621. if (codec.GetParam(cricket::kH264FmtpPacketizationMode,
  622. &packetization_mode_string) &&
  623. packetization_mode_string == "1") {
  624. packetization_mode_ = H264PacketizationMode::NonInterleaved;
  625. }
  626. downscaled_buffers_.reserve(kMaxSimulcastStreams - 1);
  627. encoded_images_.reserve(kMaxSimulcastStreams);
  628. encoders_.reserve(kMaxSimulcastStreams);
  629. configurations_.reserve(kMaxSimulcastStreams);
  630. tl0sync_limit_.reserve(kMaxSimulcastStreams);
  631. }
// Destructor: delegates all teardown to Release().
JetH264Encoder::~JetH264Encoder() {
  Release();
}
// Added (not in upstream WebRTC): Jetson V4L2 encoder open/configuration helper.
  636. bool JetH264Encoder::OpenEncoder(context_enc_t *ctx, JetH264Encoder::LayerConfig &config)
  637. {
  638. int ret = 0;
  639. int error = 0;
  640. bool eos = false;
  641. memset(ctx, 0, sizeof(context_enc_t));
  642. // ctx->in_file_path = "/home/nvidia/Desktop/env_enc/jetson_enc/build/output.yuv";
  643. // ctx->out_file_path = "test.h264";
  644. // ctx->width = 1280;
  645. // ctx->height = 720;
  646. ctx->encoder_pixfmt = V4L2_PIX_FMT_H264;
  647. ctx->raw_pixfmt = V4L2_PIX_FMT_YUV420M;
  648. ctx->bitrate = 4 * 1024 * 1024;
  649. ctx->peak_bitrate = 0;
  650. ctx->profile = V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE;
  651. ctx->ratecontrol = V4L2_MPEG_VIDEO_BITRATE_MODE_CBR;
  652. ctx->iframe_interval = 30;
  653. ctx->externalRPS = false;
  654. ctx->enableGDR = false;
  655. ctx->enableROI = false;
  656. ctx->bnoIframe = false;
  657. ctx->bGapsInFrameNumAllowed = false;
  658. ctx->bReconCrc = false;
  659. ctx->enableLossless = false;
  660. ctx->nH264FrameNumBits = 0;
  661. ctx->nH265PocLsbBits = 0;
  662. ctx->idr_interval = 256;
  663. ctx->level = -1;
  664. ctx->fps_n = 30;
  665. ctx->fps_d = 1;
  666. ctx->gdr_start_frame_number = 0xffffffff;
  667. ctx->gdr_num_frames = 0xffffffff;
  668. ctx->gdr_out_frame_number = 0xffffffff;
  669. ctx->num_b_frames = (uint32_t) -1;
  670. ctx->nMinQpI = (uint32_t)QP_RETAIN_VAL;
  671. ctx->nMaxQpI = (uint32_t)QP_RETAIN_VAL;
  672. ctx->nMinQpP = (uint32_t)QP_RETAIN_VAL;
  673. ctx->nMaxQpP = (uint32_t)QP_RETAIN_VAL;
  674. ctx->nMinQpB = (uint32_t)QP_RETAIN_VAL;
  675. ctx->nMaxQpB = (uint32_t)QP_RETAIN_VAL;
  676. ctx->use_gold_crc = false;
  677. ctx->pBitStreamCrc = NULL;
  678. ctx->externalRCHints = false;
  679. ctx->input_metadata = false;
  680. ctx->sMaxQp = 51;
  681. ctx->stats = false;
  682. ctx->stress_test = 1;
  683. ctx->output_memory_type = V4L2_MEMORY_DMABUF;
  684. ctx->capture_memory_type = V4L2_MEMORY_MMAP;
  685. ctx->cs = V4L2_COLORSPACE_SMPTE170M;
  686. ctx->copy_timestamp = false;
  687. ctx->sar_width = 0;
  688. ctx->sar_height = 0;
  689. ctx->start_ts = 0;
  690. ctx->max_perf = 0;
  691. ctx->blocking_mode = 1;
  692. ctx->startf = 0;
  693. ctx->endf = 0;
  694. ctx->num_output_buffers = 6;
  695. ctx->num_frames_to_encode = -1;
  696. ctx->poc_type = 0;
  697. ctx->chroma_format_idc = -1;
  698. ctx->bit_depth = 8;
  699. ctx->is_semiplanar = false;
  700. ctx->enable_initQP = false;
  701. ctx->IinitQP = 0;
  702. ctx->PinitQP = 0;
  703. ctx->BinitQP = 0;
  704. ctx->enable_ratecontrol = true;
  705. ctx->enable_av1tile = false;
  706. ctx->log2_num_av1rows = 0;
  707. ctx->log2_num_av1cols = 0;
  708. ctx->enable_av1ssimrdo = (uint8_t)-1;
  709. ctx->disable_av1cdfupdate = (uint8_t)-1;
  710. ctx->ppe_init_params.enable_ppe = false;
  711. ctx->ppe_init_params.wait_time_ms = -1;
  712. ctx->ppe_init_params.feature_flags = V4L2_PPE_FEATURE_NONE;
  713. ctx->ppe_init_params.enable_profiler = 0;
  714. ctx->ppe_init_params.taq_max_qp_delta = 5;
  715. /* TAQ for B-frames is enabled by default */
  716. ctx->ppe_init_params.taq_b_frame_mode = 1;
  717. /* Set default values for encoder context members. */
  718. // set_defaults(&ctx);
  719. /* Parse application command line options. */
  720. // ret = parse_csv_args(&ctx, argc, argv);
  721. // TEST_ERROR(ret < 0, "Error parsing commandline arguments", cleanup);
  722. // std ::cout << ctx.encoder_pixfmt << std::endl;
  723. /* Set thread name for encoder Output Plane thread. */
  724. pthread_setname_np(pthread_self(),"EncOutPlane");
  725. // /* Get the parsed encoder runtime parameters */
  726. // if (ctx->runtime_params_str)
  727. // {
  728. // get_next_runtime_param_change_frame(&ctx);
  729. // }
  730. if (ctx->endf) {
  731. ctx->num_frames_to_encode = ctx->endf - ctx->startf + 1;
  732. }
  733. if (ctx->use_gold_crc)
  734. {
  735. /* CRC specific initializetion if gold_crc flag is set */
  736. ctx->pBitStreamCrc = InitCrc(CRC32_POLYNOMIAL);
  737. }
  738. /* Open input file for raw yuv */
  739. ctx->in_file = new ifstream(ctx->in_file_path);
  740. if (!ctx->stats)
  741. {
  742. /* Open output file for encoded bitstream */
  743. ctx->out_file = new ofstream(ctx->out_file_path);
  744. }
  745. if (ctx->ROI_Param_file_path) {
  746. /* Open Region of Intreset(ROI) parameter file when ROI feature enabled */
  747. ctx->roi_Param_file = new ifstream(ctx->ROI_Param_file_path);
  748. }
  749. if (ctx->Recon_Ref_file_path) {
  750. /* Open Reconstructed CRC reference file when ReconCRC feature enabled */
  751. ctx->recon_Ref_file = new ifstream(ctx->Recon_Ref_file_path);
  752. }
  753. if (ctx->RPS_Param_file_path) {
  754. /* Open Reference Picture set(RPS) specififc reference file when Dynamic RPS feature enabled */
  755. ctx->rps_Param_file = new ifstream(ctx->RPS_Param_file_path);
  756. }
  757. if (ctx->GDR_Param_file_path) {
  758. /* Open Gradual Decoder Refresh(GDR) parameters reference file when GDR feature enabled */
  759. ctx->gdr_Param_file = new ifstream(ctx->GDR_Param_file_path);
  760. }
  761. if (ctx->GDR_out_file_path) {
  762. /* Open Gradual Decoder Refresh(GDR) output parameters reference file when GDR feature enabled */
  763. ctx->gdr_out_file = new ofstream(ctx->GDR_out_file_path);
  764. }
  765. if (ctx->hints_Param_file_path) {
  766. /* Open external hints parameters file for when external rate control feature enabled */
  767. ctx->hints_Param_file = new ifstream(ctx->hints_Param_file_path);
  768. }
  769. /* Create NvVideoEncoder object for blocking or non-blocking I/O mode. */
  770. //进入
  771. if (ctx->blocking_mode)
  772. {
  773. cout << "Creating Encoder in blocking mode \n";
  774. ctx->enc = NvVideoEncoder::createVideoEncoder("enc0");
  775. }
  776. else
  777. {
  778. cout << "Creating Encoder in non-blocking mode \n";
  779. ctx->enc = NvVideoEncoder::createVideoEncoder("enc0", O_NONBLOCK);
  780. }
  781. if (ctx->stats)
  782. {
  783. ctx->enc->enableProfiling();
  784. }
  785. /* Set encoder capture plane format.
  786. NOTE: It is necessary that Capture Plane format be set before Output Plane
  787. format. It is necessary to set width and height on the capture plane as well */
  788. ret =
  789. ctx->enc->setCapturePlaneFormat(ctx->encoder_pixfmt, ctx->width,
  790. ctx->height, 2 * 1024 * 1024);
  791. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H265)
  792. {
  793. switch (ctx->profile)
  794. {
  795. case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN10:
  796. {
  797. ctx->raw_pixfmt = V4L2_PIX_FMT_P010M;
  798. ctx->is_semiplanar = true; /* To keep previous execution commands working */
  799. ctx->bit_depth = 10;
  800. break;
  801. }
  802. case V4L2_MPEG_VIDEO_H265_PROFILE_MAIN:
  803. {
  804. if (ctx->is_semiplanar)
  805. ctx->raw_pixfmt = V4L2_PIX_FMT_NV12M;
  806. else
  807. ctx->raw_pixfmt = V4L2_PIX_FMT_YUV420M;
  808. if (ctx->chroma_format_idc == 3)
  809. {
  810. if (ctx->bit_depth == 10 && ctx->is_semiplanar)
  811. ctx->raw_pixfmt = V4L2_PIX_FMT_NV24_10LE;
  812. if (ctx->bit_depth == 8)
  813. {
  814. if (ctx->is_semiplanar)
  815. ctx->raw_pixfmt = V4L2_PIX_FMT_NV24M;
  816. else
  817. ctx->raw_pixfmt = V4L2_PIX_FMT_YUV444M;
  818. }
  819. }
  820. }
  821. break;
  822. default:
  823. ctx->raw_pixfmt = V4L2_PIX_FMT_YUV420M;
  824. }
  825. }
  826. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H264)
  827. {
  828. if (ctx->enableLossless &&
  829. ctx->profile == V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE)
  830. {
  831. if (ctx->is_semiplanar)
  832. ctx->raw_pixfmt = V4L2_PIX_FMT_NV24M;
  833. else
  834. ctx->raw_pixfmt = V4L2_PIX_FMT_YUV444M;
  835. }
  836. else if ((ctx->enableLossless &&
  837. ctx->profile != V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE) ||
  838. (!ctx->enableLossless && ctx->profile == V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE))
  839. {
  840. cerr << "Lossless encoding is supported only for high444 profile\n";
  841. error = 1;
  842. // goto cleanup;
  843. }
  844. else
  845. {
  846. if (ctx->is_semiplanar)
  847. ctx->raw_pixfmt = V4L2_PIX_FMT_NV12M;
  848. else
  849. ctx->raw_pixfmt = V4L2_PIX_FMT_YUV420M;
  850. }
  851. }
  852. /* Set encoder output plane format */
  853. ret = ctx->enc->setOutputPlaneFormat(ctx->raw_pixfmt, ctx->width, ctx->height);
  854. if (ctx->num_frames_to_encode)
  855. {
  856. ret = ctx->enc->setFramesToEncode(ctx->num_frames_to_encode);
  857. }
  858. ret = ctx->enc->setBitrate(ctx->bitrate);
  859. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H264)
  860. {
  861. /* Set encoder profile for H264 format */
  862. ret = ctx->enc->setProfile(ctx->profile);
  863. if (ctx->level == (uint32_t)-1)
  864. {
  865. ctx->level = (uint32_t)V4L2_MPEG_VIDEO_H264_LEVEL_5_1;
  866. }
  867. ret = ctx->enc->setLevel(ctx->level);
  868. }
  869. else if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H265)
  870. {
  871. ret = ctx->enc->setProfile(ctx->profile);
  872. if (ctx->level != (uint32_t)-1)
  873. {
  874. ret = ctx->enc->setLevel(ctx->level);
  875. }
  876. if (ctx->chroma_format_idc != (uint8_t)-1)
  877. {
  878. ret = ctx->enc->setChromaFactorIDC(ctx->chroma_format_idc);
  879. }
  880. }
  881. if (ctx->enable_initQP)
  882. {
  883. ret = ctx->enc->setInitQP(ctx->IinitQP, ctx->PinitQP, ctx->BinitQP);
  884. }
  885. if (ctx->enableLossless)
  886. {
  887. ret = ctx->enc->setLossless(ctx->enableLossless);
  888. }
  889. else if (!ctx->enable_ratecontrol)
  890. {
  891. /* Set constant QP configuration by disabling rate control */
  892. ret = ctx->enc->setConstantQp(ctx->enable_ratecontrol);
  893. }
  894. else
  895. {
  896. /* Set rate control mode for encoder */
  897. ret = ctx->enc->setRateControlMode(ctx->ratecontrol);
  898. if (ctx->ratecontrol == V4L2_MPEG_VIDEO_BITRATE_MODE_VBR) {
  899. uint32_t peak_bitrate;
  900. if (ctx->peak_bitrate < ctx->bitrate)
  901. peak_bitrate = 1.2f * ctx->bitrate;
  902. else
  903. peak_bitrate = ctx->peak_bitrate;
  904. /* Set peak bitrate value for variable bitrate mode for encoder */
  905. ret = ctx->enc->setPeakBitrate(peak_bitrate);
  906. }
  907. }
  908. if (ctx->poc_type)
  909. {
  910. ret = ctx->enc->setPocType(ctx->poc_type);
  911. }
  912. /* Set IDR frame interval for encoder */
  913. ret = ctx->enc->setIDRInterval(ctx->idr_interval);
  914. /* Set I frame interval for encoder */
  915. ret = ctx->enc->setIFrameInterval(ctx->iframe_interval);
  916. /* Set framerate for encoder */
  917. ret = ctx->enc->setFrameRate(ctx->fps_n, ctx->fps_d);
  918. if (ctx->temporal_tradeoff_level)
  919. {
  920. /* Set temporal tradeoff level value for encoder */
  921. ret = ctx->enc->setTemporalTradeoff(ctx->temporal_tradeoff_level);
  922. }
  923. if (ctx->slice_length)
  924. {
  925. /* Set slice length value for encoder */
  926. ret = ctx->enc->setSliceLength(ctx->slice_length_type,
  927. ctx->slice_length);
  928. }
  929. if (ctx->enable_slice_level_encode)
  930. {
  931. /* Enable slice level encode for encoder */
  932. ret = ctx->enc->setSliceLevelEncode(true);
  933. }
  934. if (ctx->hw_preset_type)
  935. {
  936. /* Set hardware preset value for encoder */
  937. ret = ctx->enc->setHWPresetType(ctx->hw_preset_type);
  938. }
  939. if (ctx->virtual_buffer_size)
  940. {
  941. /* Set virtual buffer size value for encoder */
  942. ret = ctx->enc->setVirtualBufferSize(ctx->virtual_buffer_size);
  943. }
  944. if (ctx->slice_intrarefresh_interval)
  945. {
  946. /* Set slice intra refresh interval value for encoder */
  947. ret = ctx->enc->setSliceIntrarefresh(ctx->slice_intrarefresh_interval);
  948. }
  949. if (ctx->insert_sps_pps_at_idr)
  950. {
  951. /* Enable insert of SPSPPS at IDR frames */
  952. ret = ctx->enc->setInsertSpsPpsAtIdrEnabled(true);
  953. }
  954. if (ctx->disable_cabac)
  955. {
  956. /* Disable CABAC entropy encoding */
  957. ret = ctx->enc->setCABAC(false);
  958. }
  959. if (ctx->sar_width)
  960. {
  961. /* Set SAR width */
  962. ret = ctx->enc->setSampleAspectRatioWidth(ctx->sar_width);
  963. }
  964. if (ctx->sar_height)
  965. {
  966. /* Set SAR width */
  967. ret = ctx->enc->setSampleAspectRatioHeight(ctx->sar_height);
  968. }
  969. if (ctx->insert_vui)
  970. {
  971. /* Enable insert of VUI parameters */
  972. ret = ctx->enc->setInsertVuiEnabled(true);
  973. }
  974. if (ctx->enable_extended_colorformat)
  975. {
  976. /* Enable extnded colorformat for encoder */
  977. ret = ctx->enc->setExtendedColorFormat(true);
  978. }
  979. if (ctx->insert_aud)
  980. {
  981. /* Enable insert of AUD parameters */
  982. ret = ctx->enc->setInsertAudEnabled(true);
  983. }
  984. if (ctx->alliframes)
  985. {
  986. /* Enable all I-frame encode */
  987. ret = ctx->enc->setAlliFramesEncode(true);
  988. }
  989. if (ctx->num_b_frames != (uint32_t) -1)
  990. {
  991. /* Set number of B-frames to to be used by encoder */
  992. ret = ctx->enc->setNumBFrames(ctx->num_b_frames);
  993. }
  994. if ((ctx->nMinQpI != (uint32_t)QP_RETAIN_VAL) ||
  995. (ctx->nMaxQpI != (uint32_t)QP_RETAIN_VAL) ||
  996. (ctx->nMinQpP != (uint32_t)QP_RETAIN_VAL) ||
  997. (ctx->nMaxQpP != (uint32_t)QP_RETAIN_VAL) ||
  998. (ctx->nMinQpB != (uint32_t)QP_RETAIN_VAL) ||
  999. (ctx->nMaxQpB != (uint32_t)QP_RETAIN_VAL))
  1000. {
  1001. /* Set Min & Max qp range values for I/P/B-frames to be used by encoder */
  1002. ret = ctx->enc->setQpRange(ctx->nMinQpI, ctx->nMaxQpI, ctx->nMinQpP,
  1003. ctx->nMaxQpP, ctx->nMinQpB, ctx->nMaxQpB);
  1004. }
  1005. if (ctx->max_perf)
  1006. {
  1007. /* Enable maximum performance mode by disabling internal DFS logic.
  1008. NOTE: This enables encoder to run at max clocks */
  1009. ret = ctx->enc->setMaxPerfMode(ctx->max_perf);
  1010. }
  1011. if (ctx->dump_mv)
  1012. {
  1013. /* Enable dumping of motion vectors report from encoder */
  1014. ret = ctx->enc->enableMotionVectorReporting();
  1015. }
  1016. if (ctx->bnoIframe) {
  1017. ctx->iframe_interval = ((1<<31) + 1); /* TODO: how can we do this properly */
  1018. ret = ctx->enc->setIFrameInterval(ctx->iframe_interval);
  1019. }
  1020. if (ctx->enableROI) {
  1021. v4l2_enc_enable_roi_param VEnc_enable_ext_roi_ctrl;
  1022. VEnc_enable_ext_roi_ctrl.bEnableROI = ctx->enableROI;
  1023. /* Enable region of intrest configuration for encoder */
  1024. ret = ctx->enc->enableROI(VEnc_enable_ext_roi_ctrl);
  1025. }
  1026. if (ctx->bReconCrc) {
  1027. v4l2_enc_enable_reconcrc_param VEnc_enable_recon_crc_ctrl;
  1028. VEnc_enable_recon_crc_ctrl.bEnableReconCRC = ctx->bReconCrc;
  1029. /* Enable reconstructed CRC configuration for encoder */
  1030. ret = ctx->enc->enableReconCRC(VEnc_enable_recon_crc_ctrl);
  1031. }
  1032. if (ctx->externalRPS) {
  1033. v4l2_enc_enable_ext_rps_ctr VEnc_enable_ext_rps_ctrl;
  1034. VEnc_enable_ext_rps_ctrl.bEnableExternalRPS = ctx->externalRPS;
  1035. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H264) {
  1036. VEnc_enable_ext_rps_ctrl.bGapsInFrameNumAllowed = ctx->bGapsInFrameNumAllowed;
  1037. VEnc_enable_ext_rps_ctrl.nH264FrameNumBits = ctx->nH264FrameNumBits;
  1038. }
  1039. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_H265) {
  1040. VEnc_enable_ext_rps_ctrl.nH265PocLsbBits = ctx->nH265PocLsbBits;
  1041. }
  1042. /* Enable external reference picture set configuration for encoder */
  1043. ret = ctx->enc->enableExternalRPS(VEnc_enable_ext_rps_ctrl);
  1044. }
  1045. if (ctx->num_reference_frames)
  1046. {
  1047. /* Set number of reference frame configuration value for encoder */
  1048. ret = ctx->enc->setNumReferenceFrames(ctx->num_reference_frames);
  1049. }
  1050. if (ctx->externalRCHints) {
  1051. v4l2_enc_enable_ext_rate_ctr VEnc_enable_ext_rate_ctrl;
  1052. VEnc_enable_ext_rate_ctrl.bEnableExternalPictureRC = ctx->externalRCHints;
  1053. VEnc_enable_ext_rate_ctrl.nsessionMaxQP = ctx->sMaxQp;
  1054. /* Enable external rate control configuration for encoder */
  1055. ret = ctx->enc->enableExternalRC(VEnc_enable_ext_rate_ctrl);
  1056. }
  1057. if (ctx->encoder_pixfmt == V4L2_PIX_FMT_AV1)
  1058. {
  1059. if (ctx->enable_av1tile)
  1060. {
  1061. v4l2_enc_av1_tile_config VEnc_av1_tile_config;
  1062. VEnc_av1_tile_config.bEnableTile = ctx->enable_av1tile;
  1063. VEnc_av1_tile_config.nLog2RowTiles = ctx->log2_num_av1rows;
  1064. VEnc_av1_tile_config.nLog2ColTiles = ctx->log2_num_av1cols;
  1065. /* Enable tile configuration for encoder */
  1066. ret = ctx->enc->enableAV1Tile(VEnc_av1_tile_config);
  1067. }
  1068. if (ctx->enable_av1ssimrdo != (uint8_t) -1)
  1069. {
  1070. ret = ctx->enc->setAV1SsimRdo(ctx->enable_av1ssimrdo);
  1071. }
  1072. if (ctx->disable_av1cdfupdate != (uint8_t) -1)
  1073. {
  1074. ret = ctx->enc->setAV1DisableCDFUpdate(ctx->disable_av1cdfupdate);
  1075. }
  1076. }
  1077. /* Query, Export and Map the output plane buffers so that we can read
  1078. raw data into the buffers */
  1079. switch(ctx->output_memory_type)
  1080. {
  1081. case V4L2_MEMORY_MMAP:
  1082. ret = ctx->enc->output_plane.setupPlane(V4L2_MEMORY_MMAP, 10, true, false);
  1083. break;
  1084. case V4L2_MEMORY_USERPTR:
  1085. ret = ctx->enc->output_plane.setupPlane(V4L2_MEMORY_USERPTR, 10, false, true);
  1086. break;
  1087. case V4L2_MEMORY_DMABUF:
  1088. ret = setup_output_dmabuf(ctx,10);
  1089. break;
  1090. default :
  1091. break;
  1092. }
  1093. /* Query, Export and Map the capture plane buffers so that we can write
  1094. encoded bitstream data into the buffers */
  1095. switch(ctx->capture_memory_type)
  1096. {
  1097. case V4L2_MEMORY_MMAP:
  1098. ret = ctx->enc->capture_plane.setupPlane(V4L2_MEMORY_MMAP, ctx->num_output_buffers, true, false);
  1099. // TEST_ERROR(ret < 0, "Could not setup capture plane", cleanup);
  1100. break;
  1101. case V4L2_MEMORY_DMABUF:
  1102. ret = setup_capture_dmabuf(ctx,ctx->num_output_buffers);
  1103. break;
  1104. default :
  1105. break;
  1106. }
  1107. /* Subscibe for End Of Stream event */
  1108. ret = ctx->enc->subscribeEvent(V4L2_EVENT_EOS,0,0);
  1109. if (ctx->b_use_enc_cmd)
  1110. {
  1111. /* Send v4l2 command for encoder start */
  1112. ret = ctx->enc->setEncoderCommand(V4L2_ENC_CMD_START, 0);
  1113. }
  1114. else
  1115. {
  1116. /* set encoder output plane STREAMON */
  1117. ret = ctx->enc->output_plane.setStreamStatus(true);
  1118. /* set encoder capture plane STREAMON */
  1119. ret = ctx->enc->capture_plane.setStreamStatus(true);
  1120. }
  1121. if (ctx->blocking_mode) //进入
  1122. {
  1123. if (ctx->RPS_threeLayerSvc)
  1124. {
  1125. sem_init(&ctx->rps_par.sema, 0, 0);
  1126. }
  1127. /* Set encoder capture plane dq thread callback for blocking io mode */
  1128. ctx->enc->capture_plane.setDQThreadCallback(encoder_capture_plane_dq_callback);
  1129. /* startDQThread starts a thread internally which calls the
  1130. encoder_capture_plane_dq_callback whenever a buffer is dequeued
  1131. on the plane */
  1132. ctx->enc->capture_plane.startDQThread(&ctx);
  1133. }
  1134. else
  1135. {
  1136. sem_init(&ctx->pollthread_sema, 0, 0);
  1137. sem_init(&ctx->encoderthread_sema, 0, 0);
  1138. /* Set encoder poll thread for non-blocking io mode */
  1139. pthread_create(&ctx->enc_pollthread, NULL, encoder_pollthread_fcn, &ctx);
  1140. pthread_setname_np(ctx->enc_pollthread, "EncPollThread");
  1141. cout << "Created the PollThread and Encoder Thread \n";
  1142. }
  1143. /* Enqueue all the empty capture plane buffers. */
  1144. //将一组缓冲区排队到编码器的捕获平面
  1145. for (uint32_t i = 0; i < ctx->enc->capture_plane.getNumBuffers(); i++)
  1146. {
  1147. struct v4l2_buffer v4l2_buf;
  1148. struct v4l2_plane planes[MAX_PLANES];
  1149. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  1150. memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
  1151. v4l2_buf.index = i;
  1152. v4l2_buf.m.planes = planes;
  1153. if(ctx->capture_memory_type == V4L2_MEMORY_DMABUF)
  1154. {
  1155. v4l2_buf.m.planes[0].m.fd = ctx->capture_plane_fd[i];
  1156. /* Map capture plane buffer for memory type DMABUF. */
  1157. ret = ctx->enc->capture_plane.mapOutputBuffers(v4l2_buf, ctx->capture_plane_fd[i]);
  1158. // if (ret < 0)
  1159. // {
  1160. // cerr << "Error while mapping buffer at capture plane" << endl;
  1161. // abort(&ctx);
  1162. // goto cleanup;
  1163. // }
  1164. }
  1165. ret = ctx->enc->capture_plane.qBuffer(v4l2_buf, NULL);
  1166. // if (ret < 0)
  1167. // {
  1168. // cerr << "Error while queueing buffer at capture plane" << endl;
  1169. // abort(&ctx);
  1170. // goto cleanup;
  1171. // }
  1172. }
  1173. if (ctx->copy_timestamp) {
  1174. /* Set user provided timestamp when copy timestamp is enabled */
  1175. ctx->timestamp = (ctx->start_ts * MICROSECOND_UNIT);
  1176. ctx->timestampincr = (MICROSECOND_UNIT * 16) / ((uint32_t) (ctx->fps_n * 16));
  1177. }
  1178. if(ctx->ppe_init_params.enable_ppe)
  1179. {
  1180. ret = ctx->enc->setPPEInitParams(ctx->ppe_init_params);
  1181. if (ret < 0){
  1182. cerr << "Error calling setPPEInitParams" << endl;
  1183. }
  1184. }
  1185. config.target_bps = config.max_bps;
  1186. return true;
  1187. }
// Initialize one hardware encoder per simulcast stream.
// |inst|     - codec settings from WebRTC; must describe an H.264 codec with
//              a non-zero framerate and positive dimensions.
// |settings| - encoder settings (core count, max payload size).
// Returns WEBRTC_VIDEO_CODEC_OK on success, an error code otherwise.
int32_t JetH264Encoder::InitEncode(const VideoCodec* inst,
const VideoEncoder::Settings& settings) {
printf("init ----------------------------------------\n");
ReportInit();
// Validate codec type, framerate, and dimensions before touching any state.
if (!inst || inst->codecType != kVideoCodecH264) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->maxFramerate == 0) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->width < 1 || inst->height < 1) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
// Tear down any previously-initialized encoders before re-initializing.
int32_t release_ret = Release();
if (release_ret != WEBRTC_VIDEO_CODEC_OK) {
ReportError();
return release_ret;
}
int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst);
bool doing_simulcast = (number_of_streams > 1);
if (doing_simulcast &&
!SimulcastUtility::ValidSimulcastParameters(*inst, number_of_streams)) {
return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
}
// One entry per stream; the full-resolution stream needs no downscale buffer.
downscaled_buffers_.resize(number_of_streams - 1);
encoded_images_.resize(number_of_streams);
encoders_.resize(number_of_streams);
pictures_.resize(number_of_streams);// raw-frame descriptors, one per stream
configurations_.resize(number_of_streams);
tl0sync_limit_.resize(number_of_streams);
// NOTE(review): contexts are allocated with raw new; confirm Release()
// deletes them, otherwise they leak on re-init.
for (int i = 0; i < number_of_streams; i++){
encoders_[i] = new context_enc_t();
}
number_of_cores_ = settings.number_of_cores;
max_payload_size_ = settings.max_payload_size;
codec_ = *inst;
// Code expects simulcastStream resolutions to be correct, make sure they are
// filled even when there are no simulcast layers.
if (codec_.numberOfSimulcastStreams == 0) {
codec_.simulcastStream[0].width = codec_.width;
codec_.simulcastStream[0].height = codec_.height;
}
// Configure streams from the highest-resolution layer (largest idx) down, so
// configurations_[0] holds the top layer.
for (int i = 0, idx = number_of_streams - 1; i < number_of_streams;
++i, --idx)
{
// Set internal settings from codec_settings
configurations_[i].simulcast_idx = idx;
configurations_[i].sending = false;
configurations_[i].width = codec_.simulcastStream[idx].width;
configurations_[i].height = codec_.simulcastStream[idx].height;
configurations_[i].max_frame_rate = static_cast<float>(codec_.maxFramerate);
configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn;
configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval;
configurations_[i].num_temporal_layers = codec_.simulcastStream[idx].numberOfTemporalLayers;
// Create downscaled image buffers (stride Y = width, strides U/V = width/2).
if (i > 0) {
downscaled_buffers_[i - 1] = I420Buffer::Create(
configurations_[i].width, configurations_[i].height,
configurations_[i].width, configurations_[i].width / 2,
configurations_[i].width / 2);
}
// Codec_settings uses kbits/second; encoder uses bits/second.
configurations_[i].max_bps = codec_.maxBitrate * 1000; //2500000
configurations_[i].target_bps = codec_.startBitrate * 1000; //2000000
// Open the Jetson hardware encoder for this layer; abort everything on failure.
if (!OpenEncoder(encoders_[i], configurations_[i])) {
Release();
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
// Initialize encoded image. Default buffer size: size of unencoded data.
const size_t new_capacity =
CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width,
codec_.simulcastStream[idx].height);
encoded_images_[i].SetEncodedData(EncodedImageBuffer::Create(new_capacity));
encoded_images_[i]._completeFrame = true;
encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width;
encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height;
encoded_images_[i].set_size(0);
tl0sync_limit_[i] = configurations_[i].num_temporal_layers;
}
// Compute the initial per-stream bitrate allocation from the start bitrate.
SimulcastRateAllocator init_allocator(codec_);
VideoBitrateAllocation allocation =
init_allocator.Allocate(VideoBitrateAllocationParameters(
DataRate::KilobitsPerSec(codec_.startBitrate), codec_.maxFramerate));
// SetRates(RateControlParameters(allocation, codec_.maxFramerate));
return WEBRTC_VIDEO_CODEC_OK;
}
  1279. // void JetH264Encoder::copyFrame(AVFrame *frame, const webrtc::I420BufferInterface *buffer) {
  1280. // frame->width = buffer->width();
  1281. // frame->height = buffer->height();
  1282. // frame->format = AV_PIX_FMT_YUV420P;
  1283. // frame->data[kYPlaneIndex] = const_cast<uint8_t *>(buffer->DataY());
  1284. // frame->data[kUPlaneIndex] = const_cast<uint8_t *>(buffer->DataU());
  1285. // frame->data[kVPlaneIndex] = const_cast<uint8_t *>(buffer->DataV());
  1286. // }
  1287. // int32_t JetH264Encoder::Release() {
  1288. // while (!encoders_.empty()) {
  1289. // // ISVCEncoder* openh264_encoder = encoders_.back();
  1290. // context_enc_t* encoders_ = encoders_.back();
  1291. // if (encoders_) {
  1292. // RTC_CHECK_EQ(0, openh264_encoder->Uninitialize());
  1293. // WelsDestroySVCEncoder(openh264_encoder);
  1294. // }
  1295. // encoders_.pop_back();
  1296. // }
  1297. // downscaled_buffers_.clear();
  1298. // configurations_.clear();
  1299. // encoded_images_.clear();
  1300. // pictures_.clear();
  1301. // tl0sync_limit_.clear();
  1302. // return WEBRTC_VIDEO_CODEC_OK;
  1303. // }
  1304. int32_t JetH264Encoder::RegisterEncodeCompleteCallback(
  1305. EncodedImageCallback* callback) {
  1306. encoded_image_callback_ = callback;
  1307. return WEBRTC_VIDEO_CODEC_OK;
  1308. }
  1309. // void JetH264Encoder::SetRates(const RateControlParameters& parameters) {
  1310. // if (encoders_.empty()) {
  1311. // RTC_LOG(LS_WARNING) << "SetRates() while uninitialized.";
  1312. // return;
  1313. // }
  1314. // if (parameters.framerate_fps < 1.0) {
  1315. // RTC_LOG(LS_WARNING) << "Invalid frame rate: " << parameters.framerate_fps;
  1316. // return;
  1317. // }
  1318. // if (parameters.bitrate.get_sum_bps() == 0) {
  1319. // // Encoder paused, turn off all encoding.
  1320. // for (size_t i = 0; i < configurations_.size(); ++i) {
  1321. // configurations_[i].SetStreamState(false);
  1322. // }
  1323. // return;
  1324. // }
  1325. // codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps);
  1326. // size_t stream_idx = encoders_.size() - 1;
  1327. // for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
  1328. // // Update layer config.
  1329. // configurations_[i].target_bps =
  1330. // parameters.bitrate.GetSpatialLayerSum(stream_idx);
  1331. // configurations_[i].max_frame_rate = parameters.framerate_fps;
  1332. // if (configurations_[i].target_bps) {
  1333. // configurations_[i].SetStreamState(true);
  1334. // // Update h264 encoder.
  1335. // SBitrateInfo target_bitrate;
  1336. // memset(&target_bitrate, 0, sizeof(SBitrateInfo));
  1337. // target_bitrate.iLayer = SPATIAL_LAYER_ALL,
  1338. // target_bitrate.iBitrate = configurations_[i].target_bps;
  1339. // encoders_[i]->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
  1340. // encoders_[i]->SetOption(ENCODER_OPTION_FRAME_RATE,
  1341. // &configurations_[i].max_frame_rate);
  1342. // } else {
  1343. // configurations_[i].SetStreamState(false);
  1344. // }
  1345. // }
  1346. // }
  1347. int32_t JetH264Encoder::Encode(
  1348. const VideoFrame& input_frame,
  1349. const std::vector<VideoFrameType>* frame_types) {
  1350. if (encoders_.empty()) {
  1351. ReportError();
  1352. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  1353. }
  1354. if (!encoded_image_callback_) {
  1355. RTC_LOG(LS_WARNING)
  1356. << "InitEncode() has been called, but a callback function "
  1357. "has not been set with RegisterEncodeCompleteCallback()";
  1358. ReportError();
  1359. return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  1360. }
  1361. rtc::scoped_refptr<const I420BufferInterface> frame_buffer =
  1362. input_frame.video_frame_buffer()->ToI420();
  1363. bool send_key_frame = false;
  1364. for (size_t i = 0; i < configurations_.size(); ++i) {
  1365. if (configurations_[i].key_frame_request && configurations_[i].sending) {
  1366. send_key_frame = true;
  1367. break;
  1368. }
  1369. }
  1370. if (!send_key_frame && frame_types) {
  1371. for (size_t i = 0; i < configurations_.size(); ++i) {
  1372. const size_t simulcast_idx =
  1373. static_cast<size_t>(configurations_[i].simulcast_idx);
  1374. if (configurations_[i].sending && simulcast_idx < frame_types->size() &&
  1375. (*frame_types)[simulcast_idx] == VideoFrameType::kVideoFrameKey) {
  1376. send_key_frame = true;
  1377. break;
  1378. }
  1379. }
  1380. }
  1381. RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width());
  1382. RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height());
  1383. // Encode image for each layer.
  1384. for (size_t i = 0; i < encoders_.size(); ++i) {
  1385. // EncodeFrame input.
  1386. // copyFrame(encoders_[i]->frame, frame_buffer);
  1387. pictures_[i] = {0};
  1388. pictures_[i].iPicWidth = configurations_[i].width;
  1389. pictures_[i].iPicHeight = configurations_[i].height;
  1390. pictures_[i].iColorFormat = EVideoFormatType::videoFormatI420;
  1391. pictures_[i].uiTimeStamp = input_frame.ntp_time_ms();
  1392. // Downscale images on second and ongoing layers.
  1393. if (i == 0) {
  1394. pictures_[i].iStride[0] = frame_buffer->StrideY();
  1395. pictures_[i].iStride[1] = frame_buffer->StrideU();
  1396. pictures_[i].iStride[2] = frame_buffer->StrideV();
  1397. pictures_[i].pData[0] = const_cast<uint8_t*>(frame_buffer->DataY());
  1398. pictures_[i].pData[1] = const_cast<uint8_t*>(frame_buffer->DataU());
  1399. pictures_[i].pData[2] = const_cast<uint8_t*>(frame_buffer->DataV());
  1400. } else {
  1401. pictures_[i].iStride[0] = downscaled_buffers_[i - 1]->StrideY();
  1402. pictures_[i].iStride[1] = downscaled_buffers_[i - 1]->StrideU();
  1403. pictures_[i].iStride[2] = downscaled_buffers_[i - 1]->StrideV();
  1404. pictures_[i].pData[0] =
  1405. const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataY());
  1406. pictures_[i].pData[1] =
  1407. const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataU());
  1408. pictures_[i].pData[2] =
  1409. const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataV());
  1410. // Scale the image down a number of times by downsampling factor.
  1411. libyuv::I420Scale(pictures_[i - 1].pData[0], pictures_[i - 1].iStride[0],
  1412. pictures_[i - 1].pData[1], pictures_[i - 1].iStride[1],
  1413. pictures_[i - 1].pData[2], pictures_[i - 1].iStride[2],
  1414. configurations_[i - 1].width,
  1415. configurations_[i - 1].height, pictures_[i].pData[0],
  1416. pictures_[i].iStride[0], pictures_[i].pData[1],
  1417. pictures_[i].iStride[1], pictures_[i].pData[2],
  1418. pictures_[i].iStride[2], configurations_[i].width,
  1419. configurations_[i].height, libyuv::kFilterBilinear);
  1420. }
  1421. if (!configurations_[i].sending) {
  1422. continue;
  1423. }
  1424. if (frame_types != nullptr) {
  1425. // Skip frame?
  1426. if ((*frame_types)[i] == VideoFrameType::kEmptyFrame) {
  1427. continue;
  1428. }
  1429. }
  1430. if (send_key_frame) {
  1431. // API doc says ForceIntraFrame(false) does nothing, but calling this
  1432. // function forces a key frame regardless of the |bIDR| argument's value.
  1433. // (If every frame is a key frame we get lag/delays.)
  1434. encoders_[i]->ForceIntraFrame(true);
  1435. configurations_[i].key_frame_request = false;
  1436. }
  1437. // EncodeFrame output.
  1438. SFrameBSInfo info;
  1439. memset(&info, 0, sizeof(SFrameBSInfo));
  1440. // Encode!
  1441. int enc_ret = encoders_[i]->EncodeFrame(&pictures_[i], &info);
  1442. if (enc_ret != 0) {
  1443. RTC_LOG(LS_ERROR)
  1444. << "OpenH264 frame encoding failed, EncodeFrame returned " << enc_ret
  1445. << ".";
  1446. ReportError();
  1447. return WEBRTC_VIDEO_CODEC_ERROR;
  1448. }
  1449. encoded_images_[i]._encodedWidth = configurations_[i].width;
  1450. encoded_images_[i]._encodedHeight = configurations_[i].height;
  1451. encoded_images_[i].SetTimestamp(input_frame.timestamp());
  1452. encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
  1453. encoded_images_[i].SetSpatialIndex(configurations_[i].simulcast_idx);
  1454. // Split encoded image up into fragments. This also updates
  1455. // |encoded_image_|.
  1456. RtpFragmentize(&encoded_images_[i], &info);
  1457. // Encoder can skip frames to save bandwidth in which case
  1458. // |encoded_images_[i]._length| == 0.
  1459. //进行编码帧的输出
  1460. if (encoded_images_[i].size() > 0) {
  1461. // Parse QP.
  1462. h264_bitstream_parser_.ParseBitstream(encoded_images_[i].data(),
  1463. encoded_images_[i].size());
  1464. h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_);
  1465. // Deliver encoded image.
  1466. CodecSpecificInfo codec_specific;
  1467. codec_specific.codecType = kVideoCodecH264;
  1468. codec_specific.codecSpecific.H264.packetization_mode =
  1469. packetization_mode_;
  1470. codec_specific.codecSpecific.H264.temporal_idx = kNoTemporalIdx;
  1471. codec_specific.codecSpecific.H264.idr_frame =
  1472. info.eFrameType == videoFrameTypeIDR;
  1473. codec_specific.codecSpecific.H264.base_layer_sync = false;
  1474. if (configurations_[i].num_temporal_layers > 1) {
  1475. const uint8_t tid = info.sLayerInfo[0].uiTemporalId;
  1476. codec_specific.codecSpecific.H264.temporal_idx = tid;
  1477. codec_specific.codecSpecific.H264.base_layer_sync =
  1478. tid > 0 && tid < tl0sync_limit_[i];
  1479. if (codec_specific.codecSpecific.H264.base_layer_sync) {
  1480. tl0sync_limit_[i] = tid;
  1481. }
  1482. if (tid == 0) {
  1483. tl0sync_limit_[i] = configurations_[i].num_temporal_layers;
  1484. }
  1485. }
  1486. encoded_image_callback_->OnEncodedImage(encoded_images_[i],
  1487. &codec_specific);
  1488. }
  1489. }
  1490. return WEBRTC_VIDEO_CODEC_OK;
  1491. }
  1492. // // Initialization parameters.
  1493. // // There are two ways to initialize. There is SEncParamBase (cleared with
  1494. // // memset(&p, 0, sizeof(SEncParamBase)) used in Initialize, and SEncParamExt
  1495. // // which is a superset of SEncParamBase (cleared with GetDefaultParams) used
  1496. // // in InitializeExt.
  1497. // SEncParamExt JetH264Encoder::CreateEncoderParams(size_t i) const {
  1498. // SEncParamExt encoder_params;
  1499. // encoders_[i]->GetDefaultParams(&encoder_params);
  1500. // if (codec_.mode == VideoCodecMode::kRealtimeVideo) {
  1501. // encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
  1502. // } else if (codec_.mode == VideoCodecMode::kScreensharing) {
  1503. // encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
  1504. // } else {
  1505. // printf("other");
  1506. // RTC_NOTREACHED();
  1507. // }
  1508. // encoder_params.iPicWidth = configurations_[i].width;
  1509. // encoder_params.iPicHeight = configurations_[i].height;
// encoder_params.iTargetBitrate = configurations_[i].target_bps;  // Target bitrate (also the initial bitrate).
  1511. // // Keep unspecified. WebRTC's max codec bitrate is not the same setting
  1512. // // as OpenH264's iMaxBitrate. More details in https://crbug.com/webrtc/11543
// encoder_params.iMaxBitrate = UNSPECIFIED_BIT_RATE;  // Max bitrate; left unspecified (default).
// // Rate Control mode
// encoder_params.iRCMode = RC_BITRATE_MODE;  // Rate-control mode.
// encoder_params.fMaxFrameRate = configurations_[i].max_frame_rate;  // Maximum frame rate.
  1517. // // The following parameters are extension parameters (they're in SEncParamExt,
  1518. // // not in SEncParamBase).
// encoder_params.bEnableFrameSkip = configurations_[i].frame_dropping_on;  // Whether frame dropping is enabled.
// // |uiIntraPeriod| - multiple of GOP size
// // |keyFrameInterval| - number of frames
// encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval;  // Key-frame interval.
  1523. // encoder_params.uiMaxNalSize = 0;
  1524. // // Threading model: use auto.
  1525. // // 0: auto (dynamic imp. internal encoder)
  1526. // // 1: single thread (default value)
  1527. // // >1: number of threads
  1528. // encoder_params.iMultipleThreadIdc = NumberOfThreads(
  1529. // encoder_params.iPicWidth, encoder_params.iPicHeight, number_of_cores_);
// // The base spatial layer 0 is the only one we use. (Spatial-layer configuration.)
  1531. // encoder_params.sSpatialLayers[0].iVideoWidth = encoder_params.iPicWidth;
  1532. // encoder_params.sSpatialLayers[0].iVideoHeight = encoder_params.iPicHeight;
  1533. // encoder_params.sSpatialLayers[0].fFrameRate = encoder_params.fMaxFrameRate;
  1534. // encoder_params.sSpatialLayers[0].iSpatialBitrate =
  1535. // encoder_params.iTargetBitrate;
  1536. // encoder_params.sSpatialLayers[0].iMaxSpatialBitrate =
  1537. // encoder_params.iMaxBitrate;
  1538. // encoder_params.iTemporalLayerNum = configurations_[i].num_temporal_layers;
  1539. // if (encoder_params.iTemporalLayerNum > 1) {
  1540. // encoder_params.iNumRefFrame = 1;
  1541. // }
  1542. // RTC_LOG(INFO) << "OpenH264 version is " << OPENH264_MAJOR << "."
  1543. // << OPENH264_MINOR;
// // Packetization mode.
  1545. // switch (packetization_mode_) {
  1546. // case H264PacketizationMode::SingleNalUnit:
  1547. // // Limit the size of the packets produced.
  1548. // encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
  1549. // encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
  1550. // SM_SIZELIMITED_SLICE;
  1551. // encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint =
  1552. // static_cast<unsigned int>(max_payload_size_);
  1553. // RTC_LOG(INFO) << "Encoder is configured with NALU constraint: "
  1554. // << max_payload_size_ << " bytes";
  1555. // break;
  1556. // case H264PacketizationMode::NonInterleaved:
  1557. // // When uiSliceMode = SM_FIXEDSLCNUM_SLICE, uiSliceNum = 0 means auto
  1558. // // design it with cpu core number.
  1559. // // TODO(sprang): Set to 0 when we understand why the rate controller borks
  1560. // // when uiSliceNum > 1.
  1561. // encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
  1562. // encoder_params.sSpatialLayers[0].sSliceArgument.uiSliceMode =
  1563. // SM_FIXEDSLCNUM_SLICE;
  1564. // break;
  1565. // }
  1566. // return encoder_params;
  1567. // }
  1568. void JetH264Encoder::ReportInit() {
  1569. if (has_reported_init_)
  1570. return;
  1571. RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event",
  1572. kH264EncoderEventInit, kH264EncoderEventMax);
  1573. has_reported_init_ = true;
  1574. }
  1575. void JetH264Encoder::ReportError() {
  1576. if (has_reported_error_)
  1577. return;
  1578. RTC_HISTOGRAM_ENUMERATION("WebRTC.Video.H264EncoderImpl.Event",
  1579. kH264EncoderEventError, kH264EncoderEventMax);
  1580. has_reported_error_ = true;
  1581. }
  1582. VideoEncoder::EncoderInfo JetH264Encoder::GetEncoderInfo() const {
  1583. EncoderInfo info;
  1584. info.supports_native_handle = false;
  1585. info.implementation_name = "OpenH264";
  1586. info.scaling_settings =
  1587. VideoEncoder::ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
  1588. info.is_hardware_accelerated = false;
  1589. info.has_internal_source = false;
  1590. info.supports_simulcast = true;
  1591. return info;
  1592. }
  1593. void JetH264Encoder::LayerConfig::SetStreamState(bool send_stream) {
  1594. if (send_stream && !sending) {
  1595. // Need a key frame if we have not sent this stream before.
  1596. key_frame_request = true;
  1597. }
  1598. sending = send_stream;
  1599. }
  1600. } // namespace webrtc