// gsml_capturer.cpp — V4L2 camera capture feeding a WebRTC video track source.
  1. #include "pch.h"
  2. #include "../common/comm.h"
  3. #include "./include/api.h"
  4. #include "lock.h"
  5. #include <iostream>
  6. #include <linux/videodev2.h>
  7. #include <poll.h>
  8. #include <sys/ioctl.h>
  9. #include <sys/stat.h>
  10. #include <sys/mman.h>
  11. #include <fcntl.h>
  12. #include <errno.h>
  13. #include <string>
  14. #include "capture_op.h"
  15. #include "gsml_capturer.h"
  16. rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op)
  17. {
  18. auto video_capture=std::make_unique<GSMLCapturer>(op);
  19. video_capture->Start();
  20. rtc::scoped_refptr<GSMLTrackSource> video_source = GSMLTrackSource::Create(std::move(video_capture));
  21. return video_source;
  22. }
  23. GSMLCapturer::GSMLCapturer(CaptureOp* lhs):_op(lhs)
  24. {
  25. }
  26. void GSMLCapturer::Start()
  27. {
  28. _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
  29. }
  30. //视频的捕获
  31. bool GSMLCapturer::open_cam()
  32. {
  33. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  34. _op->_ctx0=(context_t *)malloc(sizeof(context_t));
  35. //for(int i=0;i<count;i++)
  36. {
  37. context_t * p=_op->_ctx0;
  38. p->cam_fd=-1;
  39. p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
  40. p->cam_w = 1280;
  41. p->cam_h = 720;
  42. // p->frame = 0;
  43. p->g_buff = NULL;
  44. p->capture_dmabuf = false; // opencv display v4l2 can't be true
  45. p->fps = 30;
  46. p->enable_verbose = false;
  47. std::string devname="/dev/video" + std::to_string(_op->GetIndex());
  48. // std::cout<<"设备:"<<devname<<std::endl;
  49. //ctx.cam_devname=devname+std::to_string();
  50. p->cam_fd = open(devname.c_str(), O_RDWR); //打开视频设备
  51. if( p->cam_fd==-1)
  52. {
  53. ERROR_RETURN("Failed to open camera device %s: %s (%d)",
  54. devname.c_str(), strerror(errno), errno);
  55. }
  56. //设置采集的视频格式
  57. struct v4l2_format fmt;
  58. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // 视频捕获类型,用于从视频设备捕获图像数据(从摄像头获取实时视频、视频编解码)
  59. fmt.fmt.pix.width = p->cam_w;
  60. fmt.fmt.pix.height = p->cam_h;
  61. fmt.fmt.pix.pixelformat = p->cam_pixfmt; //像素格式
  62. fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; //图像扫描方式(交错扫描)V4L2_FIELD_NONE 全帧扫描方式会是一个不错的选择。它可以提供较好的图像质量,如果考虑到带宽问题的话,就选择V4L2_FIELD_INTERLACED。
  63. if (ioctl( p->cam_fd, VIDIOC_S_FMT, &fmt) < 0) //设置摄像头的视频输出格式
  64. ERROR_RETURN("Failed to set camera output format: %s (%d)",
  65. strerror(errno), errno);
  66. /* Get the real format in case the desired is not supported */
  67. memset(&fmt, 0, sizeof fmt);
  68. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  69. if (ioctl( p->cam_fd, VIDIOC_G_FMT, &fmt) < 0) //获取当前摄像头的视频输出格式
  70. ERROR_RETURN("Failed to get camera output format: %s (%d)",
  71. strerror(errno), errno);
  72. if (fmt.fmt.pix.width != p->cam_w ||
  73. fmt.fmt.pix.height != p->cam_h ||
  74. fmt.fmt.pix.pixelformat != p->cam_pixfmt) //判断当前摄像头的输出格式是否与期望的一致
  75. {
  76. WARN("The desired format is not supported");
  77. p->cam_w = fmt.fmt.pix.width;
  78. p->cam_h = fmt.fmt.pix.height;
  79. p->cam_pixfmt =fmt.fmt.pix.pixelformat;
  80. }
  81. //v4l2_streamparm 结构体用于控制视频捕获设备的流参数,如帧率、输入源等
  82. struct v4l2_streamparm streamparm;
  83. memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
  84. streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;//一个宏定义,用于指定视频捕获类型的缓冲区。
  85. /*
  86. streamparm.parm.capture.timeperframe.numerator = 1;
  87. streamparm.parm.capture.timeperframe.denominator = 3;
  88. streamparm.parm.output.timeperframe.numerator = 1;
  89. streamparm.parm.output.timeperframe.denominator = 3;
  90. */
  91. //VIDIOC_G_PARM 获取流参数的请求
  92. ioctl ( p->cam_fd, VIDIOC_G_PARM, &streamparm);
  93. printf(">>: Frame rate: %u/%u\n",streamparm.parm.capture.timeperframe.numerator,streamparm.parm.capture.timeperframe.denominator);
  94. INFO("Camera ouput format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
  95. fmt.fmt.pix.width,
  96. fmt.fmt.pix.height,
  97. fmt.fmt.pix.bytesperline,
  98. fmt.fmt.pix.sizeimage,
  99. streamparm.parm.capture.timeperframe.denominator,
  100. streamparm.parm.capture.timeperframe.numerator);
  101. }
  102. return true;
  103. }
  104. void GSMLCapturer::Run()
  105. {
  106. if(!open_cam()) return;
  107. prepare_buffer();
  108. start_streams();
  109. _run=true;
  110. struct pollfd fds[1];
  111. struct v4l2_buffer v4l2_buf;
  112. long long _source = 0,_dst = 0;
  113. while(_run)
  114. {
  115. int cam_fd=-1;
  116. context_t * p=nullptr;
  117. //if((_op->GetType()!=RenderPosition::FRONT&&_op->GetType()!=RenderPosition::BACK)||_op->IsForward())
  118. if(_op->IsForward())
  119. //if(_op->GetType()!=RenderPosition::ALL)
  120. {
  121. cam_fd=_op->_ctx0->cam_fd;
  122. p=_op->_ctx0;
  123. }
  124. else{
  125. cam_fd=_op->_ctx1->cam_fd;
  126. p=_op->_ctx1;
  127. }
  128. //assert(p!=nullptr);
  129. /*
  130. else
  131. {
  132. if(_op->IsForward())
  133. {
  134. cam_fd=_ctx[0].cam_fd;
  135. p=&_ctx[0];
  136. }
  137. else
  138. {
  139. cam_fd=_ctx[1].cam_fd;
  140. p=&_ctx[1];
  141. }
  142. }
  143. */
  144. fds[0].fd = cam_fd;
  145. fds[0].events = POLLIN;
  146. if(poll(fds, 1, 5000) > 0)
  147. {
  148. if (fds[0].revents & POLLIN)
  149. {
  150. /* Dequeue a camera buff */
  151. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  152. v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  153. if (p->capture_dmabuf)
  154. v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  155. else
  156. v4l2_buf.memory = V4L2_MEMORY_MMAP;
  157. // for(int32_t i=0;i<count;i++)
  158. {
  159. if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  160. printf("Failed to dequeue camera buff: %s (%d)",
  161. strerror(errno), errno);
  162. if(_op->GetType()==RenderPosition::FRONT)
  163. {
  164. _source = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  165. //printf("encode delay:%lld----frame:%ld\r\n", _source);
  166. }
  167. //_ctx.frame++;
  168. rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  169. const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  170. buffer->MutableDataY(), buffer->StrideY(),
  171. buffer->MutableDataU(), buffer->StrideU(),
  172. buffer->MutableDataV(), buffer->StrideV(),
  173. 0, 0,
  174. p->cam_w,p->cam_h,
  175. buffer->width(), buffer->height(),
  176. libyuv::kRotate0, libyuv::FOURCC_YUYV);
  177. int width = p->cam_w;
  178. int height = p->cam_h;
  179. if (conversionResult >= 0)
  180. {
  181. //webrtc::VideoFrame 表示一帧视频数据
  182. webrtc::VideoFrame videoFrame(buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeNanos());
  183. if ((p->cam_w == 0) && (p->cam_h == 0)) {
  184. _broadcaster.OnFrame(videoFrame);
  185. }
  186. else
  187. {
  188. if (height == 0) {
  189. height = (videoFrame.height() * width) / videoFrame.width();
  190. }
  191. else if (width == 0) {
  192. width = (videoFrame.width() * height) / videoFrame.height();
  193. }
  194. int stride_y = width;
  195. int stride_uv = (width + 1) / 2;
  196. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
  197. scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
  198. webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeNanos());
  199. _broadcaster.OnFrame(frame);
  200. }
  201. }
  202. if(_op->GetType()==RenderPosition::FRONT)
  203. {
  204. _dst = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  205. //printf("encode delay:%lld\r\n",_dst - _source);
  206. }
  207. // VIDIOC_QBUF 是一个 V4L2 设备控制命令,用于将一个视频帧缓冲区排队到设备的输入/输出缓冲区队列中
  208. if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
  209. printf("Failed to queue camera buffers: %s (%d)",
  210. strerror(errno), errno);
  211. }
  212. }
  213. //std::this_thread::sleep_for(std::chrono::milliseconds(30));
  214. }
  215. stop_streams();
  216. close_cam();
  217. }
  218. void GSMLCapturer::Destroy() {
  219. Stop();
  220. }
  221. //一个视频接收端(sink)的指针, const rtc::VideoSinkWants& wants 一个视频接收端的sink设置参数
  222. //_broadcaster对象中添加或者更新一个视频接收端sink
  223. void GSMLCapturer::AddOrUpdateSink(
  224. rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
  225. const rtc::VideoSinkWants& wants) {
  226. _broadcaster.AddOrUpdateSink(sink, wants);
  227. }
  228. //从_broadcaster对象中移除一个视频接收端(sink)
  229. void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
  230. {
  231. _broadcaster.RemoveSink(sink);
  232. }
  233. bool GSMLCapturer::prepare_buffer()
  234. {
  235. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  236. //for(int32_t i=0;i<count;i++)
  237. {
  238. context_t * p=_op->_ctx0;
  239. p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
  240. if ( p->g_buff == NULL)
  241. ERROR_RETURN("Failed to allocate global buffer context");
  242. if ( p->capture_dmabuf) {
  243. if (!request_camera_buff(p))
  244. ERROR_RETURN("Failed to set up camera buff");
  245. } else {
  246. if (!request_camera_buff_mmap(p))
  247. ERROR_RETURN("Failed to set up camera buff");
  248. }
  249. INFO("Succeed in preparing stream buffers");
  250. }
  251. return true;
  252. }
// DMABUF path: ask the driver for V4L2_BUFFERS_NUM capture buffers,
// attach our externally-allocated dmabuf fds to them, and enqueue each
// one so streaming can start.
// NOTE(review): assumes p->g_buff[index].dmabuff_fd was populated by the
// caller before this runs — not visible in this file; verify.
bool GSMLCapturer::request_camera_buff( context_t * p)
{
    // for(int32_t i=0;i<count;i++)
    {
        // context_t * p=&_ctx[i];
        struct v4l2_requestbuffers rb;
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_DMABUF;
        // VIDIOC_REQBUFS: allocate driver-side buffer slots for DMABUF import.
        if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                    strerror(errno), errno);
        // The driver may grant fewer buffers than requested; that is fatal here.
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof buf);
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_DMABUF;
            if (ioctl( p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                        strerror(errno), errno);
            /* TODO: add support for multi-planer
               Enqueue empty v4l2 buff into camera capture plane */
            buf.m.fd = (unsigned long) p->g_buff[index].dmabuff_fd;
            // Keep our bookkeeping in sync with the driver's reported size.
            if (buf.length != p->g_buff[index].size)
            {
                WARN("Camera v4l2 buf length is not expected");
                p->g_buff[index].size = buf.length;
            }
            if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }
    return true;
}
  294. bool GSMLCapturer::stop_streams()
  295. {
  296. enum v4l2_buf_type type;
  297. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  298. //for(int32_t i=0;i<count;i++)
  299. {
  300. context_t * p=_op->_ctx0;
  301. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  302. {
  303. if(munmap(p->g_buff[index].start,p->g_buff[index].size)==-1)
  304. {
  305. ERROR_RETURN("munmap failed: %s (%d)", strerror(errno), errno);
  306. }
  307. }
  308. /* Stop v4l2 streaming */
  309. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  310. if (ioctl( p->cam_fd, VIDIOC_STREAMOFF, &type))
  311. ERROR_RETURN("Failed to stop streaming: %s (%d)",
  312. strerror(errno), errno);
  313. INFO("Camera video streaming off ...");
  314. }
  315. return true;
  316. }
  317. //启动视频流的获取
  318. bool GSMLCapturer::start_streams()
  319. {
  320. enum v4l2_buf_type type; //是否可以注释??
  321. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  322. // for(int32_t i=0;i<count;i++)
  323. {
  324. context_t * p=_op->_ctx0;
  325. /* Start v4l2 streaming */
  326. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  327. if (ioctl( p->cam_fd, VIDIOC_STREAMON, &type) < 0) //VIDIOC_STREAMON是启动视频流捕获的命令
  328. ERROR_RETURN("Failed to start streaming: %s (%d)",
  329. strerror(errno), errno);
  330. usleep(200);
  331. INFO("Camera video streaming on ...");
  332. }
  333. return true;
  334. }
// MMAP path: ask the driver for V4L2_BUFFERS_NUM capture buffers, map
// each one into this process (stored in p->g_buff), and enqueue them
// all so streaming can start.
bool GSMLCapturer::request_camera_buff_mmap(context_t * p)
{
    struct v4l2_requestbuffers rb;
    // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    // for(int32_t i=0;i<count;i++)
    {
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_MMAP;
        // VIDIOC_REQBUFS: allocate driver-side, mmap-able buffer slots.
        if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                    strerror(errno), errno);
        // The driver may grant fewer buffers than requested; that is fatal here.
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof buf);
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            // VIDIOC_QUERYBUF yields the buffer's length and mmap offset.
            if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                        strerror(errno), errno);
            p->g_buff[index].size = buf.length;
            p->g_buff[index].start = (unsigned char *)
                mmap (NULL /* start anywhere */,
                        buf.length,
                        PROT_READ | PROT_WRITE /* required */,
                        MAP_SHARED /* recommended */,
                        p->cam_fd, buf.m.offset);
            if (MAP_FAILED == p->g_buff[index].start)
                ERROR_RETURN("Failed to map buffers");
            // Hand the freshly mapped buffer to the driver's queue.
            if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }
    return true;
}
  377. void GSMLCapturer::close_cam()
  378. {
  379. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  380. //for(int32_t i=0;i<count;i++)
  381. {
  382. context_t * p=_op->_ctx0;
  383. if(p->g_buff!=nullptr)
  384. {
  385. free(p->g_buff);
  386. p->g_buff = nullptr;
  387. }
  388. if(p->cam_fd>0)
  389. close(p->cam_fd);
  390. }
  391. free(_op->_ctx0);
  392. }
  393. void GSMLCapturer::Stop()
  394. {
  395. _run=false;
  396. //if(_thread.joinable())
  397. _thread.join();
  398. }
  399. // void GSMLCapturer::Run()
  400. // {
  401. // if(!open_cam()) return;
  402. // prepare_buffer();
  403. // start_streams();
  404. // _run=true;
  405. // struct pollfd fds[1];
  406. // struct v4l2_buffer v4l2_buf;
  407. // long long _source = 0,_dst = 0; //记录视频到编码的时间延迟
  408. // while(_run)
  409. // {
  410. // int cam_fd=-1;
  411. // context_t * p=nullptr;
  412. // //if((_op->GetType()!=RenderPosition::FRONT&&_op->GetType()!=RenderPosition::BACK)||_op->IsForward())
  413. // if(_op->IsForward())
  414. // //if(_op->GetType()!=RenderPosition::ALL)
  415. // {
  416. // cam_fd=_op->_ctx0->cam_fd;
  417. // p=_op->_ctx0;
  418. // }
  419. // else{
  420. // cam_fd=_op->_ctx1->cam_fd;
  421. // p=_op->_ctx1;
  422. // }
  423. // //assert(p!=nullptr);
  424. // /*
  425. // else
  426. // {
  427. // if(_op->IsForward())
  428. // {
  429. // cam_fd=_ctx[0].cam_fd;
  430. // p=&_ctx[0];
  431. // }
  432. // else
  433. // {
  434. // cam_fd=_ctx[1].cam_fd;
  435. // p=&_ctx[1];
  436. // }
  437. // }
  438. // */
  439. // fds[0].fd = cam_fd;
  440. // fds[0].events = POLLIN;
  441. // if(poll(fds, 1, 5000) > 0)
  442. // {
  443. // if (fds[0].revents & POLLIN)
  444. // {
  445. // /* Dequeue a camera buff */
  446. // memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  447. // v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  448. // if (p->capture_dmabuf)
  449. // v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  450. // else
  451. // v4l2_buf.memory = V4L2_MEMORY_MMAP;
  452. // // VIDIOC_DQBUF从视频帧中取出数据存储在v4l2_buff中 入队
  453. // if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  454. // printf("Failed to dequeue camera buff: %s (%d)",
  455. // strerror(errno), errno);
  456. // if(_op->GetType()==RenderPosition::FRONT)
  457. // {
  458. // _source = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  459. // //printf("encode delay:%lld----frame:%ld\r\n", _source);
  460. // }
  461. // //_ctx.frame++;
  462. // //webrtc::I420Buffer类型的buffer存储转换后的I420的数据;
  463. // rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  464. // //yuyv转换到I420格式,(uint8_t*)p->g_buff[v4l2_buf.index].start: 这是指向摄像头缓冲区起始位置的指针,其中包含 YUYV 格式的视频帧数据
  465. // const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  466. // buffer->MutableDataY(), buffer->StrideY(),
  467. // buffer->MutableDataU(), buffer->StrideU(),
  468. // buffer->MutableDataV(), buffer->StrideV(),
  469. // 0, 0,
  470. // p->cam_w,p->cam_h,
  471. // buffer->width(), buffer->height(),
  472. // libyuv::kRotate0, libyuv::FOURCC_YUYV);
  473. // int width = p->cam_w;
  474. // int height = p->cam_h;
  475. // //转换I420成功
  476. // if (conversionResult >= 0)
  477. // {
  478. // webrtc::VideoFrame videoFrame(buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeNanos());
  479. // if ((p->cam_w == 0) && (p->cam_h == 0)) {
  480. // _broadcaster.OnFrame(videoFrame);
  481. // }
  482. // else
  483. // {
  484. // if (height == 0) {
  485. // height = (videoFrame.height() * width) / videoFrame.width();
  486. // }
  487. // else if (width == 0) {
  488. // width = (videoFrame.width() * height) / videoFrame.height();
  489. // }
  490. // int stride_y = width;
  491. // int stride_uv = (width + 1) / 2;
  492. // rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
  493. // scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
  494. // webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeNanos());
  495. // _broadcaster.OnFrame(frame);
  496. // }
  497. // }
  498. // if(_op->GetType()==RenderPosition::FRONT)
  499. // {
  500. // _dst = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  501. // //printf("encode delay:%lld\r\n",_dst - _source);
  502. // }
  503. // //出队 -> 用户空间,可以注释
  504. // if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
  505. // printf("Failed to queue camera buffers: %s (%d)",
  506. // strerror(errno), errno);
  507. // }
  508. // }
  509. // //std::this_thread::sleep_for(std::chrono::milliseconds(30));
  510. // }
  511. // stop_streams();
  512. // close_cam();
  513. // }