// gsml_capturer.cpp — V4L2 camera capture bridged into a WebRTC video track source.
#include "pch.h"

// C system headers
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

// C++ standard library
#include <chrono>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>
#include <thread>

// Project headers
#include "../common/comm.h"
#include "./include/api.h"
#include "lock.h"
#include "capture_op.h"
#include "gsml_capturer.h"
  16. rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op)
  17. {
  18. auto video_capture=std::make_unique<GSMLCapturer>(op);
  19. video_capture->Start();
  20. rtc::scoped_refptr<GSMLTrackSource> video_source = GSMLTrackSource::Create(std::move(video_capture));
  21. return video_source;
  22. }
// Store the capture-operation descriptor. The capturer never deletes
// |lhs| in this file — ownership presumably stays with the caller
// (confirm against CaptureOp's lifetime management).
GSMLCapturer::GSMLCapturer(CaptureOp* lhs):_op(lhs)
{
}
  26. void GSMLCapturer::Start()
  27. {
  28. _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
  29. }
  30. //视频的捕获
  31. bool GSMLCapturer::open_cam()
  32. {
  33. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  34. _op->_ctx0=(context_t *)malloc(sizeof(context_t));
  35. //for(int i=0;i<count;i++)
  36. {
  37. context_t * p=_op->_ctx0;
  38. p->cam_fd=-1;
  39. p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
  40. p->cam_w = 1280;
  41. p->cam_h = 720;
  42. // p->frame = 0;
  43. p->g_buff = NULL;
  44. // p->capture_dmabuf = true; // opencv display v4l2 can't be true
  45. p->capture_dmabuf = false; // opencv display v4l2 can't be true
  46. p->fps = 30;
  47. p->enable_verbose = false;
  48. std::string devname="/dev/video" + std::to_string(_op->GetIndex());
  49. // std::cout<<"设备:"<<devname<<std::endl;
  50. //ctx.cam_devname=devname+std::to_string();
  51. p->cam_fd = open(devname.c_str(), O_RDWR); //打开视频设备
  52. if( p->cam_fd==-1)
  53. {
  54. ERROR_RETURN("Failed to open camera device %s: %s (%d)",
  55. devname.c_str(), strerror(errno), errno);
  56. }
  57. struct v4l2_format fmt;
  58. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  59. fmt.fmt.pix.width = p->cam_w;
  60. fmt.fmt.pix.height = p->cam_h;
  61. fmt.fmt.pix.pixelformat = p->cam_pixfmt;
  62. fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  63. if (ioctl( p->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
  64. ERROR_RETURN("Failed to set camera output format: %s (%d)",
  65. strerror(errno), errno);
  66. /* Get the real format in case the desired is not supported */
  67. memset(&fmt, 0, sizeof fmt);
  68. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  69. if (ioctl( p->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
  70. ERROR_RETURN("Failed to get camera output format: %s (%d)",
  71. strerror(errno), errno);
  72. if (fmt.fmt.pix.width != p->cam_w ||
  73. fmt.fmt.pix.height != p->cam_h ||
  74. fmt.fmt.pix.pixelformat != p->cam_pixfmt)
  75. {
  76. WARN("The desired format is not supported");
  77. p->cam_w = fmt.fmt.pix.width;
  78. p->cam_h = fmt.fmt.pix.height;
  79. p->cam_pixfmt =fmt.fmt.pix.pixelformat;
  80. }
  81. struct v4l2_streamparm streamparm;
  82. memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
  83. streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  84. /*
  85. streamparm.parm.capture.timeperframe.numerator = 1;
  86. streamparm.parm.capture.timeperframe.denominator = 3;
  87. streamparm.parm.output.timeperframe.numerator = 1;
  88. streamparm.parm.output.timeperframe.denominator = 3;
  89. */
  90. ioctl ( p->cam_fd, VIDIOC_G_PARM, &streamparm);
  91. printf(">>: Frame rate: %u/%u\n",streamparm.parm.capture.timeperframe.numerator,streamparm.parm.capture.timeperframe.denominator);
  92. INFO("Camera ouput format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
  93. fmt.fmt.pix.width,
  94. fmt.fmt.pix.height,
  95. fmt.fmt.pix.bytesperline,
  96. fmt.fmt.pix.sizeimage,
  97. streamparm.parm.capture.timeperframe.denominator,
  98. streamparm.parm.capture.timeperframe.numerator);
  99. }
  100. return true;
  101. }
  102. void GSMLCapturer::Run()
  103. {
  104. if(!open_cam()) return;
  105. prepare_buffer();
  106. start_streams();
  107. _run=true;
  108. struct pollfd fds[1];
  109. struct v4l2_buffer v4l2_buf;
  110. long long _source = 0,_dst = 0;
  111. while(_run)
  112. {
  113. int cam_fd=-1;
  114. context_t * p=nullptr;
  115. //if((_op->GetType()!=RenderPosition::FRONT&&_op->GetType()!=RenderPosition::BACK)||_op->IsForward())
  116. if(_op->IsForward())
  117. //if(_op->GetType()!=RenderPosition::ALL)
  118. {
  119. cam_fd=_op->_ctx0->cam_fd;
  120. p=_op->_ctx0;
  121. }
  122. else{
  123. cam_fd=_op->_ctx1->cam_fd;
  124. p=_op->_ctx1;
  125. }
  126. //assert(p!=nullptr);
  127. /*
  128. else
  129. {
  130. if(_op->IsForward())
  131. {
  132. cam_fd=_ctx[0].cam_fd;
  133. p=&_ctx[0];
  134. }
  135. else
  136. {
  137. cam_fd=_ctx[1].cam_fd;
  138. p=&_ctx[1];
  139. }
  140. }
  141. */
  142. fds[0].fd = cam_fd;
  143. fds[0].events = POLLIN;
  144. if(poll(fds, 1, 5000) > 0)
  145. {
  146. if (fds[0].revents & POLLIN)
  147. {
  148. /* Dequeue a camera buff */
  149. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  150. v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  151. if (p->capture_dmabuf)
  152. v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  153. else
  154. v4l2_buf.memory = V4L2_MEMORY_MMAP;
  155. if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  156. printf("Failed to dequeue camera buff: %s (%d)",
  157. strerror(errno), errno);
  158. if(_op->GetType()==RenderPosition::FRONT)
  159. {
  160. _source = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  161. //printf("encode delay:%lld----frame:%ld\r\n", _source);
  162. }
  163. // if (p->g_buff[v4l2_buf.index].start == NULL) {
  164. // printf("Buffer is not allocated or mapped correctly.\n");
  165. // } else {
  166. // printf("Buffer pointer is valid.\n");
  167. // }
  168. //_ctx.frame++;
  169. rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  170. // rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(800,600);
  171. // std::cout << v4l2_buf.index << std::endl;
  172. const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  173. buffer->MutableDataY(), buffer->StrideY(),
  174. buffer->MutableDataU(), buffer->StrideU(),
  175. buffer->MutableDataV(), buffer->StrideV(),
  176. 0, 0,
  177. p->cam_w,p->cam_h,
  178. buffer->width(), buffer->height(),
  179. libyuv::kRotate0, libyuv::FOURCC_YUYV);
  180. // // 假设 p->g_buff[v4l2_buf.index].start 是 YUYV 格式的源数据
  181. // unsigned char* src = static_cast<unsigned char*>(p->g_buff[v4l2_buf.index].start);
  182. // int width = p->cam_w;
  183. // int height = p->cam_h;
  184. // // 创建 I420Buffer
  185. // rtc::scoped_refptr<webrtc::I420Buffer> buffer = webrtc::I420Buffer::Create(
  186. // width, height,
  187. // buffer->StrideY(), // Y plane stride
  188. // buffer->StrideU(), // U plane stride
  189. // buffer->StrideV() // V plane stride
  190. // );
  191. // // 使用 CUDA 进行 YUYV 到 I420 的转换
  192. // gpuConvertYUYVtoI420(
  193. // src,
  194. // buffer->MutableDataY(), buffer->MutableDataU(), buffer->MutableDataV(),
  195. // width, height,
  196. // buffer->StrideY(), buffer->StrideU()
  197. // );
  198. // // 创建 VideoFrame 并传递给 _broadcaster
  199. // webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, rtc::TimeNanos());
  200. // _broadcaster.OnFrame(frame);
  201. int width = p->cam_w;
  202. int height = p->cam_h;
  203. if (conversionResult >= 0)
  204. {
  205. // printf("dma success \n");
  206. webrtc::VideoFrame videoFrame(buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeNanos());
  207. if ((p->cam_w == 0) && (p->cam_h == 0)) {
  208. _broadcaster.OnFrame(videoFrame);
  209. }
  210. else
  211. {
  212. if (height == 0) {
  213. height = (videoFrame.height() * width) / videoFrame.width();
  214. }
  215. else if (width == 0) {
  216. width = (videoFrame.width() * height) / videoFrame.height();
  217. }
  218. int stride_y = width;
  219. int stride_uv = (width + 1) / 2;
  220. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
  221. scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
  222. webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeNanos());
  223. _broadcaster.OnFrame(frame);
  224. }
  225. }
  226. if(_op->GetType()==RenderPosition::FRONT)
  227. {
  228. _dst = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  229. //printf("encode delay:%lld\r\n",_dst - _source);
  230. }
  231. if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
  232. printf("Failed to queue camera buffers: %s (%d)",
  233. strerror(errno), errno);
  234. }
  235. }
  236. //std::this_thread::sleep_for(std::chrono::milliseconds(30));
  237. }
  238. stop_streams();
  239. close_cam();
  240. }
// Public teardown entry point: delegates to Stop(), which joins the
// capture thread (the thread itself releases streams and the device at
// the end of Run()).
void GSMLCapturer::Destroy() {
Stop();
}
// Register (or update the wants of) a WebRTC video sink; frames produced
// in Run() are fanned out to every registered sink via _broadcaster.
void GSMLCapturer::AddOrUpdateSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
    _broadcaster.AddOrUpdateSink(sink, wants);
}
// Unregister a previously added video sink; it stops receiving frames.
void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
{
    _broadcaster.RemoveSink(sink);
}
  253. bool GSMLCapturer::prepare_buffer()
  254. {
  255. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  256. //for(int32_t i=0;i<count;i++)
  257. {
  258. context_t * p=_op->_ctx0;
  259. p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
  260. if ( p->g_buff == NULL)
  261. ERROR_RETURN("Failed to allocate global buffer context");
  262. if ( p->capture_dmabuf) {
  263. if (!request_camera_buff(p))
  264. ERROR_RETURN("Failed to set up camera buff");
  265. } else {
  266. if (!request_camera_buff_mmap(p))
  267. ERROR_RETURN("Failed to set up camera buff");
  268. }
  269. INFO("Succeed in preparing stream buffers");
  270. }
  271. return true;
  272. }
// Register V4L2_BUFFERS_NUM DMABUF-backed capture buffers with the
// driver and enqueue them all so streaming can begin.
// NOTE(review): this path reads p->g_buff[index].dmabuff_fd, which is
// never assigned anywhere in this file — presumably populated by
// allocator code outside this view; confirm before enabling
// capture_dmabuf.
bool GSMLCapturer::request_camera_buff( context_t * p)
{
    // for(int32_t i=0;i<count;i++)
    {
        // context_t * p=&_ctx[i];
        // Ask the driver for DMABUF-mode capture buffers.
        struct v4l2_requestbuffers rb;
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_DMABUF;
        if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                strerror(errno), errno);
        // The driver may grant fewer buffers than requested.
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof buf);
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_DMABUF;
            if (ioctl( p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                    strerror(errno), errno);
            /* TODO: add support for multi-planer
               Enqueue empty v4l2 buff into camera capture plane */
            buf.m.fd = (unsigned long) p->g_buff[index].dmabuff_fd;
            // Keep our bookkeeping size in sync with the driver's length.
            if (buf.length != p->g_buff[index].size)
            {
                WARN("Camera v4l2 buf length is not expected");
                p->g_buff[index].size = buf.length;
            }
            if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                    strerror(errno), errno);
        }
    }
    return true;
}
  314. bool GSMLCapturer::stop_streams()
  315. {
  316. enum v4l2_buf_type type;
  317. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  318. //for(int32_t i=0;i<count;i++)
  319. {
  320. context_t * p=_op->_ctx0;
  321. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  322. {
  323. if(munmap(p->g_buff[index].start,p->g_buff[index].size)==-1)
  324. {
  325. ERROR_RETURN("munmap failed: %s (%d)", strerror(errno), errno);
  326. }
  327. }
  328. /* Stop v4l2 streaming */
  329. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  330. if (ioctl( p->cam_fd, VIDIOC_STREAMOFF, &type))
  331. ERROR_RETURN("Failed to stop streaming: %s (%d)",
  332. strerror(errno), errno);
  333. INFO("Camera video streaming off ...");
  334. }
  335. return true;
  336. }
  337. bool GSMLCapturer::start_streams()
  338. {
  339. enum v4l2_buf_type type; //是否可以注释??
  340. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  341. // for(int32_t i=0;i<count;i++)
  342. {
  343. context_t * p=_op->_ctx0;
  344. /* Start v4l2 streaming */
  345. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  346. if (ioctl( p->cam_fd, VIDIOC_STREAMON, &type) < 0)
  347. ERROR_RETURN("Failed to start streaming: %s (%d)",
  348. strerror(errno), errno);
  349. usleep(200);
  350. INFO("Camera video streaming on ...");
  351. }
  352. return true;
  353. }
  354. bool GSMLCapturer::request_camera_buff_mmap(context_t * p)
  355. {
  356. struct v4l2_requestbuffers rb;
  357. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  358. // for(int32_t i=0;i<count;i++)
  359. {
  360. memset(&rb, 0, sizeof(rb));
  361. rb.count = V4L2_BUFFERS_NUM;
  362. rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  363. rb.memory = V4L2_MEMORY_MMAP;
  364. if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
  365. ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
  366. strerror(errno), errno);
  367. if (rb.count != V4L2_BUFFERS_NUM)
  368. ERROR_RETURN("V4l2 buffer number is not as desired");
  369. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  370. {
  371. struct v4l2_buffer buf;
  372. /* Query camera v4l2 buf length */
  373. memset(&buf, 0, sizeof buf);
  374. buf.index = index;
  375. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  376. buf.memory = V4L2_MEMORY_MMAP;
  377. if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
  378. ERROR_RETURN("Failed to query buff: %s (%d)",
  379. strerror(errno), errno);
  380. p->g_buff[index].size = buf.length;
  381. p->g_buff[index].start = (unsigned char *)
  382. mmap (NULL /* start anywhere */,
  383. buf.length,
  384. PROT_READ | PROT_WRITE /* required */,
  385. MAP_SHARED /* recommended */,
  386. p->cam_fd, buf.m.offset);
  387. if (MAP_FAILED == p->g_buff[index].start)
  388. ERROR_RETURN("Failed to map buffers");
  389. if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
  390. ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
  391. strerror(errno), errno);
  392. }
  393. }
  394. return true;
  395. }
  396. void GSMLCapturer::close_cam()
  397. {
  398. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  399. //for(int32_t i=0;i<count;i++)
  400. {
  401. context_t * p=_op->_ctx0;
  402. if(p->g_buff!=nullptr)
  403. {
  404. free(p->g_buff);
  405. p->g_buff = nullptr;
  406. }
  407. if(p->cam_fd>0)
  408. close(p->cam_fd);
  409. }
  410. free(_op->_ctx0);
  411. }
  412. void GSMLCapturer::Stop()
  413. {
  414. _run=false;
  415. //if(_thread.joinable())
  416. _thread.join();
  417. }