// gsml_capturer.cpp — GSML/V4L2 camera capturer feeding a WebRTC video track source.
#include "pch.h"
#include "../common/comm.h"
#include "./include/api.h"
#include "lock.h"
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>
#include "capture_op.h"
#include "gsml_capturer.h"
  16. rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op)
  17. {
  18. printf("start web cap \n");
  19. auto video_capture=std::make_unique<GSMLCapturer>(op);
  20. video_capture->Start();
  21. rtc::scoped_refptr<GSMLTrackSource> video_source = GSMLTrackSource::Create(std::move(video_capture));
  22. return video_source;
  23. }
  24. GSMLCapturer::GSMLCapturer(CaptureOp* lhs):_op(lhs)
  25. {
  26. }
  27. void GSMLCapturer::Start()
  28. {
  29. _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
  30. }
  31. bool GSMLCapturer::open_cam()
  32. {
  33. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  34. _op->_ctx0=(context_t *)malloc(sizeof(context_t));
  35. //for(int i=0;i<count;i++)
  36. {
  37. context_t * p= _op->_ctx0;
  38. p->cam_fd=-1;
  39. p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
  40. p->cam_w = 1280;
  41. p->cam_h = 720;
  42. // p->frame = 0;
  43. p->g_buff = NULL;
  44. p->capture_dmabuf = false; // opencv display v4l2 can't be true
  45. p->fps = 30;
  46. p->enable_verbose = false;
  47. std::string devname="/dev/video" + std::to_string(_op->GetIndex());
  48. // std::cout<<"设备:"<<devname<<std::endl;
  49. //ctx.cam_devname=devname+std::to_string();
  50. p->cam_fd = open(devname.c_str(), O_RDWR);
  51. if( p->cam_fd==-1)
  52. {
  53. ERROR_RETURN("Failed to open camera device %s: %s (%d)",
  54. devname.c_str(), strerror(errno), errno);
  55. }
  56. struct v4l2_format fmt;
  57. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  58. fmt.fmt.pix.width = p->cam_w;
  59. fmt.fmt.pix.height = p->cam_h;
  60. fmt.fmt.pix.pixelformat = p->cam_pixfmt;
  61. fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  62. if (ioctl( p->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
  63. ERROR_RETURN("Failed to set camera output format: %s (%d)",
  64. strerror(errno), errno);
  65. /* Get the real format in case the desired is not supported */
  66. memset(&fmt, 0, sizeof fmt);
  67. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  68. if (ioctl( p->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
  69. ERROR_RETURN("Failed to get camera output format: %s (%d)",
  70. strerror(errno), errno);
  71. if (fmt.fmt.pix.width != p->cam_w ||
  72. fmt.fmt.pix.height != p->cam_h ||
  73. fmt.fmt.pix.pixelformat != p->cam_pixfmt)
  74. {
  75. WARN("The desired format is not supported");
  76. p->cam_w = fmt.fmt.pix.width;
  77. p->cam_h = fmt.fmt.pix.height;
  78. p->cam_pixfmt =fmt.fmt.pix.pixelformat;
  79. }
  80. struct v4l2_streamparm streamparm;
  81. memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
  82. streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  83. ioctl ( p->cam_fd, VIDIOC_G_PARM, &streamparm);
  84. INFO("Camera ouput format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
  85. fmt.fmt.pix.width,
  86. fmt.fmt.pix.height,
  87. fmt.fmt.pix.bytesperline,
  88. fmt.fmt.pix.sizeimage,
  89. streamparm.parm.capture.timeperframe.denominator,
  90. streamparm.parm.capture.timeperframe.numerator);
  91. }
  92. return true;
  93. }
  94. void GSMLCapturer::Run()
  95. {
  96. if(!open_cam()) return;
  97. prepare_buffer();
  98. start_streams();
  99. _run=true;
  100. struct pollfd fds[1];
  101. struct v4l2_buffer v4l2_buf;
  102. while(_run)
  103. {
  104. int cam_fd=-1;
  105. context_t * p=nullptr;
  106. if(/*(_op->GetType()>3)||*/_op->IsForward())
  107. {
  108. cam_fd=_op->_ctx0->cam_fd;
  109. p=_op->_ctx0;
  110. }
  111. else{
  112. cam_fd=_op->_ctx1->cam_fd;
  113. p=_op->_ctx1;
  114. }
  115. fds[0].fd = cam_fd;
  116. fds[0].events = POLLIN;
  117. if(poll(fds, 1, 5000) > 0)
  118. {
  119. if (fds[0].revents & POLLIN)
  120. {
  121. /* Dequeue a camera buff */
  122. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  123. v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  124. if (p->capture_dmabuf)
  125. v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  126. else
  127. v4l2_buf.memory = V4L2_MEMORY_MMAP;
  128. if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  129. printf("Failed to dequeue camera buff: %s (%d)",
  130. strerror(errno), errno);
  131. //_ctx.frame++;
  132. rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  133. // if(_op->GetIndex()==3)
  134. // {
  135. // std::cout<<buffer->width()<<","<<buffer->height()<<std::endl;
  136. // }
  137. const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  138. buffer->MutableDataY(), buffer->StrideY(),
  139. buffer->MutableDataU(), buffer->StrideU(),
  140. buffer->MutableDataV(), buffer->StrideV(),
  141. 0, 0,
  142. p->cam_w,p->cam_h,
  143. buffer->width(), buffer->height(),
  144. libyuv::kRotate0, libyuv::FOURCC_YUYV);
  145. int width = p->cam_w;
  146. int height = p->cam_h;
  147. if (conversionResult >= 0)
  148. {
  149. webrtc::VideoFrame videoFrame(buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeMicros());
  150. if ((p->cam_w == 0) && (p->cam_h == 0)) {
  151. _broadcaster.OnFrame(videoFrame);
  152. }
  153. else
  154. {
  155. if (height == 0) {
  156. height = (videoFrame.height() * width) / videoFrame.width();
  157. }
  158. else if (width == 0) {
  159. width = (videoFrame.width() * height) / videoFrame.height();
  160. }
  161. int stride_y = width;
  162. int stride_uv = (width + 1) / 2;
  163. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
  164. scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
  165. webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeMicros());
  166. _broadcaster.OnFrame(frame);
  167. }
  168. }
  169. if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
  170. printf("Failed to queue camera buffers: %s (%d)",
  171. strerror(errno), errno);
  172. }
  173. }
  174. }
  175. stop_streams();
  176. close_cam();
  177. }
  178. void GSMLCapturer::Destroy() {
  179. Stop();
  180. }
  181. void GSMLCapturer::AddOrUpdateSink(
  182. rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
  183. const rtc::VideoSinkWants& wants) {
  184. _broadcaster.AddOrUpdateSink(sink, wants);
  185. }
  186. void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
  187. {
  188. _broadcaster.RemoveSink(sink);
  189. }
  190. bool GSMLCapturer::prepare_buffer()
  191. {
  192. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  193. //for(int32_t i=0;i<count;i++)
  194. {
  195. context_t * p=_op->_ctx0;
  196. p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
  197. if ( p->g_buff == NULL)
  198. ERROR_RETURN("Failed to allocate global buffer context");
  199. if ( p->capture_dmabuf) {
  200. if (!request_camera_buff(p))
  201. ERROR_RETURN("Failed to set up camera buff");
  202. } else {
  203. if (!request_camera_buff_mmap(p))
  204. ERROR_RETURN("Failed to set up camera buff");
  205. }
  206. INFO("Succeed in preparing stream buffers");
  207. }
  208. return true;
  209. }
// Set up the DMABUF capture path: request V4L2_BUFFERS_NUM buffers from the
// driver, attach each slot's exported dma-buf fd, and enqueue them all.
// Returns false (via ERROR_RETURN) on any ioctl failure.
// NOTE(review): p->g_buff[i].dmabuff_fd is never assigned anywhere in this
// file — confirm it is filled in elsewhere before capture_dmabuf is enabled.
bool GSMLCapturer::request_camera_buff( context_t * p)
{
    // for(int32_t i=0;i<count;i++)
    {
        // context_t * p=&_ctx[i];
        struct v4l2_requestbuffers rb;
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_DMABUF;
        if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                    strerror(errno), errno);
        // The driver may grant fewer buffers than requested; treat that as fatal.
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof buf);
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_DMABUF;
            if (ioctl( p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                        strerror(errno), errno);
            /* TODO: add support for multi-planer
            Enqueue empty v4l2 buff into camera capture plane */
            buf.m.fd = (unsigned long) p->g_buff[index].dmabuff_fd;
            // Keep our bookkeeping size in sync with the driver's buffer length.
            if (buf.length != p->g_buff[index].size)
            {
                WARN("Camera v4l2 buf length is not expected");
                p->g_buff[index].size = buf.length;
            }
            if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }
    return true;
}
  251. bool GSMLCapturer::stop_streams()
  252. {
  253. enum v4l2_buf_type type;
  254. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  255. //for(int32_t i=0;i<count;i++)
  256. {
  257. context_t * p=_op->_ctx0;
  258. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  259. {
  260. if(munmap(p->g_buff[index].start,p->g_buff[index].size)==-1)
  261. {
  262. ERROR_RETURN("munmap failed: %s (%d)", strerror(errno), errno);
  263. }
  264. }
  265. /* Stop v4l2 streaming */
  266. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  267. if (ioctl( p->cam_fd, VIDIOC_STREAMOFF, &type))
  268. ERROR_RETURN("Failed to stop streaming: %s (%d)",
  269. strerror(errno), errno);
  270. INFO("Camera video streaming off ...");
  271. // free(p->g_buff);
  272. }
  273. return true;
  274. }
  275. bool GSMLCapturer::start_streams()
  276. {
  277. enum v4l2_buf_type type;
  278. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  279. // for(int32_t i=0;i<count;i++)
  280. {
  281. context_t * p=_op->_ctx0;
  282. /* Start v4l2 streaming */
  283. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  284. if (ioctl( p->cam_fd, VIDIOC_STREAMON, &type) < 0)
  285. ERROR_RETURN("Failed to start streaming: %s (%d)",
  286. strerror(errno), errno);
  287. usleep(200);
  288. INFO("Camera video streaming on ...");
  289. }
  290. return true;
  291. }
// Set up the MMAP capture path: request V4L2_BUFFERS_NUM driver-allocated
// buffers, mmap each one into this process, record the mapping in g_buff,
// and enqueue every buffer.  Returns false (via ERROR_RETURN) on failure.
bool GSMLCapturer::request_camera_buff_mmap(context_t * p)
{
    struct v4l2_requestbuffers rb;
    // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    // for(int32_t i=0;i<count;i++)
    {
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_MMAP;
        if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                    strerror(errno), errno);
        // The driver may grant fewer buffers than requested; treat that as fatal.
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof buf);
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                        strerror(errno), errno);
            // Map the driver buffer at buf.m.offset and remember start/size
            // so Run() can read frames and stop_streams() can unmap them.
            p->g_buff[index].size = buf.length;
            p->g_buff[index].start = (unsigned char *)
                mmap (NULL /* start anywhere */,
                        buf.length,
                        PROT_READ | PROT_WRITE /* required */,
                        MAP_SHARED /* recommended */,
                        p->cam_fd, buf.m.offset);
            if (MAP_FAILED == p->g_buff[index].start)
                ERROR_RETURN("Failed to map buffers");
            if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }
    return true;
}
  334. void GSMLCapturer::close_cam()
  335. {
  336. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  337. //for(int32_t i=0;i<count;i++)
  338. {
  339. context_t * p=_op->_ctx0;
  340. if(p->g_buff!=nullptr)
  341. {
  342. free(p->g_buff);
  343. p->g_buff=nullptr;
  344. }
  345. if(p->cam_fd>0)
  346. close(p->cam_fd);
  347. }
  348. free(_op->_ctx0);
  349. }
  350. void GSMLCapturer::Stop()
  351. {
  352. _run=false;
  353. _thread.join();
  354. }