gsml_capturer.cpp 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414
#include "pch.h"
#include "../common/comm.h"
#include "./include/api.h"
#include "lock.h"
#include <iostream>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <errno.h>
#include <string>
#include <cstdlib>   // malloc/calloc/free
#include <cstring>   // memset/strerror
#include <unistd.h>  // open/close/usleep
#include "capture_op.h"
#include "gsml_capturer.h"
  16. rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op)
  17. {
  18. auto video_capture=std::make_unique<GSMLCapturer>(op);
  19. video_capture->Start();
  20. rtc::scoped_refptr<GSMLTrackSource> video_source = GSMLTrackSource::Create(std::move(video_capture));
  21. return video_source;
  22. }
  23. GSMLCapturer::GSMLCapturer(CaptureOp* lhs):_op(lhs)
  24. {
  25. }
  26. void GSMLCapturer::Start()
  27. {
  28. _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
  29. }
  30. bool GSMLCapturer::open_cam()
  31. {
  32. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  33. _op->_ctx0=(context_t *)malloc(sizeof(context_t));
  34. //for(int i=0;i<count;i++)
  35. {
  36. context_t * p= _op->_ctx0;
  37. p->cam_fd=-1;
  38. p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
  39. p->cam_w = 1280;
  40. p->cam_h = 720;
  41. // p->frame = 0;
  42. p->g_buff = NULL;
  43. p->capture_dmabuf = false; // opencv display v4l2 can't be true
  44. p->fps = 30;
  45. p->enable_verbose = false;
  46. std::string devname="/dev/video" + std::to_string(_op->GetIndex());
  47. // std::cout<<"设备:"<<devname<<std::endl;
  48. //ctx.cam_devname=devname+std::to_string();
  49. p->cam_fd = open(devname.c_str(), O_RDWR);
  50. if( p->cam_fd==-1)
  51. {
  52. ERROR_RETURN("Failed to open camera device %s: %s (%d)",
  53. devname.c_str(), strerror(errno), errno);
  54. }
  55. struct v4l2_format fmt;
  56. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  57. fmt.fmt.pix.width = p->cam_w;
  58. fmt.fmt.pix.height = p->cam_h;
  59. fmt.fmt.pix.pixelformat = p->cam_pixfmt;
  60. fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  61. if (ioctl( p->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
  62. ERROR_RETURN("Failed to set camera output format: %s (%d)",
  63. strerror(errno), errno);
  64. /* Get the real format in case the desired is not supported */
  65. memset(&fmt, 0, sizeof fmt);
  66. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  67. if (ioctl( p->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
  68. ERROR_RETURN("Failed to get camera output format: %s (%d)",
  69. strerror(errno), errno);
  70. if (fmt.fmt.pix.width != p->cam_w ||
  71. fmt.fmt.pix.height != p->cam_h ||
  72. fmt.fmt.pix.pixelformat != p->cam_pixfmt)
  73. {
  74. WARN("The desired format is not supported");
  75. p->cam_w = fmt.fmt.pix.width;
  76. p->cam_h = fmt.fmt.pix.height;
  77. p->cam_pixfmt =fmt.fmt.pix.pixelformat;
  78. }
  79. struct v4l2_streamparm streamparm;
  80. memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
  81. streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  82. ioctl ( p->cam_fd, VIDIOC_G_PARM, &streamparm);
  83. INFO("Camera ouput format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
  84. fmt.fmt.pix.width,
  85. fmt.fmt.pix.height,
  86. fmt.fmt.pix.bytesperline,
  87. fmt.fmt.pix.sizeimage,
  88. streamparm.parm.capture.timeperframe.denominator,
  89. streamparm.parm.capture.timeperframe.numerator);
  90. }
  91. return true;
  92. }
  93. void GSMLCapturer::Run()
  94. {
  95. if(!open_cam()) return;
  96. prepare_buffer();
  97. start_streams();
  98. _run=true;
  99. struct pollfd fds[1];
  100. struct v4l2_buffer v4l2_buf;
  101. while(_run)
  102. {
  103. int cam_fd=-1;
  104. context_t * p=nullptr;
  105. if(/*(_op->GetType()>3)||*/_op->IsForward())
  106. {
  107. cam_fd=_op->_ctx0->cam_fd;
  108. p=_op->_ctx0;
  109. }
  110. else{
  111. cam_fd=_op->_ctx1->cam_fd;
  112. p=_op->_ctx1;
  113. }
  114. fds[0].fd = cam_fd;
  115. fds[0].events = POLLIN;
  116. if(poll(fds, 1, 5000) > 0)
  117. {
  118. if (fds[0].revents & POLLIN)
  119. {
  120. /* Dequeue a camera buff */
  121. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  122. v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  123. if (p->capture_dmabuf)
  124. v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  125. else
  126. v4l2_buf.memory = V4L2_MEMORY_MMAP;
  127. if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  128. printf("Failed to dequeue camera buff: %s (%d)",
  129. strerror(errno), errno);
  130. //_ctx.frame++;
  131. rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  132. // if(_op->GetIndex()==3)
  133. // {
  134. // std::cout<<buffer->width()<<","<<buffer->height()<<std::endl;
  135. // }
  136. const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  137. buffer->MutableDataY(), buffer->StrideY(),
  138. buffer->MutableDataU(), buffer->StrideU(),
  139. buffer->MutableDataV(), buffer->StrideV(),
  140. 0, 0,
  141. p->cam_w,p->cam_h,
  142. buffer->width(), buffer->height(),
  143. libyuv::kRotate0, libyuv::FOURCC_YUYV);
  144. int width = p->cam_w;
  145. int height = p->cam_h;
  146. if (conversionResult >= 0)
  147. {
  148. webrtc::VideoFrame videoFrame(buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeMicros());
  149. if ((p->cam_w == 0) && (p->cam_h == 0)) {
  150. _broadcaster.OnFrame(videoFrame);
  151. }
  152. else
  153. {
  154. if (height == 0) {
  155. height = (videoFrame.height() * width) / videoFrame.width();
  156. }
  157. else if (width == 0) {
  158. width = (videoFrame.width() * height) / videoFrame.height();
  159. }
  160. int stride_y = width;
  161. int stride_uv = (width + 1) / 2;
  162. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
  163. scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
  164. webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeMicros());
  165. _broadcaster.OnFrame(frame);
  166. }
  167. }
  168. if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
  169. printf("Failed to queue camera buffers: %s (%d)",
  170. strerror(errno), errno);
  171. }
  172. }
  173. }
  174. stop_streams();
  175. close_cam();
  176. }
  177. void GSMLCapturer::Destroy() {
  178. Stop();
  179. }
  180. void GSMLCapturer::AddOrUpdateSink(
  181. rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
  182. const rtc::VideoSinkWants& wants) {
  183. _broadcaster.AddOrUpdateSink(sink, wants);
  184. }
  185. void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
  186. {
  187. _broadcaster.RemoveSink(sink);
  188. }
  189. bool GSMLCapturer::prepare_buffer()
  190. {
  191. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  192. //for(int32_t i=0;i<count;i++)
  193. {
  194. context_t * p=_op->_ctx0;
  195. p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
  196. if ( p->g_buff == NULL)
  197. ERROR_RETURN("Failed to allocate global buffer context");
  198. if ( p->capture_dmabuf) {
  199. if (!request_camera_buff(p))
  200. ERROR_RETURN("Failed to set up camera buff");
  201. } else {
  202. if (!request_camera_buff_mmap(p))
  203. ERROR_RETURN("Failed to set up camera buff");
  204. }
  205. INFO("Succeed in preparing stream buffers");
  206. }
  207. return true;
  208. }
// Set up DMABUF-backed capture: request V4L2_BUFFERS_NUM buffers from the
// driver, attach the dmabuf fd stored in each g_buff entry, and enqueue
// every buffer on the capture queue. Returns false (via ERROR_RETURN) on
// any ioctl failure or if the driver grants a different buffer count.
bool GSMLCapturer::request_camera_buff( context_t * p)
{
// for(int32_t i=0;i<count;i++)
{
// context_t * p=&_ctx[i];
struct v4l2_requestbuffers rb;
memset(&rb, 0, sizeof(rb));
rb.count = V4L2_BUFFERS_NUM;
rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
rb.memory = V4L2_MEMORY_DMABUF;
if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
strerror(errno), errno);
// The driver may grant fewer/more buffers than requested; this code
// requires exactly V4L2_BUFFERS_NUM.
if (rb.count != V4L2_BUFFERS_NUM)
ERROR_RETURN("V4l2 buffer number is not as desired");
for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
{
struct v4l2_buffer buf;
/* Query camera v4l2 buf length */
memset(&buf, 0, sizeof buf);
buf.index = index;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_DMABUF;
if (ioctl( p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
ERROR_RETURN("Failed to query buff: %s (%d)",
strerror(errno), errno);
/* TODO: add support for multi-planer
Enqueue empty v4l2 buff into camera capture plane */
// NOTE(review): g_buff[index].dmabuff_fd is read here, but nothing in
// this file ever assigns it — presumably it is filled in elsewhere
// before capture_dmabuf mode is used; verify before enabling dmabuf.
buf.m.fd = (unsigned long) p->g_buff[index].dmabuff_fd;
// Keep our bookkeeping in sync with the size the driver reports.
if (buf.length != p->g_buff[index].size)
{
WARN("Camera v4l2 buf length is not expected");
p->g_buff[index].size = buf.length;
}
if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
strerror(errno), errno);
}
}
return true;
}
// Unmap the capture buffers and stop V4L2 streaming on the primary context.
// Returns false (via ERROR_RETURN) on failure.
// NOTE(review): buffers are munmap'ed unconditionally, but they are only
// mmap'ed in the !capture_dmabuf path (capture_dmabuf is hard-set to false
// in open_cam, so this holds today — verify if dmabuf mode is enabled).
// NOTE(review): an munmap failure returns early and skips VIDIOC_STREAMOFF,
// leaving the stream running — confirm this is acceptable for shutdown.
bool GSMLCapturer::stop_streams()
{
enum v4l2_buf_type type;
//int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
//for(int32_t i=0;i<count;i++)
{
context_t * p=_op->_ctx0;
// Release the mmap'ed views of the driver buffers.
for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
{
if(munmap(p->g_buff[index].start,p->g_buff[index].size)==-1)
{
ERROR_RETURN("munmap failed: %s (%d)", strerror(errno), errno);
}
}
/* Stop v4l2 streaming */
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl( p->cam_fd, VIDIOC_STREAMOFF, &type))
ERROR_RETURN("Failed to stop streaming: %s (%d)",
strerror(errno), errno);
INFO("Camera video streaming off ...");
// g_buff itself is freed later in close_cam(), not here.
// free(p->g_buff);
}
return true;
}
  274. bool GSMLCapturer::start_streams()
  275. {
  276. enum v4l2_buf_type type;
  277. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  278. // for(int32_t i=0;i<count;i++)
  279. {
  280. context_t * p=_op->_ctx0;
  281. /* Start v4l2 streaming */
  282. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  283. if (ioctl( p->cam_fd, VIDIOC_STREAMON, &type) < 0)
  284. ERROR_RETURN("Failed to start streaming: %s (%d)",
  285. strerror(errno), errno);
  286. usleep(200);
  287. INFO("Camera video streaming on ...");
  288. }
  289. return true;
  290. }
  291. bool GSMLCapturer::request_camera_buff_mmap(context_t * p)
  292. {
  293. struct v4l2_requestbuffers rb;
  294. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  295. // for(int32_t i=0;i<count;i++)
  296. {
  297. memset(&rb, 0, sizeof(rb));
  298. rb.count = V4L2_BUFFERS_NUM;
  299. rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  300. rb.memory = V4L2_MEMORY_MMAP;
  301. if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
  302. ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
  303. strerror(errno), errno);
  304. if (rb.count != V4L2_BUFFERS_NUM)
  305. ERROR_RETURN("V4l2 buffer number is not as desired");
  306. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  307. {
  308. struct v4l2_buffer buf;
  309. /* Query camera v4l2 buf length */
  310. memset(&buf, 0, sizeof buf);
  311. buf.index = index;
  312. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  313. buf.memory = V4L2_MEMORY_MMAP;
  314. if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
  315. ERROR_RETURN("Failed to query buff: %s (%d)",
  316. strerror(errno), errno);
  317. p->g_buff[index].size = buf.length;
  318. p->g_buff[index].start = (unsigned char *)
  319. mmap (NULL /* start anywhere */,
  320. buf.length,
  321. PROT_READ | PROT_WRITE /* required */,
  322. MAP_SHARED /* recommended */,
  323. p->cam_fd, buf.m.offset);
  324. if (MAP_FAILED == p->g_buff[index].start)
  325. ERROR_RETURN("Failed to map buffers");
  326. if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
  327. ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
  328. strerror(errno), errno);
  329. }
  330. }
  331. return true;
  332. }
  333. void GSMLCapturer::close_cam()
  334. {
  335. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  336. //for(int32_t i=0;i<count;i++)
  337. {
  338. context_t * p=_op->_ctx0;
  339. if(p->g_buff!=nullptr)
  340. {
  341. free(p->g_buff);
  342. p->g_buff=nullptr;
  343. }
  344. if(p->cam_fd>0)
  345. close(p->cam_fd);
  346. }
  347. free(_op->_ctx0);
  348. }
  349. void GSMLCapturer::Stop()
  350. {
  351. _run=false;
  352. _thread.join();
  353. }