gsml_capturer.cpp

#include "pch.h"
#include "../common/comm.h"
#include "./include/api.h"
#include "lock.h"
#include <iostream>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <errno.h>
#include <string>
#include <cstring>   // memset, strerror
#include <cstdlib>   // malloc, free
#include <unistd.h>  // close, usleep
#include "capture_op.h"
#include "gsml_capturer.h"
static nv_color_fmt nvcolor_fmt[] =
{
    /* TODO: add more pixel format mapping */
    {V4L2_PIX_FMT_UYVY, NVBUF_COLOR_FORMAT_UYVY},
    {V4L2_PIX_FMT_VYUY, NVBUF_COLOR_FORMAT_VYUY},
    {V4L2_PIX_FMT_YUYV, NVBUF_COLOR_FORMAT_YUYV},
    {V4L2_PIX_FMT_YVYU, NVBUF_COLOR_FORMAT_YVYU},
    {V4L2_PIX_FMT_GREY, NVBUF_COLOR_FORMAT_GRAY8},
    {V4L2_PIX_FMT_YUV420M, NVBUF_COLOR_FORMAT_YUV420},
};

static NvBufSurfaceColorFormat get_nvbuff_color_fmt(unsigned int v4l2_pixfmt)
{
    for (unsigned i = 0; i < sizeof(nvcolor_fmt) / sizeof(nvcolor_fmt[0]); i++)
    {
        if (v4l2_pixfmt == nvcolor_fmt[i].v4l2_pixfmt)
            return nvcolor_fmt[i].nvbuff_color;
    }
    return NVBUF_COLOR_FORMAT_INVALID;
}
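
// Factory helper: wraps the CaptureOp in a GSMLCapturer, starts the
// capture thread, and exposes it as a WebRTC video track source.
// Typical wiring (a sketch; the factory and track names are illustrative):
//   auto source = OpenGSMLCapture(op);
//   auto track = peer_factory->CreateVideoTrack("gsml0", source.get());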
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op)
{
    auto video_capture = std::make_unique<GSMLCapturer>(op);
    video_capture->Start();
    rtc::scoped_refptr<GSMLTrackSource> video_source =
        GSMLTrackSource::Create(std::move(video_capture));
    return video_source;
}

GSMLCapturer::GSMLCapturer(CaptureOp* lhs) : _op(lhs)
{
}

void GSMLCapturer::Start()
{
    _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
}
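
// Run() executes on the capture thread created above; Stop() flips _run
// and joins the thread.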
// Video capture: open and configure the camera device.
bool GSMLCapturer::open_cam()
{
    _op->_ctx0 = (context_t *)malloc(sizeof(context_t));
    {
        context_t * p = _op->_ctx0;
        p->cam_fd = -1;
        p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
        p->cam_w = 1280;
        p->cam_h = 720;
        // p->frame = 0;
        p->g_buff = NULL;
        p->capture_dmabuf = true; // must be false when displaying through OpenCV/V4L2
        // p->capture_dmabuf = false;
        p->fps = 30;
        p->enable_verbose = false;
        std::string devname = "/dev/video" + std::to_string(_op->GetIndex());
        p->cam_fd = open(devname.c_str(), O_RDWR); // open the video device
        if (p->cam_fd == -1)
        {
            printf("Failed to open camera device %s: %s (%d)\n",
                devname.c_str(), strerror(errno), errno);
            return false;
        }
        struct v4l2_format fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width = p->cam_w;
        fmt.fmt.pix.height = p->cam_h;
        fmt.fmt.pix.pixelformat = p->cam_pixfmt;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
        if (ioctl(p->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
            printf("Failed to set camera output format: %s (%d)\n",
                strerror(errno), errno);
        /* Get the real format in case the desired one is not supported */
        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(p->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
            printf("Failed to get camera output format: %s (%d)\n",
                strerror(errno), errno);
        if (fmt.fmt.pix.width != p->cam_w ||
            fmt.fmt.pix.height != p->cam_h ||
            fmt.fmt.pix.pixelformat != p->cam_pixfmt)
        {
            printf("The desired format is not supported\n");
            p->cam_w = fmt.fmt.pix.width;
            p->cam_h = fmt.fmt.pix.height;
            p->cam_pixfmt = fmt.fmt.pix.pixelformat;
        }
        struct v4l2_streamparm streamparm;
        memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
        streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        /*
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 3;
        streamparm.parm.output.timeperframe.numerator = 1;
        streamparm.parm.output.timeperframe.denominator = 3;
        */
        ioctl(p->cam_fd, VIDIOC_G_PARM, &streamparm);
        printf(">>: Frame rate: %u/%u\n",
            streamparm.parm.capture.timeperframe.numerator,
            streamparm.parm.capture.timeperframe.denominator);
        // INFO("Camera output format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
        //     fmt.fmt.pix.width, fmt.fmt.pix.height,
        //     fmt.fmt.pix.bytesperline, fmt.fmt.pix.sizeimage,
        //     streamparm.parm.capture.timeperframe.denominator,
        //     streamparm.parm.capture.timeperframe.numerator);
    }
    return true;
}
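
// Capture loop: poll the camera fd, dequeue a filled buffer, convert it
// to I420, broadcast the frame, then re-queue the buffer.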
void GSMLCapturer::Run()
{
    if (!open_cam()) return;
    prepare_buffer();
    start_streams();
    // start_capture(&ctx);
    _run = true;
    struct pollfd fds[1];
    struct v4l2_buffer v4l2_buf;
    long long _source = 0, _dst = 0;
    while (_run)
    {
        int cam_fd = -1;
        context_t * p = nullptr;
        if (_op->IsForward())
        {
            cam_fd = _op->_ctx0->cam_fd;
            p = _op->_ctx0;
        }
        else
        {
            cam_fd = _op->_ctx1->cam_fd;
            p = _op->_ctx1;
        }
        NvBufSurf::NvCommonTransformParams transform_params = {0};
        /* Init the NvBufferTransformParams */
        transform_params.src_top = 0;
        transform_params.src_left = 0;
        transform_params.src_width = p->cam_w;
        transform_params.src_height = p->cam_h;
        transform_params.dst_top = 0;
        transform_params.dst_left = 0;
        transform_params.dst_width = p->cam_w;
        transform_params.dst_height = p->cam_h;
        transform_params.flag = NVBUFSURF_TRANSFORM_FILTER;
        transform_params.flip = NvBufSurfTransform_None;
        transform_params.filter = NvBufSurfTransformInter_Algo3;
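        // Note: transform_params only feeds the commented-out NvTransform
        // (VIC) path below; the active path converts in software via libyuv.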
        fds[0].fd = cam_fd;
        fds[0].events = POLLIN;
        if (poll(fds, 1, 5000) > 0)
        {
            if (fds[0].revents & POLLIN)
            {
                /* Dequeue a camera buffer */
                memset(&v4l2_buf, 0, sizeof(v4l2_buf));
                v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                if (p->capture_dmabuf)
                    v4l2_buf.memory = V4L2_MEMORY_DMABUF;
                else
                    v4l2_buf.memory = V4L2_MEMORY_MMAP;
                if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
                    printf("Failed to dequeue camera buffer: %s (%d)\n",
                        strerror(errno), errno);
                if (_op->GetType() == RenderPosition::FRONT)
                {
                    _source = std::chrono::duration_cast<std::chrono::milliseconds>(
                        std::chrono::system_clock::now().time_since_epoch()).count();
                    // printf("encode delay:%lld----frame:%ld\r\n", _source);
                }
                // NvBufSurface *pSurf = NULL;
                // if (-1 == NvBufSurfaceFromFd(p->g_buff[v4l2_buf.index].dmabuff_fd,
                //         (void**)(&pSurf)))
                //     printf("Cannot get NvBufSurface from fd");
                // if (p->capture_dmabuf) {
                //     /* Cache sync for VIC operation since the data is from CPU */
                //     if (-1 == NvBufSurfaceSyncForDevice(pSurf, 0, 0))
                //         printf("Cannot sync output buffer");
                // }
                // else {
                //     /* Copies raw buffer plane contents to an NvBufSurface plane */
                //     if (-1 == Raw2NvBufSurface(p->g_buff[v4l2_buf.index].start, 0, 0,
                //             p->cam_w, p->cam_h, pSurf))
                //         printf("Cannot copy raw buffer to NvBufSurface plane");
                // }
                // /* Convert the camera buffer from YUV422 to YUV420P */
                // if (NvBufSurf::NvTransform(&transform_params, p->g_buff[v4l2_buf.index].dmabuff_fd, p->render_dmabuf_fd))
                //     printf("Failed to convert the buffer");
                // NvBufSurface *pSurf_ = NULL;
                // if (-1 == NvBufSurfaceFromFd(p->render_dmabuf_fd, (void**)(&pSurf_)))
                //     printf("Cannot get NvBufSurface from fd");
                int width = p->cam_w;
                int height = p->cam_h;
                rtc::scoped_refptr<webrtc::I420Buffer> buffer =
                    webrtc::I420Buffer::Create(p->cam_w, p->cam_h);
                const int conversionResult = libyuv::ConvertToI420(
                    (uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
                    buffer->MutableDataY(), buffer->StrideY(),
                    buffer->MutableDataU(), buffer->StrideU(),
                    buffer->MutableDataV(), buffer->StrideV(),
                    0, 0,
                    p->cam_w, p->cam_h,
                    buffer->width(), buffer->height(),
                    libyuv::kRotate0, libyuv::FOURCC_YUYV);
                // Alternative direct path (unused); would need a scratch I420 buffer:
                // uint8_t yuv[3 * width * height / 2];
                // libyuv::YUY2ToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 2 * width,
                //     yuv, width, yuv + width * height, width / 2,
                //     yuv + 5 * width * height / 4, width / 2, width, height);
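                // The ConvertToI420 call above deinterleaves packed YUYV
                // (YUY2) into the three I420 planes; source and destination
                // dimensions match, so no crop or scale happens at this step.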
                // if (conversionResult >= 0)
                {
                    // printf("dma success \n");
                    webrtc::VideoFrame videoFrame(buffer,
                        webrtc::VideoRotation::kVideoRotation_0, rtc::TimeNanos());
                    if ((p->cam_w == 0) && (p->cam_h == 0)) {
                        _broadcaster.OnFrame(videoFrame);
                    }
                    else
                    {
                        if (height == 0) {
                            height = (videoFrame.height() * width) / videoFrame.width();
                        }
                        else if (width == 0) {
                            width = (videoFrame.width() * height) / videoFrame.height();
                        }
                        int stride_y = width;
                        int stride_uv = (width + 1) / 2;
                        rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer =
                            webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
                        scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
                        webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer,
                            webrtc::kVideoRotation_0, rtc::TimeNanos());
                        _broadcaster.OnFrame(frame);
                    }
                }
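                // width/height were initialized to the capture size, so the
                // ScaleFrom path above is effectively a same-resolution copy;
                // it would only rescale if those values were changed.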
                if (_op->GetType() == RenderPosition::FRONT)
                {
                    _dst = std::chrono::duration_cast<std::chrono::milliseconds>(
                        std::chrono::system_clock::now().time_since_epoch()).count();
                    // printf("encode delay:%lld\r\n", _dst - _source);
                }
                /* Re-queue the buffer so the driver can fill it again */
                if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
                    printf("Failed to queue camera buffer: %s (%d)\n",
                        strerror(errno), errno);
            }
        }
    }
    stop_streams();
    // NvBufSurf::NvDestroy(p->render_dmabuf_fd);
    close_cam();
}
void GSMLCapturer::Destroy()
{
    Stop();
}

void GSMLCapturer::AddOrUpdateSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants)
{
    _broadcaster.AddOrUpdateSink(sink, wants);
}

void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
{
    _broadcaster.RemoveSink(sink);
}
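
// Allocate NvBufSurface-backed capture buffers, map them for CPU access,
// and register their dma-buf fds with the V4L2 driver.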
bool GSMLCapturer::prepare_buffer()
{
    context_t * p = _op->_ctx0;
    NvBufSurf::NvCommonAllocateParams camparams = {0};
    int fd[V4L2_BUFFERS_NUM] = {0};
    /* Allocate global buffer context */
    p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
    if (p->g_buff == NULL)
        printf("Failed to allocate global buffer context\n");
    camparams.memType = NVBUF_MEM_SURFACE_ARRAY;
    camparams.width = p->cam_w;
    camparams.height = p->cam_h;
    camparams.layout = NVBUF_LAYOUT_PITCH;
    camparams.colorFormat = get_nvbuff_color_fmt(p->cam_pixfmt);
    camparams.memtag = NvBufSurfaceTag_CAMERA;
    if (NvBufSurf::NvAllocate(&camparams, V4L2_BUFFERS_NUM, fd))
        printf("Failed to create NvBuffer\n");
    /* Create buffers and provide them to the camera */
    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        NvBufSurface *pSurf = NULL;
        p->g_buff[index].dmabuff_fd = fd[index];
        if (-1 == NvBufSurfaceFromFd(fd[index], (void**)(&pSurf)))
            printf("Failed to get NvBuffer parameters\n");
        if (p->cam_pixfmt == V4L2_PIX_FMT_GREY &&
            pSurf->surfaceList[0].pitch != pSurf->surfaceList[0].width)
            p->capture_dmabuf = false;
        /* TODO: add multi-planar support
           Currently only supports YUV422 interlaced single-planar */
        if (p->capture_dmabuf) {
            if (-1 == NvBufSurfaceMap(pSurf, 0, 0, NVBUF_MAP_READ_WRITE))
                printf("Failed to map buffer\n");
            p->g_buff[index].start = (unsigned char *)pSurf->surfaceList[0].mappedAddr.addr[0];
            p->g_buff[index].size = pSurf->surfaceList[0].dataSize;
        }
    }
    camparams.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
    camparams.memtag = NvBufSurfaceTag_NONE;
    /* Create render buffer */
    if (NvBufSurf::NvAllocate(&camparams, 1, &p->render_dmabuf_fd))
        printf("Failed to create NvBuffer\n");
    if (p->capture_dmabuf) {
        if (!request_camera_buff(p))
            printf("Failed to set up camera buff\n");
    }
    // else {
    //     if (!request_camera_buff_mmap(ctx))
    //         ERROR_RETURN("Failed to set up camera buff");
    // }
    // INFO("Succeed in preparing stream buffers");
    return true;
}
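
// With V4L2_MEMORY_DMABUF the driver fills the NvBuffer dma-buf fds
// allocated in prepare_buffer() directly, so no copy is needed between
// the kernel capture buffers and the NVIDIA surfaces.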
bool GSMLCapturer::request_camera_buff(context_t * p)
{
    /* Request camera v4l2 buffers */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(rb));
    rb.count = V4L2_BUFFERS_NUM;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_DMABUF;
    if (ioctl(p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
        printf("Failed to request v4l2 buffers: %s (%d)\n",
            strerror(errno), errno);
    if (rb.count != V4L2_BUFFERS_NUM)
        printf("V4l2 buffer number is not as desired\n");
    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        struct v4l2_buffer buf;
        /* Query camera v4l2 buf length */
        memset(&buf, 0, sizeof(buf));
        buf.index = index;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_DMABUF;
        if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
            printf("Failed to query buffer: %s (%d)\n",
                strerror(errno), errno);
        /* TODO: add support for multi-planar
           Enqueue empty v4l2 buffers into the camera capture plane */
        buf.m.fd = (unsigned long)p->g_buff[index].dmabuff_fd;
        if (buf.length != p->g_buff[index].size)
        {
            printf("Camera v4l2 buf length is not expected\n");
            p->g_buff[index].size = buf.length;
        }
        if (ioctl(p->cam_fd, VIDIOC_QBUF, &buf) < 0)
            printf("Failed to enqueue buffers: %s (%d)\n",
                strerror(errno), errno);
    }
    return true;
}
bool GSMLCapturer::stop_streams()
{
    enum v4l2_buf_type type;
    {
        context_t * p = _op->_ctx0;
        /* Note: these mappings came from NvBufSurfaceMap; the symmetric
           NvBufSurfaceUnMap would be the cleaner release than munmap. */
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            if (munmap(p->g_buff[index].start, p->g_buff[index].size) == -1)
            {
                printf("munmap failed: %s (%d)\n", strerror(errno), errno);
            }
        }
        /* Stop v4l2 streaming */
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(p->cam_fd, VIDIOC_STREAMOFF, &type))
            printf("Failed to stop streaming: %s (%d)\n",
                strerror(errno), errno);
        // INFO("Camera video streaming off ...");
    }
    return true;
}
bool GSMLCapturer::start_streams()
{
    enum v4l2_buf_type type;
    context_t * p = _op->_ctx0;
    /* Start v4l2 streaming */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(p->cam_fd, VIDIOC_STREAMON, &type) < 0)
        printf("Failed to start streaming: %s (%d)\n",
            strerror(errno), errno);
    usleep(200);
    // INFO("Camera video streaming on ...");
    return true;
}
void GSMLCapturer::close_cam()
{
    {
        context_t * p = _op->_ctx0;
        if (p->g_buff != nullptr)
        {
            free(p->g_buff);
            p->g_buff = nullptr;
        }
        if (p->cam_fd > 0)
            close(p->cam_fd);
    }
    free(_op->_ctx0);
    _op->_ctx0 = nullptr;
}

void GSMLCapturer::Stop()
{
    _run = false;
    if (_thread.joinable())
        _thread.join();
}