gsml_capturer.cpp

#include "pch.h"
#include "../common/comm.h"
#include "./include/api.h"
#include "lock.h"
#include <iostream>
#include <fstream>      // std::ofstream output_file
#include <chrono>       // timestamps in Run()
#include <cstring>      // memset(), strerror()
#include <functional>   // std::bind
#include <thread>       // capture thread
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <unistd.h>     // close(), usleep()
#include <errno.h>
#include <string>
#include "capture_op.h"
#include "gsml_capturer.h"
#include "/home/nvidia/devdata/test_jet_enc_web/jetson_enc/JetsonEnc.h"

const char *output;      // points at a string literal, so it must be const
std::ofstream output_file;

rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op)
{
    auto video_capture = std::make_unique<GSMLCapturer>(op);
    video_capture->Start();
    rtc::scoped_refptr<GSMLTrackSource> video_source =
        GSMLTrackSource::Create(std::move(video_capture));
    return video_source;
}
GSMLCapturer::GSMLCapturer(CaptureOp* lhs) : _op(lhs)
{
}

void GSMLCapturer::Start()
{
    _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
}
// Video capture setup
bool GSMLCapturer::open_cam()
{
    // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    _op->_ctx0 = (context_t *)malloc(sizeof(context_t));
    //for(int i=0;i<count;i++)
    {
        context_t *p = _op->_ctx0;
        p->cam_fd = -1;
        p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
        p->cam_w = 1280;
        p->cam_h = 720;
        // p->frame = 0;
        p->g_buff = NULL;
        // p->capture_dmabuf = true; // opencv display v4l2 can't be true
        p->capture_dmabuf = false;   // must stay false when OpenCV displays the v4l2 frames
        p->fps = 30;
        p->enable_verbose = false;

        std::string devname = "/dev/video" + std::to_string(_op->GetIndex());
        // std::cout << "device: " << devname << std::endl;
        p->cam_fd = open(devname.c_str(), O_RDWR); // open the video device
        if (p->cam_fd == -1)
        {
            ERROR_RETURN("Failed to open camera device %s: %s (%d)",
                    devname.c_str(), strerror(errno), errno);
        }

        struct v4l2_format fmt;
        memset(&fmt, 0, sizeof(fmt)); // zero all fields before VIDIOC_S_FMT
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width = p->cam_w;
        fmt.fmt.pix.height = p->cam_h;
        fmt.fmt.pix.pixelformat = p->cam_pixfmt;
        fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
        if (ioctl(p->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
            ERROR_RETURN("Failed to set camera output format: %s (%d)",
                    strerror(errno), errno);

        /* Get the real format in case the desired one is not supported */
        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(p->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
            ERROR_RETURN("Failed to get camera output format: %s (%d)",
                    strerror(errno), errno);
        if (fmt.fmt.pix.width != p->cam_w ||
                fmt.fmt.pix.height != p->cam_h ||
                fmt.fmt.pix.pixelformat != p->cam_pixfmt)
        {
            WARN("The desired format is not supported");
            p->cam_w = fmt.fmt.pix.width;
            p->cam_h = fmt.fmt.pix.height;
            p->cam_pixfmt = fmt.fmt.pix.pixelformat;
        }

        struct v4l2_streamparm streamparm;
        memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
        streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        /*
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 3;
        streamparm.parm.output.timeperframe.numerator = 1;
        streamparm.parm.output.timeperframe.denominator = 3;
        */
        ioctl(p->cam_fd, VIDIOC_G_PARM, &streamparm);
        printf(">>: Frame rate: %u/%u\n",
                streamparm.parm.capture.timeperframe.numerator,
                streamparm.parm.capture.timeperframe.denominator);

        INFO("Camera output format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
                fmt.fmt.pix.width,
                fmt.fmt.pix.height,
                fmt.fmt.pix.bytesperline,
                fmt.fmt.pix.sizeimage,
                streamparm.parm.capture.timeperframe.denominator,
                streamparm.parm.capture.timeperframe.numerator);
    }
    return true;
}
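
// A minimal sketch (hypothetical helper, not called in this file): open_cam()
// only reads the frame rate back with VIDIOC_G_PARM; actually requesting one
// would use VIDIOC_S_PARM, as the commented-out timeperframe block above hints.
// The driver may adjust the value, so re-read it afterwards as open_cam() does.
static bool set_cam_framerate(int cam_fd, unsigned int fps)
{
    struct v4l2_streamparm parm;
    memset(&parm, 0, sizeof(parm));
    parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    parm.parm.capture.timeperframe.numerator = 1;     // 1/fps seconds per frame
    parm.parm.capture.timeperframe.denominator = fps;
    return ioctl(cam_fd, VIDIOC_S_PARM, &parm) >= 0;
}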
class EncDataWriter : public JetsonEncListner
{
    void OnJetsonEncData(unsigned char *data, int data_len)
    {
        output_file.write(reinterpret_cast<char *>(data), data_len);
        printf("write bytes: %d\n", data_len);
        return;
    }
};
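
// A minimal sketch of the encoder feed path that Run() keeps commented out:
// convert one YUYV capture buffer to a contiguous I420 frame and hand it to
// JetsonEnc. The AddFrame() and GetQueueSize() signatures are inferred from
// the commented-out calls in Run(); treat them, and whether the encoder takes
// ownership of the buffer, as assumptions about JetsonEnc.h.
static void encode_one_frame(JetsonEnc *enc, uint8_t *yuyv, int width, int height)
{
    const int frame_size = width * height * 3 / 2;     // I420: Y + U/4 + V/4
    uint8_t *yuv = new uint8_t[frame_size];
    libyuv::YUY2ToI420(yuyv, 2 * width,                // packed YUYV, 2 bytes/pixel
            yuv, width,                                // Y plane
            yuv + width * height, width / 2,           // U plane
            yuv + 5 * width * height / 4, width / 2,   // V plane
            width, height);
    enc->AddFrame(yuv, frame_size);
    if (enc->GetQueueSize() >= 5)                      // crude backpressure, as in Run()
        usleep(1000 * 100);
}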
void GSMLCapturer::Run()
{
    // Note: open_cam() allocates _op->_ctx0 itself; the original code also
    // malloc'd it here, which only leaked the first block.
    if (!open_cam()) return;
    prepare_buffer();
    start_streams();
    _run = true;

    struct pollfd fds[1];
    struct v4l2_buffer v4l2_buf;
    long long _source = 0, _dst = 0;

    output = "test_out.h264";
    size_t frame_size = 1280 * 720 * 3 / 2; // YUV420P
    int width = 1280;
    int height = 720;
    int fps = 30;

    // output file for the encoded stream
    output_file.open(output, std::ios::binary | std::ios::app);
    if (!output_file.is_open()) {
        printf("Error opening the output file.\n");
        // yuv_file.close();
        // return -1;
    }

    // encoder
    JetsonEnc *test = new JetsonEnc(width, height, fps);
    EncDataWriter *writer = new EncDataWriter;
    int frames = 0;
    // register writer as the receiver of the encoder's output callback
    test->SetDecCallBack(static_cast<JetsonEncListner *>(writer));

    while (_run)
    {
        int cam_fd = -1;
        context_t *p = nullptr;
        //if((_op->GetType()!=RenderPosition::FRONT&&_op->GetType()!=RenderPosition::BACK)||_op->IsForward())
        if (_op->IsForward())
        //if(_op->GetType()!=RenderPosition::ALL)
        {
            cam_fd = _op->_ctx0->cam_fd;
            p = _op->_ctx0;
        }
        else {
            cam_fd = _op->_ctx1->cam_fd;
            p = _op->_ctx1;
        }
        //assert(p!=nullptr);
        /*
        else
        {
            if(_op->IsForward())
            {
                cam_fd=_ctx[0].cam_fd;
                p=&_ctx[0];
            }
            else
            {
                cam_fd=_ctx[1].cam_fd;
                p=&_ctx[1];
            }
        }
        */
        fds[0].fd = cam_fd;
        fds[0].events = POLLIN;
        if (poll(fds, 1, 5000) > 0)
        {
            // if (fds[0].revents & POLLIN && _op->GetIndex()==3)
            if (fds[0].revents & POLLIN)
            {
                /* Dequeue a camera buffer */
                memset(&v4l2_buf, 0, sizeof(v4l2_buf));
                v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                if (p->capture_dmabuf)
                    v4l2_buf.memory = V4L2_MEMORY_DMABUF;
                else
                    v4l2_buf.memory = V4L2_MEMORY_MMAP;
                if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
                    printf("Failed to dequeue camera buff: %s (%d)",
                            strerror(errno), errno);

                if (_op->GetType() == RenderPosition::FRONT)
                {
                    _source = std::chrono::duration_cast<std::chrono::milliseconds>(
                            std::chrono::system_clock::now().time_since_epoch()).count();
                    //printf("encode delay:%lld----frame:%ld\r\n", _source);
                }
                //_ctx.frame++;

                rtc::scoped_refptr<webrtc::I420Buffer> buffer =
                        webrtc::I420Buffer::Create(p->cam_w, p->cam_h);
                // rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(800,600);
                // std::cout << v4l2_buf.index << std::endl;
                const int conversionResult = libyuv::ConvertToI420(
                        (uint8_t *)p->g_buff[v4l2_buf.index].start, 0,
                        buffer->MutableDataY(), buffer->StrideY(),
                        buffer->MutableDataU(), buffer->StrideU(),
                        buffer->MutableDataV(), buffer->StrideV(),
                        0, 0,
                        p->cam_w, p->cam_h,
                        buffer->width(), buffer->height(),
                        libyuv::kRotate0, libyuv::FOURCC_YUYV);

                // Alternative encoder path, kept for reference:
                // uint8_t *yuv = new uint8_t[frame_size];
                // int result = libyuv::YUY2ToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 2*width,
                //         yuv, width, yuv + width*height,
                //         width/2, yuv + 5*width*height/4, width/2, width, height);
                // int frame_size = width * height * 3 / 2; // I420 has three planes: Y, U, V
                // test->AddFrame(yuv, frame_size);
                // frames++;
                // printf("Frame : %d\n", frames);
                // if (test->GetQueueSize() >= 5) {
                //     usleep(1000 * 100);
                // }
                // usleep(1000 * 1000 / fps);

                if (conversionResult >= 0)
                {
                    webrtc::VideoFrame videoFrame(buffer,
                            webrtc::VideoRotation::kVideoRotation_0, rtc::TimeNanos());
                    if ((p->cam_w == 0) && (p->cam_h == 0)) {
                        _broadcaster.OnFrame(videoFrame);
                    }
                    else
                    {
                        if (height == 0) {
                            height = (videoFrame.height() * width) / videoFrame.width();
                        }
                        else if (width == 0) {
                            width = (videoFrame.width() * height) / videoFrame.height();
                        }
                        int stride_y = width;
                        int stride_uv = (width + 1) / 2;
                        rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer =
                                webrtc::I420Buffer::Create(width, height,
                                        stride_y, stride_uv, stride_uv);
                        scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
                        webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer,
                                webrtc::kVideoRotation_0, rtc::TimeNanos());
                        _broadcaster.OnFrame(frame);
                    }
                }

                if (_op->GetType() == RenderPosition::FRONT)
                {
                    _dst = std::chrono::duration_cast<std::chrono::milliseconds>(
                            std::chrono::system_clock::now().time_since_epoch()).count();
                    //printf("encode delay:%lld\r\n",_dst - _source);
                }

                /* Re-queue the buffer so the driver can refill it */
                if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
                    printf("Failed to queue camera buffers: %s (%d)",
                            strerror(errno), errno);
            }
        }
        //std::this_thread::sleep_for(std::chrono::milliseconds(30));
    }
    stop_streams();
    close_cam();
}
void GSMLCapturer::Destroy() {
    Stop();
}

void GSMLCapturer::AddOrUpdateSink(
        rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
        const rtc::VideoSinkWants& wants) {
    _broadcaster.AddOrUpdateSink(sink, wants);
}

void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
{
    _broadcaster.RemoveSink(sink);
}
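
// A minimal sketch of a sink that could be attached through AddOrUpdateSink();
// FrameCounter is illustrative only and not part of this project. Every frame
// pushed into _broadcaster.OnFrame() above is fanned out to sinks like this.
class FrameCounter : public rtc::VideoSinkInterface<webrtc::VideoFrame>
{
public:
    void OnFrame(const webrtc::VideoFrame &frame) override
    {
        ++_count;
        printf("sink frame %d: %dx%d\n", _count, frame.width(), frame.height());
    }
private:
    int _count = 0;
};
// Usage: capturer->AddOrUpdateSink(&counter, rtc::VideoSinkWants());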
bool GSMLCapturer::prepare_buffer()
{
    //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    //for(int32_t i=0;i<count;i++)
    {
        context_t *p = _op->_ctx0;
        p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
        if (p->g_buff == NULL)
            ERROR_RETURN("Failed to allocate global buffer context");
        if (p->capture_dmabuf) {
            if (!request_camera_buff(p))
                ERROR_RETURN("Failed to set up camera buff");
        } else {
            if (!request_camera_buff_mmap(p))
                ERROR_RETURN("Failed to set up camera buff");
        }
        INFO("Succeeded in preparing stream buffers");
    }
    return true;
}
bool GSMLCapturer::request_camera_buff(context_t *p)
{
    // for(int32_t i=0;i<count;i++)
    {
        // context_t * p=&_ctx[i];
        struct v4l2_requestbuffers rb;
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_DMABUF;
        if (ioctl(p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                    strerror(errno), errno);
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof(buf));
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_DMABUF;
            if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                        strerror(errno), errno);
            /* TODO: add support for multi-planar formats.
               Enqueue an empty v4l2 buffer into the camera capture plane. */
            buf.m.fd = (unsigned long)p->g_buff[index].dmabuff_fd;
            if (buf.length != p->g_buff[index].size)
            {
                WARN("Camera v4l2 buf length is not expected");
                p->g_buff[index].size = buf.length;
            }
            if (ioctl(p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }
    return true;
}
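
// Note (assumption): the DMABUF path above expects p->g_buff[index].dmabuff_fd
// to have been filled in beforehand; nothing in this file allocates those fds.
// On Jetson they would typically come from the Multimedia API buffer utilities
// (e.g. NvBufferCreate() from nvbuf_utils.h) before request_camera_buff() runs.
// With capture_dmabuf hard-coded to false in open_cam(), only the MMAP path
// below is exercised here.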
bool GSMLCapturer::stop_streams()
{
    enum v4l2_buf_type type;
    //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    //for(int32_t i=0;i<count;i++)
    {
        context_t *p = _op->_ctx0;
        /* Stop v4l2 streaming first, then unmap the buffers */
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(p->cam_fd, VIDIOC_STREAMOFF, &type))
            ERROR_RETURN("Failed to stop streaming: %s (%d)",
                    strerror(errno), errno);
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            if (munmap(p->g_buff[index].start, p->g_buff[index].size) == -1)
            {
                ERROR_RETURN("munmap failed: %s (%d)", strerror(errno), errno);
            }
        }
        INFO("Camera video streaming off ...");
    }
    return true;
}
bool GSMLCapturer::start_streams()
{
    enum v4l2_buf_type type; // could this be commented out??
    // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    // for(int32_t i=0;i<count;i++)
    {
        context_t *p = _op->_ctx0;
        /* Start v4l2 streaming */
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(p->cam_fd, VIDIOC_STREAMON, &type) < 0)
            ERROR_RETURN("Failed to start streaming: %s (%d)",
                    strerror(errno), errno);
        usleep(200);
        INFO("Camera video streaming on ...");
    }
    return true;
}
bool GSMLCapturer::request_camera_buff_mmap(context_t *p)
{
    struct v4l2_requestbuffers rb;
    // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    // for(int32_t i=0;i<count;i++)
    {
        memset(&rb, 0, sizeof(rb));
        rb.count = V4L2_BUFFERS_NUM;
        rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_MMAP;
        if (ioctl(p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
            ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                    strerror(errno), errno);
        if (rb.count != V4L2_BUFFERS_NUM)
            ERROR_RETURN("V4l2 buffer number is not as desired");
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            struct v4l2_buffer buf;
            /* Query camera v4l2 buf length */
            memset(&buf, 0, sizeof(buf));
            buf.index = index;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
                ERROR_RETURN("Failed to query buff: %s (%d)",
                        strerror(errno), errno);
            p->g_buff[index].size = buf.length;
            p->g_buff[index].start = (unsigned char *)
                mmap(NULL /* start anywhere */,
                        buf.length,
                        PROT_READ | PROT_WRITE /* required */,
                        MAP_SHARED /* recommended */,
                        p->cam_fd, buf.m.offset);
            if (MAP_FAILED == p->g_buff[index].start)
                ERROR_RETURN("Failed to map buffers");
            if (ioctl(p->cam_fd, VIDIOC_QBUF, &buf) < 0)
                ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }
    return true;
}
void GSMLCapturer::close_cam()
{
    // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
    //for(int32_t i=0;i<count;i++)
    {
        context_t *p = _op->_ctx0;
        if (p->g_buff != nullptr)
        {
            free(p->g_buff);
            p->g_buff = nullptr;
        }
        if (p->cam_fd > 0)
            close(p->cam_fd);
    }
    free(_op->_ctx0);
    _op->_ctx0 = nullptr; // avoid a dangling pointer after free
}
void GSMLCapturer::Stop()
{
    _run = false;
    if (_thread.joinable()) // guard against Stop() before Start()
        _thread.join();
}
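
// A minimal usage sketch (hypothetical call site; the CaptureOp comes from the
// application):
//   auto source = OpenGSMLCapture(op);   // spawns the capture thread via Start()
//   ... wrap `source` in a webrtc::VideoTrack and add it to a PeerConnection ...
//   capturer->Stop();                    // _run = false, join; Run() then calls
//                                        // stop_streams() and close_cam()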