// gsml_capturer.cpp
#include "pch.h"

#include "../common/comm.h"
#include "./include/api.h"
#include "lock.h"
#include "capture_op.h"
#include "gsml_capturer.h"

#include <linux/videodev2.h>
#include <fcntl.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

#include <errno.h>
#include <chrono>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <ctime>
#include <iostream>
#include <string>
#include <thread>
  16. rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> OpenGSMLCapture(CaptureOp* op,int32_t GSML)
  17. {
  18. auto video_capture=std::make_unique<GSMLCapturer>(op);
  19. video_capture->Start(GSML);
  20. rtc::scoped_refptr<GSMLTrackSource> video_source = GSMLTrackSource::Create(std::move(video_capture));
  21. return video_source;
  22. }
  23. GSMLCapturer::GSMLCapturer(CaptureOp* lhs):_op(lhs)
  24. {
  25. }
  26. void GSMLCapturer::Start(int32_t GSML)
  27. {
  28. count = 0;
  29. CameraRecord = GSML;
  30. _thread = std::thread(std::bind(&GSMLCapturer::Run, this));
  31. }
  32. bool GSMLCapturer::open_cam()
  33. {
  34. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  35. _op->_ctx0=(context_t *)malloc(sizeof(context_t));
  36. //for(int i=0;i<count;i++)
  37. {
  38. context_t * p=_op->_ctx0;
  39. _op->_ctx0->m_Gsml_Info.m_device = 10;
  40. _op->_ctx0->m_Gsml_Info.m_record = 0;
  41. _op->_ctx0->m_Gsml_Info.m_time = 0;
  42. p->cam_fd=-1;
  43. p->cam_pixfmt = V4L2_PIX_FMT_YUYV;
  44. //p->cam_w = 1280;
  45. //p->cam_h = 720;
  46. p->cam_w = 1920;
  47. p->cam_h = 1080;
  48. p->g_buff = NULL;
  49. p->capture_dmabuf = false; // opencv display v4l2 can't be true
  50. p->fps = 30;
  51. p->enable_verbose = false;
  52. std::string devname="/dev/video" + std::to_string(_op->GetIndex());
  53. // std::cout<<"设备:"<<devname<<std::endl;
  54. //ctx.cam_devname=devname+std::to_string();
  55. p->cam_fd = open(devname.c_str(), O_RDWR | O_NONBLOCK);
  56. if( p->cam_fd==-1)
  57. {
  58. ERROR_RETURN("Failed to open camera device %s: %s (%d)",
  59. devname.c_str(), strerror(errno), errno);
  60. }
  61. /*
  62. struct v4l2_fmtdesc fmtdesc;
  63. fmtdesc.index = 0;
  64. fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  65. while (0 == ioctl(p->cam_fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
  66. printf("fmt: %s <0x%x>\n", fmtdesc.description, fmtdesc.pixelformat);
  67. fmtdesc.index++;
  68. }*/
  69. /*
  70. struct v4l2_selection crop;
  71. crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  72. crop.target = V4L2_SEL_TGT_CROP_BOUNDS;
  73. ioctl(p->cam_fd, VIDIOC_G_SELECTION, &crop);
  74. crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  75. crop.target = V4L2_SEL_TGT_CROP;
  76. crop.r.left = 0;
  77. crop.r.top = 0;
  78. crop.r.width = p->cam_w;
  79. crop.r.height = p->cam_h;
  80. ioctl(p->cam_fd, VIDIOC_S_SELECTION, &crop);
  81. crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  82. crop.target = V4L2_SEL_TGT_COMPOSE;
  83. crop.r.left = 0;
  84. crop.r.top = 0;
  85. crop.r.width = 1280;
  86. crop.r.height = 720;*/
  87. struct v4l2_format fmt;
  88. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  89. fmt.fmt.pix.width = p->cam_w;
  90. fmt.fmt.pix.height = p->cam_h;
  91. fmt.fmt.pix.pixelformat = p->cam_pixfmt;
  92. fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  93. if (ioctl( p->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
  94. ERROR_RETURN("Failed to set camera output format: %s (%d)",
  95. strerror(errno), errno);
  96. /* Get the real format in case the desired is not supported */
  97. memset(&fmt, 0, sizeof fmt);
  98. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  99. if (ioctl( p->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
  100. ERROR_RETURN("Failed to get camera output format: %s (%d)",
  101. strerror(errno), errno);
  102. if (fmt.fmt.pix.width != p->cam_w ||
  103. fmt.fmt.pix.height != p->cam_h ||
  104. fmt.fmt.pix.pixelformat != p->cam_pixfmt)
  105. {
  106. WARN("The desired format is not supported");
  107. p->cam_w = fmt.fmt.pix.width;
  108. p->cam_h = fmt.fmt.pix.height;
  109. p->cam_pixfmt =fmt.fmt.pix.pixelformat;
  110. }
  111. struct v4l2_streamparm streamparm;
  112. memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
  113. streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  114. /*
  115. streamparm.parm.capture.timeperframe.numerator = 1;
  116. streamparm.parm.capture.timeperframe.denominator = 3;
  117. streamparm.parm.output.timeperframe.numerator = 1;
  118. streamparm.parm.output.timeperframe.denominator = 3;
  119. */
  120. ioctl ( p->cam_fd, VIDIOC_G_PARM, &streamparm);
  121. printf(">>: Frame rate: %u/%u\n",streamparm.parm.capture.timeperframe.numerator,streamparm.parm.capture.timeperframe.denominator);
  122. INFO("Camera ouput format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
  123. fmt.fmt.pix.width,
  124. fmt.fmt.pix.height,
  125. fmt.fmt.pix.bytesperline,
  126. fmt.fmt.pix.sizeimage,
  127. streamparm.parm.capture.timeperframe.denominator,
  128. streamparm.parm.capture.timeperframe.numerator);
  129. }
  130. return true;
  131. }
  132. void GSMLCapturer::Run()
  133. {
  134. if(!open_cam()) return;
  135. prepare_buffer();
  136. start_streams();
  137. _run=true;
  138. struct pollfd fds[1];
  139. struct v4l2_buffer v4l2_buf;
  140. while(_run)
  141. {
  142. int cam_fd=-1;
  143. context_t * p=nullptr;
  144. //if((_op->GetType()!=RenderPosition::FRONT&&_op->GetType()!=RenderPosition::BACK)||_op->IsForward())
  145. if(_op->IsForward())
  146. {
  147. cam_fd=_op->_ctx0->cam_fd;
  148. p=_op->_ctx0;
  149. }
  150. else{
  151. cam_fd=_op->_ctx1->cam_fd;
  152. p=_op->_ctx1;
  153. }
  154. //assert(p!=nullptr);
  155. /*
  156. else
  157. {
  158. if(_op->IsForward())
  159. {
  160. cam_fd=_ctx[0].cam_fd;
  161. p=&_ctx[0];
  162. }
  163. else
  164. {
  165. cam_fd=_ctx[1].cam_fd;
  166. p=&_ctx[1];
  167. }
  168. }
  169. */
  170. /*
  171. fd_set fds;
  172. struct timeval timeout = {0,0};
  173. timeout.tv_sec = 1;
  174. FD_ZERO(&fds);
  175. FD_SET(cam_fd, &fds);
  176. int err = select(cam_fd + 1, &fds, NULL, NULL, &timeout);
  177. if (err != -1 && FD_ISSET(cam_fd, &fds)) */
  178. fds[0].fd = cam_fd;
  179. fds[0].events = POLLIN;
  180. if(poll(&fds[0], 1, 3000) > 0)
  181. {
  182. if (fds[0].revents & POLLIN)
  183. {
  184. /* Dequeue a camera buff */
  185. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  186. v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  187. if (p->capture_dmabuf)
  188. v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  189. else
  190. v4l2_buf.memory = V4L2_MEMORY_MMAP;
  191. if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  192. printf("Failed to dequeue camera buff: %s (%d)\r\n",strerror(errno), errno);
  193. //_ctx.frame++;
  194. //rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(1280,720);
  195. rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  196. /*
  197. const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  198. buffer->MutableDataY(), buffer->StrideY(),
  199. buffer->MutableDataU(), buffer->StrideU(),
  200. buffer->MutableDataV(), buffer->StrideV(),
  201. 0, 0,
  202. p->cam_w,p->cam_h,
  203. 1280, 720,
  204. libyuv::kRotate0, libyuv::FOURCC_YUYV);
  205. */
  206. if(!CameraRecord)
  207. {
  208. rtc::scoped_refptr<webrtc::I420Buffer> buffer=webrtc::I420Buffer::Create(p->cam_w,p->cam_h);
  209. const int conversionResult = libyuv::ConvertToI420((uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
  210. buffer->MutableDataY(), buffer->StrideY(),
  211. buffer->MutableDataU(), buffer->StrideU(),
  212. buffer->MutableDataV(), buffer->StrideV(),
  213. 0, 0,
  214. p->cam_w,p->cam_h,
  215. buffer->width(), buffer->height(),
  216. libyuv::kRotate0, libyuv::FOURCC_YUYV);
  217. int width = p->cam_w;
  218. int height = p->cam_h;
  219. if (conversionResult >= 0)
  220. {
  221. webrtc::VideoFrame videoFrame(buffer, webrtc::VideoRotation::kVideoRotation_0, rtc::TimeNanos());
  222. if ((p->cam_w == 0) && (p->cam_h == 0)) {
  223. _broadcaster.OnFrame(videoFrame);
  224. }
  225. else
  226. {
  227. if (height == 0) {
  228. height = (videoFrame.height() * width) / videoFrame.width();
  229. }
  230. else if (width == 0) {
  231. width = (videoFrame.width() * height) / videoFrame.height();
  232. }
  233. width = 1080;
  234. height = 640;
  235. int stride_y = width;
  236. int stride_uv = (width + 1) / 2;
  237. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer = webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
  238. scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
  239. webrtc::VideoFrame frame = webrtc::VideoFrame(scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeNanos());
  240. count++;
  241. //if(count == 2)
  242. {
  243. _broadcaster.OnFrame(frame);
  244. count = 0;
  245. }
  246. //if(_op->GetType()!=RenderPosition::FRONT)
  247. }
  248. }
  249. }
  250. else
  251. {
  252. //if(_op->GetType()==RenderPosition::DASHBOARD || _op->GetType()==RenderPosition::DASHBOARD_m)
  253. if((_op->GetType() == _op->_ctx0->m_Gsml_Info.m_device) && _op->_ctx0->m_Gsml_Info.m_record && !count)
  254. {
  255. printf("---%ld----%d---%d---\r\n",_op->_ctx0->m_Gsml_Info.m_time,_op->_ctx0->m_Gsml_Info.m_record,_op->_ctx0->m_Gsml_Info.m_device);
  256. time_t rawtime;
  257. struct tm* info;
  258. time(&rawtime);
  259. info = localtime(&rawtime);
  260. uint8_t secret[64] = { 0 };
  261. sprintf((char*)secret, "log//%d-%.2d-%.2d_%.2d-%.2d-%.2d--%d.yuv", info->tm_year + 1900, info->tm_mon + 1, info->tm_mday,info->tm_hour,info->tm_min,info->tm_sec,
  262. _op->_ctx0->m_Gsml_Info.m_device);
  263. printf("%s\r\n",secret);
  264. File_fd = open((const char *)secret,O_WRONLY | O_CREAT | O_NOCTTY | O_TRUNC);
  265. write(File_fd,p->g_buff[v4l2_buf.index].start,v4l2_buf.length);
  266. if(_op->_ctx0->m_Gsml_Info.m_time)//time
  267. {
  268. _curStopTick = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  269. count = 1;
  270. }
  271. else//notime
  272. {
  273. _op->_ctx0->m_Gsml_Info.m_record = 0;
  274. close(File_fd);
  275. }
  276. }
  277. else if((_op->GetType() == _op->_ctx0->m_Gsml_Info.m_device) && _op->_ctx0->m_Gsml_Info.m_record && count)
  278. {
  279. long long _dstStopTick = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
  280. if(_dstStopTick - _curStopTick >= 2000)
  281. {
  282. _op->_ctx0->m_Gsml_Info.m_record = 0;
  283. count = 0;
  284. close(File_fd);
  285. }
  286. else
  287. {
  288. write(File_fd,p->g_buff[v4l2_buf.index].start,v4l2_buf.length);
  289. }
  290. }
  291. else if((_op->GetType() == _op->_ctx0->m_Gsml_Info.m_device) && !_op->_ctx0->m_Gsml_Info.m_record && count)
  292. {
  293. write(File_fd,p->g_buff[v4l2_buf.index].start,v4l2_buf.length);
  294. _op->_ctx0->m_Gsml_Info.m_record = 0;
  295. count = 0;
  296. close(File_fd);
  297. }
  298. }
  299. if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
  300. printf("Failed to queue camera buffers: %s (%d)\r\n",strerror(errno), errno);
  301. }
  302. }
  303. if(!CameraRecord)
  304. std::this_thread::sleep_for(std::chrono::milliseconds(30));
  305. }
  306. stop_streams();
  307. close_cam();
  308. }
  309. void GSMLCapturer::Destroy() {
  310. Stop();
  311. }
  312. void GSMLCapturer::AddOrUpdateSink(
  313. rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
  314. const rtc::VideoSinkWants& wants) {
  315. _broadcaster.AddOrUpdateSink(sink, wants);
  316. }
  317. void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
  318. {
  319. _broadcaster.RemoveSink(sink);
  320. }
  321. bool GSMLCapturer::prepare_buffer()
  322. {
  323. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  324. //for(int32_t i=0;i<count;i++)
  325. {
  326. context_t * p=_op->_ctx0;
  327. p->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
  328. if ( p->g_buff == NULL)
  329. ERROR_RETURN("Failed to allocate global buffer context");
  330. if ( p->capture_dmabuf) {
  331. if (!request_camera_buff(p))
  332. ERROR_RETURN("Failed to set up camera buff");
  333. } else {
  334. if (!request_camera_buff_mmap(p))
  335. ERROR_RETURN("Failed to set up camera buff");
  336. }
  337. INFO("Succeed in preparing stream buffers");
  338. }
  339. return true;
  340. }
  341. bool GSMLCapturer::request_camera_buff( context_t * p)
  342. {
  343. // for(int32_t i=0;i<count;i++)
  344. {
  345. // context_t * p=&_ctx[i];
  346. struct v4l2_requestbuffers rb;
  347. memset(&rb, 0, sizeof(rb));
  348. rb.count = V4L2_BUFFERS_NUM;
  349. rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  350. rb.memory = V4L2_MEMORY_DMABUF;
  351. if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
  352. ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
  353. strerror(errno), errno);
  354. if (rb.count != V4L2_BUFFERS_NUM)
  355. ERROR_RETURN("V4l2 buffer number is not as desired");
  356. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  357. {
  358. struct v4l2_buffer buf;
  359. /* Query camera v4l2 buf length */
  360. memset(&buf, 0, sizeof buf);
  361. buf.index = index;
  362. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  363. buf.memory = V4L2_MEMORY_DMABUF;
  364. if (ioctl( p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
  365. ERROR_RETURN("Failed to query buff: %s (%d)",
  366. strerror(errno), errno);
  367. /* TODO: add support for multi-planer
  368. Enqueue empty v4l2 buff into camera capture plane */
  369. buf.m.fd = (unsigned long) p->g_buff[index].dmabuff_fd;
  370. if (buf.length != p->g_buff[index].size)
  371. {
  372. WARN("Camera v4l2 buf length is not expected");
  373. p->g_buff[index].size = buf.length;
  374. }
  375. if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
  376. ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
  377. strerror(errno), errno);
  378. }
  379. }
  380. return true;
  381. }
  382. bool GSMLCapturer::stop_streams()
  383. {
  384. enum v4l2_buf_type type;
  385. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  386. //for(int32_t i=0;i<count;i++)
  387. {
  388. context_t * p=_op->_ctx0;
  389. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  390. {
  391. if(munmap(p->g_buff[index].start,p->g_buff[index].size)==-1)
  392. {
  393. ERROR_RETURN("munmap failed: %s (%d)", strerror(errno), errno);
  394. }
  395. }
  396. /* Stop v4l2 streaming */
  397. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  398. if (ioctl( p->cam_fd, VIDIOC_STREAMOFF, &type))
  399. ERROR_RETURN("Failed to stop streaming: %s (%d)",
  400. strerror(errno), errno);
  401. INFO("Camera video streaming off ...");
  402. }
  403. return true;
  404. }
  405. bool GSMLCapturer::start_streams()
  406. {
  407. enum v4l2_buf_type type;
  408. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  409. // for(int32_t i=0;i<count;i++)
  410. {
  411. context_t * p=_op->_ctx0;
  412. /* Start v4l2 streaming */
  413. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  414. if (ioctl( p->cam_fd, VIDIOC_STREAMON, &type) < 0)
  415. ERROR_RETURN("Failed to start streaming: %s (%d)",
  416. strerror(errno), errno);
  417. usleep(200);
  418. INFO("Camera video streaming on ...");
  419. }
  420. return true;
  421. }
  422. bool GSMLCapturer::request_camera_buff_mmap(context_t * p)
  423. {
  424. struct v4l2_requestbuffers rb;
  425. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  426. // for(int32_t i=0;i<count;i++)
  427. {
  428. memset(&rb, 0, sizeof(rb));
  429. rb.count = V4L2_BUFFERS_NUM;
  430. rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  431. rb.memory = V4L2_MEMORY_MMAP;
  432. if (ioctl( p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
  433. ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
  434. strerror(errno), errno);
  435. if (rb.count != V4L2_BUFFERS_NUM)
  436. ERROR_RETURN("V4l2 buffer number is not as desired");
  437. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  438. {
  439. struct v4l2_buffer buf;
  440. /* Query camera v4l2 buf length */
  441. memset(&buf, 0, sizeof buf);
  442. buf.index = index;
  443. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  444. buf.memory = V4L2_MEMORY_MMAP;
  445. if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
  446. ERROR_RETURN("Failed to query buff: %s (%d)",
  447. strerror(errno), errno);
  448. p->g_buff[index].size = buf.length;
  449. p->g_buff[index].start = (unsigned char *)
  450. mmap (NULL /* start anywhere */,
  451. buf.length,
  452. PROT_READ | PROT_WRITE /* required */,
  453. MAP_SHARED /* recommended */,
  454. p->cam_fd, buf.m.offset);
  455. if (MAP_FAILED == p->g_buff[index].start)
  456. ERROR_RETURN("Failed to map buffers");
  457. if (ioctl( p->cam_fd, VIDIOC_QBUF, &buf) < 0)
  458. ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
  459. strerror(errno), errno);
  460. }
  461. }
  462. return true;
  463. }
  464. void GSMLCapturer::close_cam()
  465. {
  466. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  467. //for(int32_t i=0;i<count;i++)
  468. {
  469. context_t * p=_op->_ctx0;
  470. if(p->g_buff!=nullptr)
  471. {
  472. free(p->g_buff);
  473. p->g_buff = nullptr;
  474. }
  475. if(p->cam_fd>0)
  476. close(p->cam_fd);
  477. }
  478. free(_op->_ctx0);
  479. }
  480. void GSMLCapturer::Stop()
  481. {
  482. count = 0;
  483. CameraRecord = 0;
  484. _op->_ctx0->m_Gsml_Info.m_device = 10;
  485. _op->_ctx0->m_Gsml_Info.m_record = 0;
  486. _op->_ctx0->m_Gsml_Info.m_time = 0;
  487. _run=false;
  488. //if(_thread.joinable())
  489. _thread.join();
  490. }