camera_v4l2_cuda.cpp

/*
 * Copyright (c) 2016-2018, NVIDIA CORPORATION. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *  * Neither the name of NVIDIA CORPORATION nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include <stdio.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <signal.h>
#include <poll.h>

#include "NvEglRenderer.h"
#include "NvUtils.h"
#include "NvCudaProc.h"
#include "camera_v4l2_cuda.h"

/* Size of the window (in bytes) scanned backwards from the end of a captured
   MJPEG frame to locate the JPEG end-of-image marker (0xFFD9) */
#define MJPEG_EOS_SEARCH_SIZE 4096

static bool quit = false;

using namespace std;
static void
print_usage(void)
{
    printf("\n\tUsage: cameras_egl_demo [OPTIONS]\n\n"
           "\tExample: \n"
           "\t./cameras_egl_demo -d /dev/video0 -s 1280x720\n\n"
           "\tSupported options:\n"
           "\t-d\t\tSet V4l2 video device node\n"
           "\t-s\t\tSet output resolution of video device\n"
           "\t-f\t\tSet output pixel format of video device (supports only YUYV/YVYU/UYVY/VYUY/GREY/MJPEG)\n"
           "\t-r\t\tSet renderer frame rate (30 fps by default)\n"
           "\t-n\t\tSave the n-th frame before VIC processing\n"
           "\t-c\t\tEnable CUDA algorithm (draw a black box in the upper left corner)\n"
           "\t-v\t\tEnable verbose message\n"
           "\t-h\t\tPrint this usage\n\n"
           "\tNOTE: It runs infinitely until you terminate it with <ctrl+c>\n");
}
static bool
parse_cmdline(context_t * ctx, int argc, char **argv)
{
    int c;

    if (argc < 2)
    {
        print_usage();
        exit(EXIT_SUCCESS);
    }

    while ((c = getopt(argc, argv, "d:s:f:r:n:cvh")) != -1)
    {
        switch (c)
        {
            case 'd':
                ctx->cam_devname = optarg;
                break;
            case 's':
                if (sscanf(optarg, "%dx%d",
                            &ctx->cam_w, &ctx->cam_h) != 2)
                {
                    print_usage();
                    return false;
                }
                break;
            case 'f':
                if (strcmp(optarg, "YUYV") == 0)
                    ctx->cam_pixfmt = V4L2_PIX_FMT_YUYV;
                else if (strcmp(optarg, "YVYU") == 0)
                    ctx->cam_pixfmt = V4L2_PIX_FMT_YVYU;
                else if (strcmp(optarg, "VYUY") == 0)
                    ctx->cam_pixfmt = V4L2_PIX_FMT_VYUY;
                else if (strcmp(optarg, "UYVY") == 0)
                    ctx->cam_pixfmt = V4L2_PIX_FMT_UYVY;
                else if (strcmp(optarg, "GREY") == 0)
                    ctx->cam_pixfmt = V4L2_PIX_FMT_GREY;
                else if (strcmp(optarg, "MJPEG") == 0)
                    ctx->cam_pixfmt = V4L2_PIX_FMT_MJPEG;
                else
                {
                    print_usage();
                    return false;
                }
                sprintf(ctx->cam_file, "camera.%s", optarg);
                break;
            case 'r':
                ctx->fps = strtol(optarg, NULL, 10);
                break;
            case 'n':
                ctx->save_n_frame = strtol(optarg, NULL, 10);
                break;
            case 'c':
                ctx->enable_cuda = true;
                break;
            case 'v':
                ctx->enable_verbose = true;
                break;
            case 'h':
                print_usage();
                exit(EXIT_SUCCESS);
                break;
            default:
                print_usage();
                return false;
        }
    }

    return true;
}
static void
set_defaults(context_t * ctx)
{
    memset(ctx, 0, sizeof(context_t));

    ctx->cam_devname = "/dev/video0";
    ctx->cam_fd = -1;
    ctx->cam_pixfmt = V4L2_PIX_FMT_UYVY;
    ctx->cam_w = 1280;
    ctx->cam_h = 720;
    ctx->frame = 0;
    ctx->save_n_frame = 0;

    ctx->g_buff = NULL;
    ctx->capture_dmabuf = true;
    ctx->renderer = NULL;
    ctx->fps = 30;

    ctx->enable_cuda = false;
    ctx->egl_image = NULL;
    ctx->egl_display = EGL_NO_DISPLAY;

    ctx->enable_verbose = false;
}
static nv_color_fmt nvcolor_fmt[] =
{
    /* TODO: add more pixel format mapping */
    {V4L2_PIX_FMT_UYVY, NVBUF_COLOR_FORMAT_UYVY},
    {V4L2_PIX_FMT_VYUY, NVBUF_COLOR_FORMAT_VYUY},
    {V4L2_PIX_FMT_YUYV, NVBUF_COLOR_FORMAT_YUYV},
    {V4L2_PIX_FMT_YVYU, NVBUF_COLOR_FORMAT_YVYU},
    {V4L2_PIX_FMT_GREY, NVBUF_COLOR_FORMAT_GRAY8},
    {V4L2_PIX_FMT_YUV420M, NVBUF_COLOR_FORMAT_YUV420},
};

static NvBufSurfaceColorFormat
get_nvbuff_color_fmt(unsigned int v4l2_pixfmt)
{
    unsigned i;

    for (i = 0; i < sizeof(nvcolor_fmt) / sizeof(nvcolor_fmt[0]); i++)
    {
        if (v4l2_pixfmt == nvcolor_fmt[i].v4l2_pixfmt)
            return nvcolor_fmt[i].nvbuff_color;
    }

    return NVBUF_COLOR_FORMAT_INVALID;
}
static bool
save_frame_to_file(context_t * ctx, struct v4l2_buffer * buf)
{
    int file;

    file = open(ctx->cam_file, O_CREAT | O_WRONLY | O_APPEND | O_TRUNC,
                S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
    if (-1 == file)
        ERROR_RETURN("Failed to open file for frame saving");

    if (-1 == write(file, ctx->g_buff[buf->index].start,
                ctx->g_buff[buf->index].size))
    {
        close(file);
        ERROR_RETURN("Failed to write frame into file");
    }

    close(file);

    return true;
}
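
/* Fill every chroma plane of the given dmabuf with 0x80, the neutral chroma
   value in 8-bit YUV, so a GREY capture renders as pure grayscale instead of
   inheriting whatever chroma the render buffer previously held. */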
static bool
nvbuff_do_clearchroma(int dmabuf_fd)
{
    int ret = 0;
    unsigned i;
    NvBufSurface *pSurf = NULL;

    if (-1 == NvBufSurfaceFromFd(dmabuf_fd, (void**)(&pSurf)))
        ERROR_RETURN("%s: NvBufSurfaceFromFd Failed \n", __func__);

    for (i = 1; i < pSurf->surfaceList[0].planeParams.num_planes; i++) {
        ret = NvBufSurfaceMemSet(pSurf, 0, i, 0x80);
        if (ret != 0)
            ERROR_RETURN("%s: NvBufSurfaceMemSet Failed \n", __func__);
    }

    return true;
}
static bool
camera_initialize(context_t * ctx)
{
    struct v4l2_format fmt;

    /* Open camera device */
    ctx->cam_fd = open(ctx->cam_devname, O_RDWR);
    if (ctx->cam_fd == -1)
        ERROR_RETURN("Failed to open camera device %s: %s (%d)",
                ctx->cam_devname, strerror(errno), errno);

    /* Set camera output format */
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = ctx->cam_w;
    fmt.fmt.pix.height = ctx->cam_h;
    fmt.fmt.pix.pixelformat = ctx->cam_pixfmt;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (ioctl(ctx->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
        ERROR_RETURN("Failed to set camera output format: %s (%d)",
                strerror(errno), errno);

    /* Get the real format in case the desired one is not supported */
    memset(&fmt, 0, sizeof fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(ctx->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
        ERROR_RETURN("Failed to get camera output format: %s (%d)",
                strerror(errno), errno);
    if (fmt.fmt.pix.width != ctx->cam_w ||
            fmt.fmt.pix.height != ctx->cam_h ||
            fmt.fmt.pix.pixelformat != ctx->cam_pixfmt)
    {
        WARN("The desired format is not supported");
        ctx->cam_w = fmt.fmt.pix.width;
        ctx->cam_h = fmt.fmt.pix.height;
        ctx->cam_pixfmt = fmt.fmt.pix.pixelformat;
    }

    struct v4l2_streamparm streamparm;
    memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(ctx->cam_fd, VIDIOC_G_PARM, &streamparm);

    INFO("Camera output format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u",
            fmt.fmt.pix.width,
            fmt.fmt.pix.height,
            fmt.fmt.pix.bytesperline,
            fmt.fmt.pix.sizeimage,
            streamparm.parm.capture.timeperframe.denominator,
            streamparm.parm.capture.timeperframe.numerator);

    return true;
}
static bool
display_initialize(context_t * ctx)
{
    /* Create EGL renderer */
    ctx->renderer = NvEglRenderer::createEglRenderer("renderer0",
            ctx->cam_w / 4, ctx->cam_h / 4,
            (1920 - ctx->cam_w / 4) / 2, (1080 - ctx->cam_h / 4) / 2);
    if (!ctx->renderer)
        ERROR_RETURN("Failed to create EGL renderer");
    ctx->renderer->setFPS(ctx->fps);

    if (ctx->enable_cuda)
    {
        /* Get default EGL display */
        ctx->egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
        if (ctx->egl_display == EGL_NO_DISPLAY)
            ERROR_RETURN("Failed to get EGL display connection");

        /* Init EGL display connection */
        if (!eglInitialize(ctx->egl_display, NULL, NULL))
            ERROR_RETURN("Failed to initialize EGL display connection");
    }

    return true;
}
static bool
init_components(context_t * ctx)
{
    if (!camera_initialize(ctx))
        ERROR_RETURN("Failed to initialize camera device");

    if (!display_initialize(ctx))
        ERROR_RETURN("Failed to initialize display");

    INFO("Initialized v4l2 components successfully");

    return true;
}
static bool
request_camera_buff(context_t *ctx)
{
    /* Request camera v4l2 buffers */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(rb));
    rb.count = V4L2_BUFFERS_NUM;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_DMABUF;
    if (ioctl(ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
        ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                strerror(errno), errno);
    if (rb.count != V4L2_BUFFERS_NUM)
        ERROR_RETURN("V4l2 buffer number is not as desired");

    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        struct v4l2_buffer buf;

        // Query camera v4l2 buf length
        memset(&buf, 0, sizeof buf);
        buf.index = index;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_DMABUF;
        if (ioctl(ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
            ERROR_RETURN("Failed to query buff: %s (%d)",
                    strerror(errno), errno);

        // TODO: add support for multi-planar formats
        // Enqueue empty v4l2 buff into camera capture plane
        buf.m.fd = (unsigned long) ctx->g_buff[index].dmabuff_fd;
        if (buf.length != ctx->g_buff[index].size)
        {
            WARN("Camera v4l2 buf length is not expected");
            ctx->g_buff[index].size = buf.length;
        }

        if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
            ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                    strerror(errno), errno);
    }

    return true;
}
static bool
request_camera_buff_mmap(context_t *ctx)
{
    /* Request camera v4l2 buffers */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(rb));
    rb.count = V4L2_BUFFERS_NUM;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;
    if (ioctl(ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
        ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
                strerror(errno), errno);
    if (rb.count != V4L2_BUFFERS_NUM)
        ERROR_RETURN("V4l2 buffer number is not as desired");

    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        struct v4l2_buffer buf;

        // Query camera v4l2 buf length
        memset(&buf, 0, sizeof buf);
        buf.index = index;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
            ERROR_RETURN("Failed to query buff: %s (%d)",
                    strerror(errno), errno);

        ctx->g_buff[index].size = buf.length;
        ctx->g_buff[index].start = (unsigned char *)
            mmap(NULL /* start anywhere */,
                    buf.length,
                    PROT_READ | PROT_WRITE /* required */,
                    MAP_SHARED /* recommended */,
                    ctx->cam_fd, buf.m.offset);
        if (MAP_FAILED == ctx->g_buff[index].start)
            ERROR_RETURN("Failed to map buffers");

        if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
            ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
                    strerror(errno), errno);
    }

    return true;
}
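
/* For MJPEG capture the V4L2 buffers hold a compressed bitstream, so they are
   plain mmap()ed memory rather than DMA buffers; only a single NvBufSurface is
   allocated here, as the destination for the decoded and converted frames. */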
static bool
prepare_buffers_mjpeg(context_t * ctx)
{
    NvBufSurf::NvCommonAllocateParams params = {0};

    /* Allocate global buffer context */
    ctx->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
    if (ctx->g_buff == NULL)
        ERROR_RETURN("Failed to allocate global buffer context");
    memset(ctx->g_buff, 0, V4L2_BUFFERS_NUM * sizeof(nv_buffer));

    params.memType = NVBUF_MEM_SURFACE_ARRAY;
    params.width = ctx->cam_w;
    params.height = ctx->cam_h;
    params.layout = NVBUF_LAYOUT_PITCH;
    params.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
    params.memtag = NvBufSurfaceTag_NONE;

    /* Create render buffer */
    if (NvBufSurf::NvAllocate(&params, 1, &ctx->render_dmabuf_fd))
        ERROR_RETURN("Failed to create NvBuffer");

    ctx->capture_dmabuf = false;
    if (!request_camera_buff_mmap(ctx))
        ERROR_RETURN("Failed to set up camera buff");

    INFO("Succeed in preparing mjpeg buffers");
    return true;
}
static bool
prepare_buffers(context_t * ctx)
{
    NvBufSurf::NvCommonAllocateParams camparams = {0};
    int fd[V4L2_BUFFERS_NUM] = {0};

    /* Allocate global buffer context */
    ctx->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
    if (ctx->g_buff == NULL)
        ERROR_RETURN("Failed to allocate global buffer context");

    camparams.memType = NVBUF_MEM_SURFACE_ARRAY;
    camparams.width = ctx->cam_w;
    camparams.height = ctx->cam_h;
    camparams.layout = NVBUF_LAYOUT_PITCH;
    camparams.colorFormat = get_nvbuff_color_fmt(ctx->cam_pixfmt);
    camparams.memtag = NvBufSurfaceTag_CAMERA;
    if (NvBufSurf::NvAllocate(&camparams, V4L2_BUFFERS_NUM, fd))
        ERROR_RETURN("Failed to create NvBuffer");

    /* Create buffers and provide them to the camera */
    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        NvBufSurface *pSurf = NULL;

        ctx->g_buff[index].dmabuff_fd = fd[index];

        if (-1 == NvBufSurfaceFromFd(fd[index], (void**)(&pSurf)))
            ERROR_RETURN("Failed to get NvBuffer parameters");

        if (ctx->cam_pixfmt == V4L2_PIX_FMT_GREY &&
                pSurf->surfaceList[0].pitch != pSurf->surfaceList[0].width)
            ctx->capture_dmabuf = false;

        /* TODO: add multi-planar support
           Currently only supports YUV422 interlaced single-planar */
        if (ctx->capture_dmabuf) {
            if (-1 == NvBufSurfaceMap(pSurf, 0, 0, NVBUF_MAP_READ_WRITE))
                ERROR_RETURN("Failed to map buffer");
            ctx->g_buff[index].start = (unsigned char *)pSurf->surfaceList[0].mappedAddr.addr[0];
            ctx->g_buff[index].size = pSurf->surfaceList[0].dataSize;
        }
    }

    camparams.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
    camparams.memtag = NvBufSurfaceTag_NONE;
    /* Create render buffer */
    if (NvBufSurf::NvAllocate(&camparams, 1, &ctx->render_dmabuf_fd))
        ERROR_RETURN("Failed to create NvBuffer");

    if (ctx->capture_dmabuf) {
        if (!request_camera_buff(ctx))
            ERROR_RETURN("Failed to set up camera buff");
    } else {
        if (!request_camera_buff_mmap(ctx))
            ERROR_RETURN("Failed to set up camera buff");
    }

    INFO("Succeed in preparing stream buffers");
    return true;
}
static bool
start_stream(context_t * ctx)
{
    enum v4l2_buf_type type;

    // Start v4l2 streaming
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(ctx->cam_fd, VIDIOC_STREAMON, &type) < 0)
        ERROR_RETURN("Failed to start streaming: %s (%d)",
                strerror(errno), errno);

    usleep(200);

    INFO("Camera video streaming on ...");
    return true;
}
static void
signal_handle(int signum)
{
    printf("Quit due to exit command from user!\n");
    quit = true;
}
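
/* CUDA post-processing path: the render dmabuf is wrapped in an NvBufSurface,
   mapped to an EGLImage, and handed to HandleEGLImage() from NvCudaProc, which
   runs the CUDA processing on it (per the usage text, drawing a black box in
   the upper left corner). The EGLImage is unmapped before the frame is
   rendered. */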
static bool
cuda_postprocess(context_t *ctx, int fd)
{
    if (ctx->enable_cuda)
    {
        NvBufSurface *pSurf = NULL;

        /* Create EGLImage from dmabuf fd */
        if (-1 == NvBufSurfaceFromFd(fd, (void**)(&pSurf)))
            ERROR_RETURN("Failed to get NvBufSurface from FD");
        NvBufSurfaceMapEglImage(pSurf, 0);
        ctx->egl_image = pSurf->surfaceList[0].mappedAddr.eglImage;
        if (ctx->egl_image == NULL)
            ERROR_RETURN("Failed to map dmabuf fd (0x%X) to EGLImage",
                    ctx->render_dmabuf_fd);

        // Run the CUDA algorithm on the EGLImage
        HandleEGLImage(&ctx->egl_image);

        // Destroy EGLImage
        NvBufSurfaceUnMapEglImage(pSurf, 0);
        ctx->egl_image = NULL;
    }

    return true;
}
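
/* Main capture loop: poll() on the camera fd, dequeue a filled buffer, decode
   it with NvJPEGDecoder when the source is MJPEG, convert it to YUV420 in the
   render buffer with the VIC (NvTransform), optionally run the CUDA
   post-processing, render the result via EGL, then re-queue the buffer. */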
static bool
start_capture(context_t * ctx)
{
    struct sigaction sig_action;
    struct pollfd fds[1];
    NvBufSurf::NvCommonTransformParams transform_params = {0};

    // Register a signal handler so <ctrl+c> triggers a clean shutdown
    sig_action.sa_handler = signal_handle;
    sigemptyset(&sig_action.sa_mask);
    sig_action.sa_flags = 0;
    sigaction(SIGINT, &sig_action, NULL);

    if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
        ctx->jpegdec = NvJPEGDecoder::createJPEGDecoder("jpegdec");

    /* Init the transform parameters */
    transform_params.src_top = 0;
    transform_params.src_left = 0;
    transform_params.src_width = ctx->cam_w;
    transform_params.src_height = ctx->cam_h;
    transform_params.dst_top = 0;
    transform_params.dst_left = 0;
    transform_params.dst_width = ctx->cam_w;
    transform_params.dst_height = ctx->cam_h;
    transform_params.flag = NVBUFSURF_TRANSFORM_FILTER;
    transform_params.flip = NvBufSurfTransform_None;
    transform_params.filter = NvBufSurfTransformInter_Algo3;

    // Enable render profiling information
    ctx->renderer->enableProfiling();

    fds[0].fd = ctx->cam_fd;
    fds[0].events = POLLIN;
    /* Wait up to 5000 ms for a camera event on each iteration */
    while (poll(fds, 1, 5000) > 0 && !quit)
    {
        if (fds[0].revents & POLLIN) {
            struct v4l2_buffer v4l2_buf;

            // Dequeue a camera buff
            memset(&v4l2_buf, 0, sizeof(v4l2_buf));
            v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            if (ctx->capture_dmabuf)
                v4l2_buf.memory = V4L2_MEMORY_DMABUF;
            else
                v4l2_buf.memory = V4L2_MEMORY_MMAP;
            if (ioctl(ctx->cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
                ERROR_RETURN("Failed to dequeue camera buff: %s (%d)",
                        strerror(errno), errno);

            ctx->frame++;
            printf("frame No : %d\n", ctx->frame);
            if (ctx->frame == ctx->save_n_frame)
                save_frame_to_file(ctx, &v4l2_buf);

            if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG) {
                int fd = 0;
                uint32_t width, height, pixfmt;
                unsigned int i = 0;
                unsigned int eos_search_size = MJPEG_EOS_SEARCH_SIZE;
                unsigned int bytesused = v4l2_buf.bytesused;
                uint8_t *p;

                // v4l2_buf.bytesused may include padding bytes for alignment,
                // so search backwards for the EOI marker to get the exact size
                if (eos_search_size > bytesused)
                    eos_search_size = bytesused;
                for (i = 0; i < eos_search_size; i++) {
                    p = (uint8_t *)(ctx->g_buff[v4l2_buf.index].start + bytesused);
                    if ((*(p-2) == 0xff) && (*(p-1) == 0xd9)) {
                        break;
                    }
                    bytesused--;
                }

                if (ctx->jpegdec->decodeToFd(fd, ctx->g_buff[v4l2_buf.index].start,
                        bytesused, pixfmt, width, height) < 0)
                    ERROR_RETURN("Cannot decode MJPEG");

                /* Convert the decoded buffer to YUV420P */
                if (NvBufSurf::NvTransform(&transform_params, fd, ctx->render_dmabuf_fd))
                    ERROR_RETURN("Failed to convert the buffer");
            } else {
                NvBufSurface *pSurf = NULL;

                if (-1 == NvBufSurfaceFromFd(ctx->g_buff[v4l2_buf.index].dmabuff_fd,
                        (void**)(&pSurf)))
                    ERROR_RETURN("Cannot get NvBufSurface from fd");
                if (ctx->capture_dmabuf) {
                    /* Cache sync for VIC operation since the data is from CPU */
                    if (-1 == NvBufSurfaceSyncForDevice(pSurf, 0, 0))
                        ERROR_RETURN("Cannot sync output buffer");
                } else {
                    /* Copy the raw buffer plane contents to an NvBufSurface plane */
                    if (-1 == Raw2NvBufSurface(ctx->g_buff[v4l2_buf.index].start, 0, 0,
                            ctx->cam_w, ctx->cam_h, pSurf))
                        ERROR_RETURN("Cannot copy raw buffer to NvBufSurface plane");
                }

                /* Convert the camera buffer from YUV422 to YUV420P */
                if (NvBufSurf::NvTransform(&transform_params,
                        ctx->g_buff[v4l2_buf.index].dmabuff_fd, ctx->render_dmabuf_fd))
                    ERROR_RETURN("Failed to convert the buffer");

                if (ctx->cam_pixfmt == V4L2_PIX_FMT_GREY) {
                    if (!nvbuff_do_clearchroma(ctx->render_dmabuf_fd))
                        ERROR_RETURN("Failed to clear chroma");
                }
            }

            cuda_postprocess(ctx, ctx->render_dmabuf_fd);

            ctx->renderer->render(ctx->render_dmabuf_fd);

            // Enqueue the camera buff again
            if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &v4l2_buf))
                ERROR_RETURN("Failed to queue camera buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }

    // Print profiling information when streaming stops
    ctx->renderer->printProfilingStats();

    if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
        delete ctx->jpegdec;

    return true;
}
static bool
stop_stream(context_t * ctx)
{
    enum v4l2_buf_type type;

    /* Stop v4l2 streaming */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(ctx->cam_fd, VIDIOC_STREAMOFF, &type))
        ERROR_RETURN("Failed to stop streaming: %s (%d)",
                strerror(errno), errno);

    INFO("Camera video streaming off ...");
    return true;
}
int
main(int argc, char *argv[])
{
    context_t ctx;
    int error = 0;

    set_defaults(&ctx);

    CHECK_ERROR(parse_cmdline(&ctx, argc, argv), cleanup,
            "Invalid options specified");

    CHECK_ERROR(init_components(&ctx), cleanup,
            "Failed to initialize v4l2 components");

    if (ctx.cam_pixfmt == V4L2_PIX_FMT_MJPEG) {
        CHECK_ERROR(prepare_buffers_mjpeg(&ctx), cleanup,
                "Failed to prepare v4l2 buffs");
    } else {
        CHECK_ERROR(prepare_buffers(&ctx), cleanup,
                "Failed to prepare v4l2 buffs");
    }

    CHECK_ERROR(start_stream(&ctx), cleanup,
            "Failed to start streaming");

    CHECK_ERROR(start_capture(&ctx), cleanup,
            "Failed to start capturing");

    CHECK_ERROR(stop_stream(&ctx), cleanup,
            "Failed to stop streaming");

cleanup:
    if (ctx.cam_fd > 0)
        close(ctx.cam_fd);

    if (ctx.renderer != NULL)
        delete ctx.renderer;

    if (ctx.egl_display && !eglTerminate(ctx.egl_display))
        printf("Failed to terminate EGL display connection\n");

    if (ctx.g_buff != NULL)
    {
        for (unsigned i = 0; i < V4L2_BUFFERS_NUM; i++) {
            if (ctx.g_buff[i].dmabuff_fd)
                NvBufSurf::NvDestroy(ctx.g_buff[i].dmabuff_fd);
            if (ctx.cam_pixfmt == V4L2_PIX_FMT_MJPEG)
                munmap(ctx.g_buff[i].start, ctx.g_buff[i].size);
        }
        free(ctx.g_buff);
    }

    NvBufSurf::NvDestroy(ctx.render_dmabuf_fd);

    if (error)
        printf("App run failed\n");
    else
        printf("App run was successful\n");

    return -error;
}