// v4l2_camera_main.cpp — V4L2 camera capture paced encode demo (Jetson H.264 encoder).
#include "JetsonEnc.h"

/* C / POSIX system headers */
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <stdlib.h>
#include <string.h> /* added: memset()/strerror() are used throughout this file */
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

/* C++ standard library */
#include <fstream>
#include <iostream>
#include <string>
#include <thread>

// #include <pc/video_track_source.h>
#include "../common/comm.h"
  20. char *input;
  21. int width;
  22. int height;
  23. int fps;
  24. char *output;
  25. std::ifstream yuv_file;
  26. std::ofstream output_file;
  27. #define V4L2_BUFFERS_NUM 4
  28. context_t* _ctx0=nullptr;
  29. #define ERROR_RETURN(fmt, ...) \
  30. do { \
  31. printf("ERROR: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__); \
  32. return false; \
  33. } while(0)
  34. #define INFO(fmt, ...) \
  35. if (ctx->enable_verbose) \
  36. printf("INFO: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__);
  37. #define WARN(fmt, ...) \
  38. printf("WARN: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__);
  39. class EncDataWriter : public JetsonEncListner
  40. {
  41. void OnJetsonEncData(unsigned char *data, int data_len)
  42. {
  43. output_file.write(reinterpret_cast<char *>(data), data_len);
  44. printf("write bytes :%d \n", data_len);
  45. return;
  46. }
  47. };
  48. bool request_camera_buff( context_t *ctx)
  49. {
  50. // for(int32_t i=0;i<count;i++)
  51. {
  52. // context_t * p=&_ctx[i];
  53. struct v4l2_requestbuffers rb;
  54. memset(&rb, 0, sizeof(rb));
  55. rb.count = V4L2_BUFFERS_NUM;
  56. rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  57. rb.memory = V4L2_MEMORY_DMABUF;
  58. if (ioctl( ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
  59. ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
  60. strerror(errno), errno);
  61. if (rb.count != V4L2_BUFFERS_NUM)
  62. ERROR_RETURN("V4l2 buffer number is not as desired");
  63. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  64. {
  65. struct v4l2_buffer buf;
  66. /* Query camera v4l2 buf length */
  67. memset(&buf, 0, sizeof buf);
  68. buf.index = index;
  69. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  70. buf.memory = V4L2_MEMORY_DMABUF;
  71. if (ioctl( ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
  72. ERROR_RETURN("Failed to query buff: %s (%d)",
  73. strerror(errno), errno);
  74. /* TODO: add support for multi-planer
  75. Enqueue empty v4l2 buff into camera capture plane */
  76. buf.m.fd = (unsigned long) ctx->g_buff[index].dmabuff_fd;
  77. if (buf.length != ctx->g_buff[index].size)
  78. {
  79. WARN("Camera v4l2 buf length is not expected");
  80. ctx->g_buff[index].size = buf.length;
  81. }
  82. if (ioctl( ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
  83. ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
  84. strerror(errno), errno);
  85. }
  86. }
  87. return true;
  88. }
  89. bool request_camera_buff_mmap(context_t * ctx)
  90. {
  91. struct v4l2_requestbuffers rb;
  92. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  93. // for(int32_t i=0;i<count;i++)
  94. {
  95. memset(&rb, 0, sizeof(rb));
  96. rb.count = V4L2_BUFFERS_NUM;
  97. rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  98. rb.memory = V4L2_MEMORY_MMAP;
  99. if (ioctl( ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
  100. ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
  101. strerror(errno), errno);
  102. if (rb.count != V4L2_BUFFERS_NUM)
  103. ERROR_RETURN("V4l2 buffer number is not as desired");
  104. for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
  105. {
  106. struct v4l2_buffer buf;
  107. /* Query camera v4l2 buf length */
  108. memset(&buf, 0, sizeof buf);
  109. buf.index = index;
  110. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  111. buf.memory = V4L2_MEMORY_MMAP;
  112. if (ioctl(ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
  113. ERROR_RETURN("Failed to query buff: %s (%d)",
  114. strerror(errno), errno);
  115. ctx->g_buff[index].size = buf.length;
  116. ctx->g_buff[index].start = (unsigned char *)
  117. mmap (NULL /* start anywhere */,
  118. buf.length,
  119. PROT_READ | PROT_WRITE /* required */,
  120. MAP_SHARED /* recommended */,
  121. ctx->cam_fd, buf.m.offset);
  122. if (MAP_FAILED == ctx->g_buff[index].start)
  123. ERROR_RETURN("Failed to map buffers");
  124. if (ioctl( ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
  125. ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
  126. strerror(errno), errno);
  127. }
  128. }
  129. return true;
  130. }
  131. //视频的捕获
  132. bool open_cam(context_t * ctx)
  133. {
  134. memset(ctx, 0, sizeof(context_t));
  135. {
  136. // context_t *p = _ctx0;
  137. ctx->cam_fd=-1;
  138. ctx->cam_pixfmt = V4L2_PIX_FMT_YUYV;
  139. ctx->cam_w = 1280;
  140. ctx->cam_h = 720;
  141. ctx->g_buff = NULL;
  142. ctx->capture_dmabuf = false; // opencv display v4l2 can't be true
  143. ctx->fps = 30;
  144. ctx->enable_verbose = false;
  145. std::string devname="/dev/video" + std::to_string(1);
  146. ctx->cam_fd = open(devname.c_str(), O_RDWR); //打开视频设备
  147. if( ctx->cam_fd==-1)
  148. {
  149. printf("Failed to open camera device %s: %s (%d)",
  150. devname.c_str(), strerror(errno), errno);
  151. }
  152. struct v4l2_format fmt;
  153. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  154. fmt.fmt.pix.width = ctx->cam_w;
  155. fmt.fmt.pix.height = ctx->cam_h;
  156. fmt.fmt.pix.pixelformat = ctx->cam_pixfmt;
  157. fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  158. if (ioctl( ctx->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
  159. printf("Failed to set camera output format: %s (%d)",
  160. strerror(errno), errno);
  161. /* Get the real format in case the desired is not supported */
  162. memset(&fmt, 0, sizeof fmt);
  163. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  164. if (ioctl( ctx->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
  165. printf("Failed to get camera output format: %s (%d)",
  166. strerror(errno), errno);
  167. if (fmt.fmt.pix.width != ctx->cam_w ||
  168. fmt.fmt.pix.height != ctx->cam_h ||
  169. fmt.fmt.pix.pixelformat != ctx->cam_pixfmt)
  170. {
  171. printf("The desired format is not supported");
  172. ctx->cam_w = fmt.fmt.pix.width;
  173. ctx->cam_h = fmt.fmt.pix.height;
  174. ctx->cam_pixfmt =fmt.fmt.pix.pixelformat;
  175. }
  176. struct v4l2_streamparm streamparm;
  177. memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
  178. streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  179. /*
  180. streamparm.parm.capture.timeperframe.numerator = 1;
  181. streamparm.parm.capture.timeperframe.denominator = 3;
  182. streamparm.parm.output.timeperframe.numerator = 1;
  183. streamparm.parm.output.timeperframe.denominator = 3;
  184. */
  185. ioctl ( ctx->cam_fd, VIDIOC_G_PARM, &streamparm);
  186. printf(">>: Frame rate: %u/%u\n",streamparm.parm.capture.timeperframe.numerator,streamparm.parm.capture.timeperframe.denominator);
  187. }
  188. return true;
  189. }
  190. bool prepare_buffer(context_t* ctx)
  191. {
  192. //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  193. //for(int32_t i=0;i<count;i++)
  194. {
  195. // context_t *p = _ctx0;
  196. ctx->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
  197. if ( ctx->g_buff == NULL)
  198. ERROR_RETURN("Failed to allocate global buffer context");
  199. if ( ctx->capture_dmabuf) {
  200. if (!request_camera_buff(ctx))
  201. ERROR_RETURN("Failed to set up camera buff");
  202. } else {
  203. if (!request_camera_buff_mmap(ctx))
  204. ERROR_RETURN("Failed to set up camera buff");
  205. }
  206. INFO("Succeed in preparing stream buffers");
  207. }
  208. return true;
  209. }
  210. bool start_streams(context_t* ctx)
  211. {
  212. enum v4l2_buf_type type; //是否可以注释??
  213. // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
  214. // for(int32_t i=0;i<count;i++)
  215. {
  216. // context_t * p=_ctx0;
  217. /* Start v4l2 streaming */
  218. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  219. if (ioctl( ctx->cam_fd, VIDIOC_STREAMON, &type) < 0)
  220. ERROR_RETURN("Failed to start streaming: %s (%d)",
  221. strerror(errno), errno);
  222. usleep(200);
  223. INFO("Camera video streaming on ...");
  224. }
  225. return true;
  226. }
  227. // void ConvertYUYVtoYUV420(uint8_t* yuyv, uint8_t* yuv420, int width, int height) {
  228. // int frameSize = width * height;
  229. // // unsigned char *data= malloc(frameSize);
  230. // uint8_t* yPlane = yuv420;
  231. // uint8_t* uPlane = yuv420 + frameSize;
  232. // uint8_t* vPlane = yuv420 + frameSize + (frameSize / 4);
  233. // for (int j = 0; j < height; j += 2) {
  234. // for (int i = 0; i < width; i += 2) {
  235. // int yIndex = j * width + i;
  236. // int yIndexNextRow = (j + 1) * width + i;
  237. // int uIndex = (j / 2) * (width / 2) + (i / 2);
  238. // int vIndex = uIndex;
  239. // yPlane[yIndex] = yuyv[(yIndex * 2) + 0];
  240. // yPlane[yIndex + 1] = yuyv[(yIndex * 2) + 2];
  241. // yPlane[yIndexNextRow] = yuyv[(yIndexNextRow * 2) + 0];
  242. // yPlane[yIndexNextRow + 1] = yuyv[(yIndexNextRow * 2) + 2];
  243. // uPlane[uIndex] = (yuyv[(yIndex * 2) + 1] + yuyv[(yIndexNextRow * 2) + 1]) / 2;
  244. // vPlane[vIndex] = (yuyv[(yIndex * 2) + 3] + yuyv[(yIndexNextRow * 2) + 3]) / 2;
  245. // }
  246. // }
  247. // }
  248. void YUY2toI420(int inWidth, int inHeight, uint8_t* pSrc, uint8_t* pDest)
  249. {
  250. if (pSrc == nullptr) {
  251. // 指针无效,处理错误
  252. std::cerr << "pSrc is null!" << std::endl;
  253. }
  254. printf("99999\n");
  255. int i, j;
  256. uint8_t* u = pDest + (inWidth * inHeight);
  257. uint8_t* v = u + (inWidth * inHeight) / 4;
  258. for (i = 0; i < inHeight / 2; i++) {
  259. uint8_t* src_l1 = pSrc + inWidth * 2 * 2 * i;
  260. uint8_t* src_l2 = src_l1 + inWidth * 2;
  261. uint8_t* y_l1 = pDest + inWidth * 2 * i;
  262. uint8_t* y_l2 = y_l1 + inWidth;
  263. for (j = 0; j < inWidth / 2; j++) {
  264. *y_l1++ = src_l1[0];
  265. *u++ = src_l1[1];
  266. *y_l1++ = src_l1[2];
  267. *v++ = src_l1[3];
  268. *y_l2++ = src_l2[0];
  269. *y_l2++ = src_l2[2];
  270. src_l1 += 4;
  271. src_l2 += 4;
  272. }
  273. }
  274. }
  275. int main(int argc, char **argv)
  276. {
  277. context_t ctx;
  278. // if(!open_cam(&ctx)) return;
  279. open_cam(&ctx);
  280. prepare_buffer(&ctx);
  281. start_streams(&ctx);
  282. int ret = 0;
  283. int error = 0;
  284. bool eos = false;
  285. bool _run=true;
  286. struct pollfd fds[1];
  287. struct v4l2_buffer v4l2_buf;
  288. input = argv[1];
  289. width = 1280;
  290. height = 720;
  291. fps = 30;
  292. output = "test_out.h264";
  293. // input
  294. yuv_file.open(input, std::ios::binary);
  295. if (!yuv_file.is_open()) {
  296. printf("Error opening the YUV file.\n");
  297. return 1;
  298. }
  299. size_t frame_size = width * height * 3 / 2; // YUV420P
  300. // output
  301. output_file.open(output, std::ios::binary | std::ios::app);
  302. if (!output_file.is_open()) {
  303. printf("Error opening the output file.\n");
  304. yuv_file.close();
  305. return -1;
  306. }
  307. // encoder
  308. JetsonEnc *test = new JetsonEnc(width, height, fps);
  309. EncDataWriter *writer = new EncDataWriter;
  310. int frames = 0;
  311. test->SetDecCallBack(static_cast<JetsonEncListner *>(writer)); // 将writer 作为回调函数的接收者,设置到test对象中
  312. while(_run)
  313. { printf("_run \n");
  314. // int cam_fd=-1;
  315. int cam_fd = ctx.cam_fd;
  316. // context_t * p=nullptr;
  317. fds[0].fd = cam_fd;
  318. fds[0].events = POLLIN;
  319. if(poll(fds, 1, 5000) > 0)
  320. {
  321. printf("poll\n");
  322. if (fds[0].revents & POLLIN)
  323. {
  324. printf("fds\n");
  325. /* Dequeue a camera buff */
  326. memset(&v4l2_buf, 0, sizeof(v4l2_buf));
  327. v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  328. // if (p->capture_dmabuf)
  329. if (ctx.capture_dmabuf)
  330. v4l2_buf.memory = V4L2_MEMORY_DMABUF;
  331. else
  332. v4l2_buf.memory = V4L2_MEMORY_MMAP;
  333. if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
  334. printf("Failed to dequeue camera buff: %s (%d)",
  335. strerror(errno), errno);
  336. // printf("succ \n");
  337. // char *buffer = new char[frame_size];
  338. // 准备 YUY2 输入数据
  339. // uint8_t* yuy2_data = (uint8_t* )ctx.g_buff[v4l2_buf.index].start; // YUY2 数据大小
  340. // 假设 yuy2_data 已经被填充
  341. // 准备 I420 输出数据
  342. // uint8_t* i420_data = new uint8_t[width * height * 3 / 2]; // I420 数据大小
  343. // 调用转换函数
  344. // YUY2toI420(width, height, yuy2_data, i420_data);
  345. // test->AddFrame(i420_data, frame_size);
  346. // const int result = libyuv::YUY2ToI420((uint8_t*)ctx.g_buff[v4l2_buf.index].start, 2*width,yuv,width,yuv+width*height,width/2,
  347. // yuv+5*width*height/4,width/2,width,height);
  348. char *buffer = new char[frame_size];
  349. yuv_file.read(buffer, frame_size);
  350. test->AddFrame(buffer, frame_size);
  351. // JetsonEnc中编码完会释放buffer,此处无需释放,这样做的目的是减少拷贝
  352. // delete buffer;
  353. frames++;
  354. printf("Frame : %d\n", frames);
  355. if(test->GetQueueSize() >= 5){
  356. usleep(1000 * 100);
  357. }
  358. usleep(1000 * 1000 / fps);
  359. if (ioctl(ctx.cam_fd, VIDIOC_QBUF, &v4l2_buf))
  360. printf("Failed to queue camera buffers: %s (%d)",
  361. strerror(errno), errno);
  362. }
  363. }
  364. }
  365. // while (!yuv_file.eof()) {
  366. // char *buffer = new char[frame_size];
  367. // yuv_file.read(buffer, frame_size);
  368. // test->AddFrame(buffer, frame_size);
  369. // // JetsonEnc中编码完会释放buffer,此处无需释放,这样做的目的是减少拷贝
  370. // // delete buffer;
  371. // frames++;
  372. // printf("Frame : %d\n", frames);
  373. // if(test->GetQueueSize() >= 5){
  374. // usleep(1000 * 100);
  375. // }
  376. // usleep(1000 * 1000 / fps);
  377. // }
  378. printf("file over\n");
  379. // waiting encoder finish
  380. while(test->GetQueueSize() != 0){
  381. usleep(1000 * 1000 / fps);
  382. }
  383. // release
  384. delete test;
  385. // delete writer;
  386. yuv_file.close();
  387. output_file.close();
  388. return 0;
  389. }