main_20240823.cpp

#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <string>
#include <iostream>
#include <csignal>
#include <cstring>  // memset, strerror
#include <cerrno>   // errno
#include <ctime>    // std::localtime, strftime
#include <sstream>  // std::ostringstream
#include <fstream>
#include <chrono>
#include <thread>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include "MvGmslCamera.h"
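
// VIDEO_OUT is the V4L2 output node the composed mosaic is written to (assumed
// here to be a v4l2loopback device on this system). A minimal way to preview
// the stream, assuming GStreamer is installed, would be something like:
//   gst-launch-1.0 v4l2src device=/dev/video8 ! videoconvert ! autovideosink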
#define VIDEO_OUT "/dev/video8"

using namespace std::chrono;
using std::string;

volatile sig_atomic_t exitRequested = 0;
uint camera_num = 6;
struct sync_out_a_cfg_client_t stCameraCfgSend = {};
char dev_node[32] = "/dev/video0";
std::string camera_fmt_str = "UYVY";
std::string output_fmt_str = "BGRA32";
uint cam_w = 1920;
uint cam_h = 1080;
// layout mode: true = front (1), false = back (0)
bool MODEL = true;
uint64_t timestampbefore[8] = {0};
uint64_t LinuxGetFrameTimeBefore[8] = {0};
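
// SIGINT handler: only sets the exit flag; the main loop polls it and shuts
// down cleanly.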
void handler(int)
{
    std::cout << "will exit..." << std::endl;
    exitRequested = 1;
}
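
// Formats the current wall-clock time as "YYYY-MM-DD HH:MM:SS", optionally
// appending sub-second fields depending on time_stamp_type:
//   0 = seconds only, 1 = +milliseconds, 2 = +microseconds, 3 = +nanoseconds.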
std::string GetCurrentTimeStamp(int time_stamp_type = 0)
{
    std::chrono::system_clock::time_point now = std::chrono::system_clock::now();
    std::time_t now_time_t = std::chrono::system_clock::to_time_t(now);
    std::tm *now_tm = std::localtime(&now_time_t);
    char buffer[128];
    strftime(buffer, sizeof(buffer), "%F %T", now_tm);

    std::ostringstream ss;
    ss.fill('0');

    std::chrono::milliseconds ms;
    std::chrono::microseconds cs;
    std::chrono::nanoseconds ns;
    switch (time_stamp_type)
    {
    case 0:
        ss << buffer;
        break;
    case 1:
        ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()) % 1000;
        ss << buffer << ":" << ms.count();
        break;
    case 2:
        ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()) % 1000;
        cs = std::chrono::duration_cast<std::chrono::microseconds>(now.time_since_epoch()) % 1000000;
        ss << buffer << ":" << ms.count() << ":" << cs.count() % 1000;
        break;
    case 3:
        ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()) % 1000;
        cs = std::chrono::duration_cast<std::chrono::microseconds>(now.time_since_epoch()) % 1000000;
        ns = std::chrono::duration_cast<std::chrono::nanoseconds>(now.time_since_epoch()) % 1000000000;
        ss << buffer << ":" << ms.count() << ":" << cs.count() % 1000 << ":" << ns.count() % 1000;
        break;
    default:
        ss << buffer;
        break;
    }
    return ss.str();
}
//------------- Front (F) layout, cameras 1920 * 1080
cv::Mat meger;
uint meger_width = 2020;
uint meger_height = 970;
// per-camera output sizes (w, h) for cams 0..5
int f_frame_size[] = {1280, 640, 1280, 640, 640, 360, 640, 360, 640, 320, 640, 320};
cv::Rect f_rectCenter = cv::Rect(370, 330, f_frame_size[0], f_frame_size[1]);             // front frame_0
cv::Rect f_rectCenterUp = cv::Rect(650, 0, f_frame_size[2] - 560, f_frame_size[3] - 320); // back  frame_1
// cv::Rect frame1_rect = cv::Rect(160, 90, f_frame_size[2] - 320, f_frame_size[3] - 180);
cv::Rect f_rectLeftDown = cv::Rect(0, 330, f_frame_size[5], f_frame_size[4]);             // left  frame_2
// cv::Rect frame2_rect = cv::Rect(60, 0, f_frame_size[4] - 120, f_frame_size[5]);
cv::Rect f_rectRightDown = cv::Rect(1660, 330, f_frame_size[7], f_frame_size[6]);         // right frame_3
// cv::Rect frame3_rect = cv::Rect(60, 0, f_frame_size[6] - 120, f_frame_size[7]);
// cv::Rect f_rectLeftUp = cv::Rect(0, 0, f_frame_size[8], f_frame_size[9]);               // chabao left  frame_4
// cv::Rect f_rectRightUp = cv::Rect(1380, 0, f_frame_size[10], f_frame_size[11]);         // chabao right frame_5
cv::Rect f_rectLeftUp = cv::Rect(1380, 0, f_frame_size[8], f_frame_size[9]);              // chabao left  frame_4
cv::Rect f_rectRightUp = cv::Rect(0, 0, f_frame_size[10], f_frame_size[11]);              // chabao right frame_5
cv::Mat frame_0, frame_1, frame_2, frame_3, frame_4, frame_5;
bool is_save = false;
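
// Compose the "front" layout: every camera frame is converted BGRA -> RGB, then
// resized/rotated as needed and copied into its rectangle on the shared
// background canvas `meger`:
//   cam0 front -> center,                 cam1 back  -> top center (720x320),
//   cam2 left  -> left column (90° CW),   cam3 right -> right column (90° CCW),
//   cam4/cam5  -> auxiliary ("chabao") views in the top corners.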
cv::Mat fCmeraMeger(cv::Mat outMat[])
{
    for (uint32_t i = 0; i < camera_num; i++)
    {
        switch (i)
        {
        case 0:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/1.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_0 = outMat[i];
            break;
        case 1:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/2.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_1 = outMat[i];
            resize(frame_1, frame_1, cv::Size(720, 320));
            break;
        case 2:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/3.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_2 = outMat[i];
            // ROTATE_90_CLOCKWISE
            // ROTATE_180
            // ROTATE_90_COUNTERCLOCKWISE
            cv::rotate(frame_2, frame_2, cv::ROTATE_90_CLOCKWISE);
            // cv::rotate(frame_2, frame_2, cv::ROTATE_90_COUNTERCLOCKWISE);
            break;
        case 3:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/4.jpg", outMat[i]);
                is_save = false;
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_3 = outMat[i];
            // cv::rotate(frame_3, frame_3, cv::ROTATE_90_CLOCKWISE);
            cv::rotate(frame_3, frame_3, cv::ROTATE_90_COUNTERCLOCKWISE);
            break;
        case 4:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_4 = outMat[i];
            break;
        case 5:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            frame_5 = outMat[i];
            break;
        default:
            break;
        }
    }
    // cv::Mat f_meger = cv::Mat::zeros(meger_height, meger_width, frame_0.type());
    frame_0.copyTo(meger(f_rectCenter));
    frame_1.copyTo(meger(f_rectCenterUp));
    frame_2.copyTo(meger(f_rectLeftDown));
    frame_3.copyTo(meger(f_rectRightDown));
    frame_4.copyTo(meger(f_rectLeftUp));
    frame_5.copyTo(meger(f_rectRightUp));
    return meger;
}
// uint meger_width = 1060;
// uint meger_height = 550;
//------------- Rear (R) layout, 1902 * 852 // front_0 back_1 left_2 right_3 cha1_4 cha2_5
int r_frame_size[] = {1280, 640, 1280, 640, 640, 360, 640, 360, 640, 320, 640, 320};
cv::Rect r_rectCenterUp = cv::Rect(650, 0, r_frame_size[0] - 560, r_frame_size[1] - 320); // front frame_0
// cv::Rect r_frame0_rect = cv::Rect(120, 90, r_frame_size[0] - 240, r_frame_size[1] - 180);
cv::Rect r_rectCenter = cv::Rect(370, 330, r_frame_size[2], r_frame_size[3]);             // back  frame_1
cv::Rect r_rectRightDown = cv::Rect(1660, 330, r_frame_size[5], r_frame_size[4]);         // left  frame_2
// cv::Rect r_frame2_rect = cv::Rect(60, 0, r_frame_size[4] - 120, r_frame_size[5]);
cv::Rect r_rectLeftDown = cv::Rect(0, 330, r_frame_size[7], r_frame_size[6]);             // right frame_3
// cv::Rect r_frame3_rect = cv::Rect(60, 0, r_frame_size[6] - 120, r_frame_size[7]);
cv::Rect r_rectRightUp = cv::Rect(1380, 0, r_frame_size[8], r_frame_size[9]);             // chabao left  frame_4
cv::Rect r_rectLeftUp = cv::Rect(0, 0, r_frame_size[10], r_frame_size[11]);               // chabao right frame_5
cv::Mat r_frame_0, r_frame_1, r_frame_2, r_frame_3, r_frame_4, r_frame_5;
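
// Compose the "rear" layout: same conversions and canvas as fCmeraMeger, but
// with the roles mirrored -- cam0 (front) is shrunk to 720x320 at the top
// center, cam1 (back) fills the center, and the left/right/auxiliary tiles
// swap sides.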
cv::Mat rCmeraMeger(cv::Mat outMat[])
{
    for (uint32_t i = 0; i < camera_num; i++)
    {
        switch (i)
        {
        case 0:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/1.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_0 = outMat[i];
            resize(r_frame_0, r_frame_0, cv::Size(720, 320));
            break;
        case 1:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                cv::imwrite("/home/nvidia/newdisk/hkpc/2.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_1 = outMat[i];
            break;
        case 2:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/3.jpg", outMat[i]);
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_2 = outMat[i];
            cv::rotate(r_frame_2, r_frame_2, cv::ROTATE_90_CLOCKWISE);
            break;
        case 3:
            if (is_save)
            {
                // auto now = std::chrono::system_clock::now();
                // auto timestamp_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now.time_since_epoch()).count();
                // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]);
                // cv::imwrite("/home/nvidia/newdisk/hkpc/4.jpg", outMat[i]);
                is_save = false;
            }
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_3 = outMat[i];
            cv::rotate(r_frame_3, r_frame_3, cv::ROTATE_90_COUNTERCLOCKWISE);
            break;
        case 4:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_4 = outMat[i];
            break;
        case 5:
            cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB);
            r_frame_5 = outMat[i];
            break;
        default:
            break;
        }
    }
    // cv::Mat f_meger = cv::Mat::zeros(meger_height, meger_width, frame_0.type());
    r_frame_0.copyTo(meger(r_rectCenterUp));
    r_frame_1.copyTo(meger(r_rectCenter));
    r_frame_2.copyTo(meger(r_rectRightDown));
    r_frame_3.copyTo(meger(r_rectLeftDown));
    r_frame_4.copyTo(meger(r_rectRightUp));
    r_frame_5.copyTo(meger(r_rectLeftUp));
    return meger;
}
// state shared with the layout-switch watcher thread
std::ifstream inFile;
char c;
bool current_model = true;
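
// Layout-switch watcher: polls /tmp/vedioFront roughly every 100 ms and reads a
// single character ('1' = front layout, '0' = rear layout) into current_model,
// which the main loop compares against MODEL to decide which mosaic to build.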
void thread_1()
{
    while (1)
    {
        // ~100 ms poll interval (sleep() only takes whole seconds, so use sleep_for)
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        try
        {
            std::ifstream inFile("/tmp/vedioFront", std::ios::in | std::ios::binary);
            if (!inFile)
            {
                // std::cout << "error" << std::endl;
                // return 0;
            }
            else
            {
                inFile.get(c);
                inFile.close();
                current_model = c - '0';
            }
        }
        catch (const std::exception &e)
        {
            std::cerr << e.what() << '\n';
        }
    }
}
/* Demo main function: originally it opened n windows and repeatedly called the
 * GetImageCvMat / GetImagePtr interfaces to fetch images and timestamps and
 * display them in the windows; this version composes the frames into a single
 * mosaic and writes it to the V4L2 output device instead. */
int main(int argc, char *argv[])
{
    // open the output device
    int output = open(VIDEO_OUT, O_RDWR);
    if (output < 0)
    {
        std::cerr << "ERROR: could not open output device!\n"
                  << strerror(errno);
        return -2;
    }

    // configure the output device: read the current format, then force an
    // RGB24 frame that matches the mosaic canvas
    struct v4l2_format vid_format;
    memset(&vid_format, 0, sizeof(vid_format));
    vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    if (ioctl(output, VIDIOC_G_FMT, &vid_format) < 0)
    {
        std::cerr << "ERROR: unable to get video format!\n"
                  << strerror(errno);
        return -1;
    }
    size_t framesize = meger_width * meger_height * 3; // RGB24: 3 bytes per pixel
    vid_format.fmt.pix.width = meger_width;
    vid_format.fmt.pix.height = meger_height;
    vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    vid_format.fmt.pix.sizeimage = framesize;
    vid_format.fmt.pix.field = V4L2_FIELD_NONE;
    if (ioctl(output, VIDIOC_S_FMT, &vid_format) < 0)
    {
        std::cerr << "ERROR: unable to set video format!\n"
                  << strerror(errno);
        return -1;
    }
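
    // Capture-side configuration: all 8 GMSL channels run in synchronized mode
    // at 30 Hz (sync_camera_bit_draw = 0xff selects every channel) and the async
    // path is disabled; only the first camera_num channels are opened below.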
    camera_context_t ctx[8] = {};
    stCameraCfgSend.async_camera_num = 0;
    stCameraCfgSend.async_freq = 0;
    stCameraCfgSend.async_camera_bit_draw = 0;
    stCameraCfgSend.sync_camera_num = 8;
    stCameraCfgSend.sync_freq = 30;
    stCameraCfgSend.sync_camera_bit_draw = 0xff;
    // setenv("CHECK_TIME","0",0);

    // build /dev/video0 .. /dev/videoN by bumping the trailing digit; every
    // channel captures UYVY 1920x1080 and is scaled to its mosaic tile size
    char dev_node_tmp = dev_node[10];
    for (uint i = 0; i < camera_num; i++)
    {
        dev_node[10] = dev_node_tmp + i;
        ctx[i].dev_node = dev_node;
        ctx[i].camera_fmt_str = camera_fmt_str;
        ctx[i].output_fmt_str = output_fmt_str;
        ctx[i].cam_w = cam_w;
        ctx[i].cam_h = cam_h;
        ctx[i].out_w = f_frame_size[i * 2];
        ctx[i].out_h = f_frame_size[i * 2 + 1];
    }
    miivii::MvGmslCamera mvcam(ctx, camera_num, stCameraCfgSend);
    cv::Mat outMat[camera_num];
    uint8_t *outbuf[camera_num];
    cv::Mat imgbuf[camera_num];
    signal(SIGINT, &handler);
    bool quit = false;
    uint64_t timestamp;

    // background canvas (imread returns 3-channel BGR, so convert BGR -> RGB)
    meger = cv::imread("./base.jpg");
    cv::cvtColor(meger, meger, cv::COLOR_BGR2RGB);
    resize(meger, meger, cv::Size(meger_width, meger_height));

    uint8_t camera_no = dev_node[10] - '0';
    cv::Mat mege_frame;
    int count = 0;
    bool is_open_file = false;

    // start the watcher thread that switches between front/rear layouts
    std::thread file_thread(thread_1);
    file_thread.detach();
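
    // Main loop: fetch one synchronized set of frames, rebuild the background
    // canvas whenever the layout mode changes, compose the front or rear mosaic,
    // stamp the current time onto it, and push the raw RGB24 buffer to the
    // output device.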
    while (!quit)
    {
        if (exitRequested)
        {
            quit = true;
            break;
        }
        // count++;
        // if (count > 30) {
        //     is_save = true;
        //     count = 0;
        // }

        /* fetch one synchronized set of frames as cv::Mat */
        if (mvcam.GetImageCvMat(outMat, timestamp, camera_no))
        {
            if (current_model != MODEL)
            {
                // layout changed: reload the background canvas
                meger = cv::imread("./base.jpg");
                cv::cvtColor(meger, meger, cv::COLOR_BGR2RGB);
                resize(meger, meger, cv::Size(meger_width, meger_height));
                MODEL = current_model;
            }
            if (MODEL)
            {
                mege_frame = fCmeraMeger(outMat);
            }
            else
            {
                mege_frame = rCmeraMeger(outMat);
            }
            cv::putText(mege_frame, GetCurrentTimeStamp(0), cv::Point(870, 25), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(255, 255, 255), 2);

            // write the composed frame to the output device
            // (write returns ssize_t; storing it in size_t would make the < 0 check useless)
            ssize_t written = write(output, mege_frame.data, framesize);
            if (written < 0)
            {
                std::cerr << "ERROR: could not write to output device!\n";
                close(output);
                break;
            }
        }
        else
        {
            std::cerr << "Can't get image from camera." << std::endl;
        }
        if (cv::waitKey(1) == 27) // ESC
        {
            break;
        }
        // short pause between iterations (sleep() only takes whole seconds)
        std::this_thread::sleep_for(std::chrono::milliseconds(2));
    }
    close(output);
    return 0;
}