#include #include #include #include #include #include #include #include #include "MvGmslCamera.h" #include #include #include #include #include #include #include #define VIDEO_OUT "/dev/video8" using namespace std::chrono; using std::string; sig_atomic_t exitRequested = 0; uint camera_num = 6; struct sync_out_a_cfg_client_t stCameraCfgSend = {}; char dev_node[32] = "/dev/video0"; std::string camera_fmt_str = "UYVY"; std::string output_fmt_str = "BGRA32"; uint cam_w = 1920; uint cam_h = 1080; // true font 1 false back 0 bool MODEL = true; uint64_t timestampbefore[8] = {0}; uint64_t LinuxGetFrameTimeBefore[8] = {0}; void handler(int) { std::cout << "will exit..." << std::endl; exitRequested = true; } std::string GetCurrentTimeStamp(int time_stamp_type = 0) { std::chrono::system_clock::time_point now = std::chrono::system_clock::now(); std::time_t now_time_t = std::chrono::system_clock::to_time_t(now); std::tm *now_tm = std::localtime(&now_time_t); char buffer[128]; strftime(buffer, sizeof(buffer), "%F %T", now_tm); std::ostringstream ss; ss.fill('0'); std::chrono::milliseconds ms; std::chrono::microseconds cs; std::chrono::nanoseconds ns; switch (time_stamp_type) { case 0: ss << buffer; break; case 1: ms = std::chrono::duration_cast(now.time_since_epoch()) % 1000; ss << buffer << ":" << ms.count(); break; case 2: ms = std::chrono::duration_cast(now.time_since_epoch()) % 1000; cs = std::chrono::duration_cast(now.time_since_epoch()) % 1000000; ss << buffer << ":" << ms.count() << ":" << cs.count() % 1000; break; case 3: ms = std::chrono::duration_cast(now.time_since_epoch()) % 1000; cs = std::chrono::duration_cast(now.time_since_epoch()) % 1000000; ns = std::chrono::duration_cast(now.time_since_epoch()) % 1000000000; ss << buffer << ":" << ms.count() << ":" << cs.count() % 1000 << ":" << ns.count() % 1000; break; default: ss << buffer; break; } return ss.str(); } //------------- F 1920 * 1080 cv::Mat meger; uint meger_width = 2020; uint meger_height = 970; // 0 1 2 3 4 5 int f_frame_size[] = {1280, 640, 1280, 640, 640, 360, 640, 360, 640, 320, 640, 320}; cv::Rect f_rectCenter = cv::Rect(370, 330, f_frame_size[0], f_frame_size[1]); // font frame_0 cv::Rect f_rectCenterUp = cv::Rect(650, 0, f_frame_size[2] - 560, f_frame_size[3] - 320); // back frame_1 // cv::Rect frame1_rect = cv::Rect(160, 90, f_frame_size[2] - 320, f_frame_size[3] - 180); cv::Rect f_rectLeftDown = cv::Rect(0, 330, f_frame_size[5], f_frame_size[4]); // left frame_2 // cv::Rect frame2_rect = cv::Rect(60, 0, f_frame_size[4] - 120, f_frame_size[5]); cv::Rect f_rectRightDown = cv::Rect(1660, 330, f_frame_size[7], f_frame_size[6]); // right frame_3 // cv::Rect frame3_rect = cv::Rect(60, 0, f_frame_size[6] - 120, f_frame_size[7]); // cv::Rect f_rectLeftUp = cv::Rect(0, 0, f_frame_size[8], f_frame_size[9]); // chabao left frame_4 // cv::Rect f_rectRightUp = cv::Rect(1380, 0, f_frame_size[10], f_frame_size[11]); // chabao right frame_5 cv::Rect f_rectLeftUp = cv::Rect(1380, 0, f_frame_size[8], f_frame_size[9]); // chabao left frame_4 cv::Rect f_rectRightUp = cv::Rect(0, 0, f_frame_size[10], f_frame_size[11]); // chabao right frame_5 cv::Mat frame_0, frame_1, frame_2, frame_3, frame_4, frame_5; int is_save = false; cv::Mat fCmeraMeger(cv::Mat outMat[]) { for (uint32_t i = 0; i < camera_num; i++) { switch (i) { case 0: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); cv::imwrite("/home/nvidia/newdisk/hkpc/1.jpg", outMat[i]); } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); frame_0 = outMat[i]; break; case 1: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); cv::imwrite("/home/nvidia/newdisk/hkpc/2.jpg", outMat[i]); } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); frame_1 = outMat[i]; resize(frame_1, frame_1, cv::Size(720, 320)); break; case 2: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); //cv::imwrite("/home/nvidia/newdisk/hkpc/3.jpg", outMat[i]); } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); frame_2 = outMat[i]; // ROTATE_90_CLOCKWISE // ROTATE_180 // ROTATE_90_COUNTERCLOCKWISE cv::rotate(frame_2, frame_2, cv::ROTATE_90_CLOCKWISE); //cv::rotate(frame_2, frame_2, cv::ROTATE_90_COUNTERCLOCKWISE); break; case 3: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); //cv::imwrite("/home/nvidia/newdisk/hkpc/4.jpg", outMat[i]); is_save = false; } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); frame_3 = outMat[i]; // cv::rotate(frame_3, frame_3, cv::ROTATE_90_CLOCKWISE); cv::rotate(frame_3, frame_3, cv::ROTATE_90_COUNTERCLOCKWISE); break; case 4: cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); frame_4 = outMat[i]; break; case 5: cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); frame_5 = outMat[i]; break; default: break; } } // cv::Mat f_meger = cv::Mat::zeros(meger_height, meger_width, frame_0.type()); frame_0.copyTo(meger(f_rectCenter)); frame_1.copyTo(meger(f_rectCenterUp)); frame_2.copyTo(meger(f_rectLeftDown)); frame_3.copyTo(meger(f_rectRightDown)); frame_4.copyTo(meger(f_rectLeftUp)); frame_5.copyTo(meger(f_rectRightUp)); return meger; } // uint meger_width = 1060; // uint meger_height = 550; //-------R 1902 * 852 //font_0 back_1 left_2 right_3 cha1_4 cha2_5 int r_frame_size[] = {1280, 640, 1280, 640, 640, 360, 640, 360, 640, 320, 640, 320}; cv::Rect r_rectCenterUp = cv::Rect(650, 0, r_frame_size[0] - 560, r_frame_size[1] - 320); // font frame_0 //cv::Rect r_frame0_rect = cv::Rect(120, 90, r_frame_size[0] - 240, r_frame_size[1] - 180); cv::Rect r_rectCenter = cv::Rect(370, 330, r_frame_size[2], r_frame_size[3]); // back frame_1 cv::Rect r_rectRightDown = cv::Rect(1660, 330, r_frame_size[5], r_frame_size[4]); // left frame_2 // cv::Rect r_frame2_rect = cv::Rect(60, 0, r_frame_size[4] - 120, r_frame_size[5]); cv::Rect r_rectLeftDown = cv::Rect(0, 330, r_frame_size[7], r_frame_size[6]); // right frame_3 // cv::Rect r_frame3_rect = cv::Rect(60, 0, r_frame_size[6] - 120, r_frame_size[7]); cv::Rect r_rectRightUp = cv::Rect(1380, 0, r_frame_size[8], r_frame_size[9]); // chabao left frame_4 cv::Rect r_rectLeftUp = cv::Rect(0, 0, r_frame_size[10], r_frame_size[11]); // chabao right frame_5 cv::Mat r_frame_0, r_frame_1, r_frame_2, r_frame_3, r_frame_4, r_frame_5; cv::Mat rCmeraMeger(cv::Mat outMat[]) { for (uint32_t i = 0; i < camera_num; i++) { switch (i) { case 0: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); cv::imwrite("/home/nvidia/newdisk/hkpc/1.jpg", outMat[i]); } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); r_frame_0 = outMat[i]; resize(r_frame_0, r_frame_0, cv::Size(720, 320)); break; case 1: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); cv::imwrite("/home/nvidia/newdisk/hkpc/2.jpg", outMat[i]); } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); r_frame_1 = outMat[i]; break; case 2: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); //cv::imwrite("/home/nvidia/newdisk/hkpc/3.jpg", outMat[i]); } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); r_frame_2 = outMat[i]; cv::rotate(r_frame_2, r_frame_2, cv::ROTATE_90_CLOCKWISE); break; case 3: if(is_save){ // auto now = std::chrono::system_clock::now(); // auto timestamp_ms = std::chrono::duration_cast(now.time_since_epoch()).count(); // cv::imwrite("/home/nvidia/newdisk/hkpc/images/"+std::to_string(timestamp_ms)+".jpg", outMat[i]); //cv::imwrite("/home/nvidia/newdisk/hkpc/4.jpg", outMat[i]); is_save = false; } cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); r_frame_3 = outMat[i]; cv::rotate(r_frame_3, r_frame_3, cv::ROTATE_90_COUNTERCLOCKWISE); break; case 4: cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); r_frame_4 = outMat[i]; break; case 5: cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2RGB); r_frame_5 = outMat[i]; break; default: break; } } // cv::Mat f_meger = cv::Mat::zeros(meger_height, meger_width, frame_0.type()); r_frame_0.copyTo(meger(r_rectCenterUp)); r_frame_1.copyTo(meger(r_rectCenter)); r_frame_2.copyTo(meger(r_rectRightDown)); r_frame_3.copyTo(meger(r_rectLeftDown)); r_frame_4.copyTo(meger(r_rectRightUp)); r_frame_5.copyTo(meger(r_rectLeftUp)); return meger; } // 线程 std::ifstream inFile; char c; bool current_model = true; void thread_1() { while (1) { sleep(0.1); try { std::ifstream inFile("/tmp/vedioFront", std::ios::in | std::ios::binary); if (!inFile) { // std::cout << "error" << std::endl; // return 0; } else { inFile.get(c); inFile.close(); current_model = c - '0'; } } catch (const std::exception &e) { std::cerr << e.what() << '\n'; } } } /*demo程序主函数,分别打开n个窗口,并通过反复分别调用GetImageCvMat和GetImagePtr接口获取图像和时间戳在窗口中显示*/ int main(int argc, char *argv[]) { // open output device int output = open(VIDEO_OUT, O_RDWR); if (output < 0) { std::cerr << "ERROR: could not open output device!\n" << strerror(errno); return -2; } // configure params for output device struct v4l2_format vid_format; memset(&vid_format, 0, sizeof(vid_format)); vid_format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; if (ioctl(output, VIDIOC_G_FMT, &vid_format) < 0) { std::cerr << "ERROR: unable to get video format!\n" << strerror(errno); return -1; } // output to video8 size_t framesize = meger_width * meger_height * 3; vid_format.fmt.pix.width = meger_width; vid_format.fmt.pix.height = meger_height; vid_format.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; vid_format.fmt.pix.sizeimage = framesize; vid_format.fmt.pix.field = V4L2_FIELD_NONE; if (ioctl(output, VIDIOC_S_FMT, &vid_format) < 0) { std::cerr << "ERROR: unable to set video format!\n" << strerror(errno); return -1; } camera_context_t ctx[8] = {}; stCameraCfgSend.async_camera_num = 0; stCameraCfgSend.async_freq = 0; stCameraCfgSend.async_camera_bit_draw = 0; stCameraCfgSend.sync_camera_num = 8; stCameraCfgSend.sync_freq = 30; stCameraCfgSend.sync_camera_bit_draw = 0xff; // setenv("CHECK_TIME","0",0); char dev_node_tmp = dev_node[10]; for (int i = 0; i < camera_num; i++) { dev_node[10] = dev_node_tmp + i; ctx[i].dev_node = dev_node; ctx[i].camera_fmt_str = camera_fmt_str; ctx[i].output_fmt_str = output_fmt_str; ctx[i].cam_w = cam_w; ctx[i].cam_h = cam_h; ctx[i].out_w = f_frame_size[i * 2]; ctx[i].out_h = f_frame_size[i * 2 + 1]; } miivii::MvGmslCamera mvcam(ctx, camera_num, stCameraCfgSend); cv::Mat outMat[camera_num]; uint8_t *outbuf[camera_num]; cv::Mat imgbuf[camera_num]; signal(SIGINT, &handler); bool quit = false; uint64_t timestamp; // background meger = cv::imread("./base.jpg"); cv::cvtColor(meger, meger, cv::COLOR_BGRA2RGB); resize(meger, meger, cv::Size(meger_width, meger_height)); uint8_t camera_no = dev_node[10] - 0x30; cv::Mat mege_frame; int count = 0; bool is_open_file = false; // 读取文件 std::thread file_thread(thread_1); file_thread.detach(); while (!quit) { if (exitRequested) { quit = true; break; } // count++; // if(count > 30){ // is_save = true; // count = 0; // } /*use cv data to get image*/ if (mvcam.GetImageCvMat(outMat, timestamp, camera_no)) { if (current_model != MODEL) { meger = cv::imread("./base.jpg"); cv::cvtColor(meger, meger, cv::COLOR_BGRA2RGB); resize(meger, meger, cv::Size(meger_width, meger_height)); MODEL = current_model; } if (MODEL) { mege_frame = fCmeraMeger(outMat); } else { mege_frame = rCmeraMeger(outMat); } cv::putText(mege_frame, GetCurrentTimeStamp(0), cv::Point(870, 25), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(255, 255, 255), 2); // write frame to output device size_t written = write(output, mege_frame.data, framesize); if (written < 0) { std::cerr << "ERROR: could not write to output device!\n"; close(output); break; } } else { std::cerr << "Can't get image form camera." << std::endl; } if (cv::waitKey(1) == 27) { break; } sleep(0.002); } close(output); return 0; }