- #include "JetsonEnc.h"
- #include <fstream>
- #include <iostream>
- #include <unistd.h>
- #include <linux/videodev2.h>
- #include <poll.h>
- #include <sys/ioctl.h>
- #include <sys/stat.h>
- #include <sys/mman.h>
- #include <fcntl.h>
- #include <errno.h>
- #include <string>
- #include <string.h> // memset(), strerror()
- #include <stdlib.h>
- #include <thread>
- // #include <pc/video_track_source.h>
- #include "../common/comm.h"
- char *input;
- int width;
- int height;
- int fps;
- const char *output;
- std::ifstream yuv_file;
- std::ofstream output_file;
- #define V4L2_BUFFERS_NUM 4
- context_t* _ctx0=nullptr;
- #define ERROR_RETURN(fmt, ...) \
- do { \
- printf("ERROR: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__); \
- return false; \
- } while(0)
- #define INFO(fmt, ...) \
- if (ctx->enable_verbose) \
- printf("INFO: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__);
- #define WARN(fmt, ...) \
- printf("WARN: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__);
- class EncDataWriter : public JetsonEncListner
- {
-
- void OnJetsonEncData(unsigned char *data, int data_len)
- {
- output_file.write(reinterpret_cast<char *>(data), data_len);
- printf("write bytes :%d \n", data_len);
- return;
- }
- };
- bool request_camera_buff( context_t *ctx)
- {
-
- // for(int32_t i=0;i<count;i++)
- {
- // context_t * p=&_ctx[i];
- struct v4l2_requestbuffers rb;
- memset(&rb, 0, sizeof(rb));
- rb.count = V4L2_BUFFERS_NUM;
- rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- rb.memory = V4L2_MEMORY_DMABUF;
- if (ioctl( ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
- ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
- strerror(errno), errno);
- if (rb.count != V4L2_BUFFERS_NUM)
- ERROR_RETURN("V4l2 buffer number is not as desired");
- for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
- {
- struct v4l2_buffer buf;
- /* Query camera v4l2 buf length */
- memset(&buf, 0, sizeof buf);
- buf.index = index;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_DMABUF;
- if (ioctl( ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
- ERROR_RETURN("Failed to query buff: %s (%d)",
- strerror(errno), errno);
- /* TODO: add support for multi-planer
- Enqueue empty v4l2 buff into camera capture plane */
- buf.m.fd = (unsigned long) ctx->g_buff[index].dmabuff_fd;
- if (buf.length != ctx->g_buff[index].size)
- {
- WARN("Camera v4l2 buf length is not expected");
- ctx->g_buff[index].size = buf.length;
- }
- if (ioctl( ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
- ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
- strerror(errno), errno);
- }
- }
- return true;
- }
- bool request_camera_buff_mmap(context_t * ctx)
- {
- struct v4l2_requestbuffers rb;
- // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
- // for(int32_t i=0;i<count;i++)
- {
-
- memset(&rb, 0, sizeof(rb));
- rb.count = V4L2_BUFFERS_NUM;
- rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- rb.memory = V4L2_MEMORY_MMAP;
- if (ioctl( ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
- ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
- strerror(errno), errno);
- if (rb.count != V4L2_BUFFERS_NUM)
- ERROR_RETURN("V4l2 buffer number is not as desired");
- for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
- {
- struct v4l2_buffer buf;
- /* Query camera v4l2 buf length */
- memset(&buf, 0, sizeof buf);
- buf.index = index;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_MMAP;
- if (ioctl(ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
- ERROR_RETURN("Failed to query buff: %s (%d)",
- strerror(errno), errno);
- ctx->g_buff[index].size = buf.length;
- ctx->g_buff[index].start = (unsigned char *)
- mmap (NULL /* start anywhere */,
- buf.length,
- PROT_READ | PROT_WRITE /* required */,
- MAP_SHARED /* recommended */,
- ctx->cam_fd, buf.m.offset);
- if (MAP_FAILED == ctx->g_buff[index].start)
- ERROR_RETURN("Failed to map buffers");
- if (ioctl( ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
- ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
- strerror(errno), errno);
- }
- }
- return true;
- }
- // Video capture: open the V4L2 camera device and configure its format and frame rate
- bool open_cam(context_t * ctx)
- {
- memset(ctx, 0, sizeof(context_t));
-
- {
- // context_t *p = _ctx0;
-
- ctx->cam_fd=-1;
- ctx->cam_pixfmt = V4L2_PIX_FMT_YUYV;
- ctx->cam_w = 1280;
- ctx->cam_h = 720;
- ctx->g_buff = NULL;
- ctx->capture_dmabuf = false; // must stay false when frames are displayed through OpenCV/V4L2 (DMABUF capture not supported there)
- ctx->fps = 30;
- ctx->enable_verbose = false;
- std::string devname="/dev/video" + std::to_string(1);
-
- ctx->cam_fd = open(devname.c_str(), O_RDWR); // open the video device
- if (ctx->cam_fd == -1)
- {
- printf("Failed to open camera device %s: %s (%d)\n",
- devname.c_str(), strerror(errno), errno);
- return false;
- }
- struct v4l2_format fmt;
- memset(&fmt, 0, sizeof(fmt));
- fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- fmt.fmt.pix.width = ctx->cam_w;
- fmt.fmt.pix.height = ctx->cam_h;
- fmt.fmt.pix.pixelformat = ctx->cam_pixfmt;
- fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
- if (ioctl( ctx->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
- printf("Failed to set camera output format: %s (%d)",
- strerror(errno), errno);
- /* Get the real format in case the desired is not supported */
- memset(&fmt, 0, sizeof fmt);
- fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl( ctx->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
- printf("Failed to get camera output format: %s (%d)",
- strerror(errno), errno);
- if (fmt.fmt.pix.width != ctx->cam_w ||
- fmt.fmt.pix.height != ctx->cam_h ||
- fmt.fmt.pix.pixelformat != ctx->cam_pixfmt)
- {
- printf("The desired format is not supported");
- ctx->cam_w = fmt.fmt.pix.width;
- ctx->cam_h = fmt.fmt.pix.height;
- ctx->cam_pixfmt =fmt.fmt.pix.pixelformat;
- }
- struct v4l2_streamparm streamparm;
- memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
- streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- /*
- streamparm.parm.capture.timeperframe.numerator = 1;
- streamparm.parm.capture.timeperframe.denominator = 3;
- streamparm.parm.output.timeperframe.numerator = 1;
- streamparm.parm.output.timeperframe.denominator = 3;
- */
- ioctl ( ctx->cam_fd, VIDIOC_G_PARM, &streamparm);
- printf(">>: Frame rate: %u/%u\n",streamparm.parm.capture.timeperframe.numerator,streamparm.parm.capture.timeperframe.denominator);
- }
-
- return true;
- }
- bool prepare_buffer(context_t* ctx)
- {
- //int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
- //for(int32_t i=0;i<count;i++)
- {
- // context_t *p = _ctx0;
- ctx->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
- if ( ctx->g_buff == NULL)
- ERROR_RETURN("Failed to allocate global buffer context");
- memset(ctx->g_buff, 0, V4L2_BUFFERS_NUM * sizeof(nv_buffer));
- if ( ctx->capture_dmabuf) {
- if (!request_camera_buff(ctx))
- ERROR_RETURN("Failed to set up camera buff");
- } else {
- if (!request_camera_buff_mmap(ctx))
- ERROR_RETURN("Failed to set up camera buff");
- }
- INFO("Succeed in preparing stream buffers");
- }
-
- return true;
- }
- bool start_streams(context_t* ctx)
- {
- enum v4l2_buf_type type; // could this be commented out??
- // int32_t count=_op->GetType()==RenderPosition::FRONT_BACK?2:1;
- // for(int32_t i=0;i<count;i++)
- {
- // context_t * p=_ctx0;
-
- /* Start v4l2 streaming */
- type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl( ctx->cam_fd, VIDIOC_STREAMON, &type) < 0)
- ERROR_RETURN("Failed to start streaming: %s (%d)",
- strerror(errno), errno);
- usleep(200);
- INFO("Camera video streaming on ...");
- }
-
- return true;
- }
- // void ConvertYUYVtoYUV420(uint8_t* yuyv, uint8_t* yuv420, int width, int height) {
-
-
- // int frameSize = width * height;
- // // unsigned char *data= malloc(frameSize);
- // uint8_t* yPlane = yuv420;
- // uint8_t* uPlane = yuv420 + frameSize;
- // uint8_t* vPlane = yuv420 + frameSize + (frameSize / 4);
- // for (int j = 0; j < height; j += 2) {
- // for (int i = 0; i < width; i += 2) {
- // int yIndex = j * width + i;
- // int yIndexNextRow = (j + 1) * width + i;
- // int uIndex = (j / 2) * (width / 2) + (i / 2);
- // int vIndex = uIndex;
- // yPlane[yIndex] = yuyv[(yIndex * 2) + 0];
- // yPlane[yIndex + 1] = yuyv[(yIndex * 2) + 2];
- // yPlane[yIndexNextRow] = yuyv[(yIndexNextRow * 2) + 0];
- // yPlane[yIndexNextRow + 1] = yuyv[(yIndexNextRow * 2) + 2];
- // uPlane[uIndex] = (yuyv[(yIndex * 2) + 1] + yuyv[(yIndexNextRow * 2) + 1]) / 2;
- // vPlane[vIndex] = (yuyv[(yIndex * 2) + 3] + yuyv[(yIndexNextRow * 2) + 3]) / 2;
- // }
- // }
- // }
- void YUY2toI420(int inWidth, int inHeight, uint8_t* pSrc, uint8_t* pDest)
- {
- if (pSrc == nullptr || pDest == nullptr) {
- // invalid pointer: report the error and bail out
- std::cerr << "YUY2toI420: null buffer pointer" << std::endl;
- return;
- }
- int i, j;
- uint8_t* u = pDest + (inWidth * inHeight);
- uint8_t* v = u + (inWidth * inHeight) / 4;
-
- for (i = 0; i < inHeight / 2; i++) {
- uint8_t* src_l1 = pSrc + inWidth * 2 * 2 * i;
- uint8_t* src_l2 = src_l1 + inWidth * 2;
- uint8_t* y_l1 = pDest + inWidth * 2 * i;
- uint8_t* y_l2 = y_l1 + inWidth;
- for (j = 0; j < inWidth / 2; j++) {
- *y_l1++ = src_l1[0];
- *u++ = src_l1[1];
- *y_l1++ = src_l1[2];
- *v++ = src_l1[3];
- *y_l2++ = src_l2[0];
- *y_l2++ = src_l2[2];
- src_l1 += 4;
- src_l2 += 4;
- }
- }
- }
- int main(int argc, char **argv)
- {
- context_t ctx;
- if (!open_cam(&ctx)) return -1;
- if (!prepare_buffer(&ctx)) return -1;
- if (!start_streams(&ctx)) return -1;
- int ret = 0;
- int error = 0;
- bool eos = false;
- bool _run=true;
- struct pollfd fds[1];
-
- struct v4l2_buffer v4l2_buf;
- // usage: <binary> <input.yuv> (1280x720 I420 frames); encoded H.264 goes to test_out.h264
- if (argc < 2) {
- printf("Usage: %s <input.yuv>\n", argv[0]);
- return 1;
- }
- input = argv[1];
- width = 1280;
- height = 720;
- fps = 30;
- output = "test_out.h264";
- // input
- yuv_file.open(input, std::ios::binary);
- if (!yuv_file.is_open()) {
- printf("Error opening the YUV file.\n");
- return 1;
- }
- size_t frame_size = width * height * 3 / 2; // YUV420P
- // output
- output_file.open(output, std::ios::binary | std::ios::app);
- if (!output_file.is_open()) {
- printf("Error opening the output file.\n");
- yuv_file.close();
- return -1;
- }
- // encoder
- JetsonEnc *test = new JetsonEnc(width, height, fps);
- EncDataWriter *writer = new EncDataWriter;
- int frames = 0;
- test->SetDecCallBack(static_cast<JetsonEncListner *>(writer)); // register writer as the receiver of the encoder's output callback
- while(_run)
- { printf("_run \n");
- // int cam_fd=-1;
- int cam_fd = ctx.cam_fd;
- // context_t * p=nullptr;
-
- fds[0].fd = cam_fd;
- fds[0].events = POLLIN;
- if(poll(fds, 1, 5000) > 0)
- {
- printf("poll\n");
-
- if (fds[0].revents & POLLIN)
- {
- printf("fds\n");
- /* Dequeue a camera buff */
- memset(&v4l2_buf, 0, sizeof(v4l2_buf));
-
- v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- // if (p->capture_dmabuf)
- if (ctx.capture_dmabuf)
- v4l2_buf.memory = V4L2_MEMORY_DMABUF;
- else
- v4l2_buf.memory = V4L2_MEMORY_MMAP;
-
- if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
- printf("Failed to dequeue camera buff: %s (%d)",
- strerror(errno), errno);
- // printf("succ \n");
-
- // char *buffer = new char[frame_size];
- // prepare the YUY2 input data
- // uint8_t* yuy2_data = (uint8_t* )ctx.g_buff[v4l2_buf.index].start; // size of the YUY2 data
- // assume yuy2_data has already been filled
- // prepare the I420 output buffer
- // uint8_t* i420_data = new uint8_t[width * height * 3 / 2]; // size of the I420 data
- // call the conversion function
- // YUY2toI420(width, height, yuy2_data, i420_data);
- // test->AddFrame(i420_data, frame_size);
-
- // const int result = libyuv::YUY2ToI420((uint8_t*)ctx.g_buff[v4l2_buf.index].start, 2*width,yuv,width,yuv+width*height,width/2,
- // yuv+5*width*height/4,width/2,width,height);
- char *buffer = new char[frame_size];
- yuv_file.read(buffer, frame_size);
- if (yuv_file.gcount() < (std::streamsize)frame_size) {
- // input file exhausted: stop the capture loop
- delete[] buffer;
- _run = false;
- } else {
- test->AddFrame(buffer, frame_size);
- // JetsonEnc frees the buffer once it has been encoded, so it is not freed here;
- // handing ownership to the encoder avoids an extra copy
- // delete buffer;
- frames++;
- printf("Frame : %d\n", frames);
- }
- if(test->GetQueueSize() >= 5){
- usleep(1000 * 100);
- }
- usleep(1000 * 1000 / fps);
-
- if (ioctl(ctx.cam_fd, VIDIOC_QBUF, &v4l2_buf) < 0)
- printf("Failed to queue camera buffers: %s (%d)\n",
- strerror(errno), errno);
-
- }
- }
-
- }
- // while (!yuv_file.eof()) {
- // char *buffer = new char[frame_size];
- // yuv_file.read(buffer, frame_size);
- // test->AddFrame(buffer, frame_size);
-
- // // JetsonEnc frees the buffer after encoding, so it is not freed here; this avoids an extra copy
- // // delete buffer;
- // frames++;
- // printf("Frame : %d\n", frames);
- // if(test->GetQueueSize() >= 5){
- // usleep(1000 * 100);
- // }
- // usleep(1000 * 1000 / fps);
- // }
- printf("file over\n");
- // waiting encoder finish
- while(test->GetQueueSize() != 0){
- usleep(1000 * 1000 / fps);
- }
- // release
- delete test;
- // delete writer;
- yuv_file.close();
- output_file.close();
- return 0;
- }