#include "pch.h" #include "../common/comm.h" #include "./include/api.h" #include "lock.h" #include #include #include #include #include #include #include #include #include #include "capture_op.h" #include "gsml_capturer.h" static nv_color_fmt nvcolor_fmt[] = { /* TODO: add more pixel format mapping */ {V4L2_PIX_FMT_UYVY, NVBUF_COLOR_FORMAT_UYVY}, {V4L2_PIX_FMT_VYUY, NVBUF_COLOR_FORMAT_VYUY}, {V4L2_PIX_FMT_YUYV, NVBUF_COLOR_FORMAT_YUYV}, {V4L2_PIX_FMT_YVYU, NVBUF_COLOR_FORMAT_YVYU}, {V4L2_PIX_FMT_GREY, NVBUF_COLOR_FORMAT_GRAY8}, {V4L2_PIX_FMT_YUV420M, NVBUF_COLOR_FORMAT_YUV420}, }; static NvBufSurfaceColorFormat get_nvbuff_color_fmt(unsigned int v4l2_pixfmt) { unsigned i; for (i = 0; i < sizeof(nvcolor_fmt) / sizeof(nvcolor_fmt[0]); i++) { if (v4l2_pixfmt == nvcolor_fmt[i].v4l2_pixfmt) return nvcolor_fmt[i].nvbuff_color; } return NVBUF_COLOR_FORMAT_INVALID; } rtc::scoped_refptr OpenGSMLCapture(CaptureOp* op) { auto video_capture=std::make_unique(op); video_capture->Start(); rtc::scoped_refptr video_source = GSMLTrackSource::Create(std::move(video_capture)); return video_source; } GSMLCapturer::GSMLCapturer(CaptureOp* lhs):_op(lhs) { } void GSMLCapturer::Start() { _thread = std::thread(std::bind(&GSMLCapturer::Run, this)); } //视频的捕获 bool GSMLCapturer::open_cam() { _op->_ctx0=(context_t *)malloc(sizeof(context_t)); { context_t * p=_op->_ctx0; p->cam_fd=-1; p->cam_pixfmt = V4L2_PIX_FMT_YUYV; p->cam_w = 1280; p->cam_h = 720; // p->frame = 0; p->g_buff = NULL; p->capture_dmabuf = true; // opencv display v4l2 can't be true // p->capture_dmabuf = false; // opencv display v4l2 can't be true p->fps = 30; p->enable_verbose = false; std::string devname="/dev/video" + std::to_string(_op->GetIndex()); p->cam_fd = open(devname.c_str(), O_RDWR); //打开视频设备 if( p->cam_fd==-1) { printf("Failed to open camera device %s: %s (%d)", devname.c_str(), strerror(errno), errno); } struct v4l2_format fmt; fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; fmt.fmt.pix.width = p->cam_w; fmt.fmt.pix.height = p->cam_h; fmt.fmt.pix.pixelformat = p->cam_pixfmt; fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; if (ioctl( p->cam_fd, VIDIOC_S_FMT, &fmt) < 0) printf("Failed to set camera output format: %s (%d)", strerror(errno), errno); /* Get the real format in case the desired is not supported */ memset(&fmt, 0, sizeof fmt); fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (ioctl( p->cam_fd, VIDIOC_G_FMT, &fmt) < 0) printf("Failed to get camera output format: %s (%d)", strerror(errno), errno); if (fmt.fmt.pix.width != p->cam_w || fmt.fmt.pix.height != p->cam_h || fmt.fmt.pix.pixelformat != p->cam_pixfmt) { printf("The desired format is not supported"); p->cam_w = fmt.fmt.pix.width; p->cam_h = fmt.fmt.pix.height; p->cam_pixfmt =fmt.fmt.pix.pixelformat; } struct v4l2_streamparm streamparm; memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm)); streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; /* streamparm.parm.capture.timeperframe.numerator = 1; streamparm.parm.capture.timeperframe.denominator = 3; streamparm.parm.output.timeperframe.numerator = 1; streamparm.parm.output.timeperframe.denominator = 3; */ ioctl ( p->cam_fd, VIDIOC_G_PARM, &streamparm); printf(">>: Frame rate: %u/%u\n",streamparm.parm.capture.timeperframe.numerator,streamparm.parm.capture.timeperframe.denominator); // INFO("Camera ouput format: (%d x %d) stride: %d, imagesize: %d, frate: %u / %u", // fmt.fmt.pix.width, // fmt.fmt.pix.height, // fmt.fmt.pix.bytesperline, // fmt.fmt.pix.sizeimage, // streamparm.parm.capture.timeperframe.denominator, // 
void GSMLCapturer::Run()
{
    if (!open_cam()) return;
    prepare_buffer();
    start_streams();
    // start_capture(&ctx);
    _run = true;
    struct pollfd fds[1];
    struct v4l2_buffer v4l2_buf;
    long long _source = 0, _dst = 0;
    while (_run)
    {
        int cam_fd = -1;
        context_t* p = nullptr;
        if (_op->IsForward())
        {
            cam_fd = _op->_ctx0->cam_fd;
            p = _op->_ctx0;
        }
        else
        {
            cam_fd = _op->_ctx1->cam_fd;
            p = _op->_ctx1;
        }

        /* Init the NvBufferTransformParams */
        NvBufSurf::NvCommonTransformParams transform_params = {0};
        transform_params.src_top = 0;
        transform_params.src_left = 0;
        transform_params.src_width = p->cam_w;
        transform_params.src_height = p->cam_h;
        transform_params.dst_top = 0;
        transform_params.dst_left = 0;
        transform_params.dst_width = p->cam_w;
        transform_params.dst_height = p->cam_h;
        transform_params.flag = NVBUFSURF_TRANSFORM_FILTER;
        transform_params.flip = NvBufSurfTransform_None;
        transform_params.filter = NvBufSurfTransformInter_Algo3;

        fds[0].fd = cam_fd;
        fds[0].events = POLLIN;
        if (poll(fds, 1, 5000) > 0)
        {
            if (fds[0].revents & POLLIN)
            {
                /* Dequeue a camera buffer */
                memset(&v4l2_buf, 0, sizeof(v4l2_buf));
                v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                if (p->capture_dmabuf)
                    v4l2_buf.memory = V4L2_MEMORY_DMABUF;
                else
                    v4l2_buf.memory = V4L2_MEMORY_MMAP;
                if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
                    printf("Failed to dequeue camera buff: %s (%d)", strerror(errno), errno);

                if (_op->GetType() == RenderPosition::FRONT)
                {
                    /* The duration_cast template argument was lost in the original
                     * listing; milliseconds is assumed from the "encode delay" log. */
                    _source = std::chrono::duration_cast<std::chrono::milliseconds>(
                        std::chrono::system_clock::now().time_since_epoch()).count();
                    // printf("encode delay:%lld----frame:%ld\r\n", _source);
                }

                // NvBufSurface *pSurf = NULL;
                // if (-1 == NvBufSurfaceFromFd(p->g_buff[v4l2_buf.index].dmabuff_fd,
                //         (void**)(&pSurf)))
                //     printf("Cannot get NvBufSurface from fd");
                // if (p->capture_dmabuf) {
                //     /* Cache sync for VIC operation since the data is from CPU */
                //     if (-1 == NvBufSurfaceSyncForDevice(pSurf, 0, 0))
                //         printf("Cannot sync output buffer");
                // }
                // else {
                //     /* Copies raw buffer plane contents to an NvBufSurface plane */
                //     if (-1 == Raw2NvBufSurface(p->g_buff[v4l2_buf.index].start, 0, 0,
                //             p->cam_w, p->cam_h, pSurf))
                //         printf("Cannot copy raw buffer to NvBufSurface plane");
                // }
                // /* Convert the camera buffer from YUV422 to YUV420P */
                // if (NvBufSurf::NvTransform(&transform_params,
                //         p->g_buff[v4l2_buf.index].dmabuff_fd, p->render_dmabuf_fd))
                //     printf("Failed to convert the buffer");
                // NvBufSurface *pSurf_ = NULL;
                // if (-1 == NvBufSurfaceFromFd(p->render_dmabuf_fd, (void**)(&pSurf_)))
                //     printf("Cannot get NvBufSurface from fd");

                int width = p->cam_w;
                int height = p->cam_h;
                rtc::scoped_refptr<webrtc::I420Buffer> buffer =
                    webrtc::I420Buffer::Create(p->cam_w, p->cam_h);

                /* Convert the packed YUYV camera frame to I420 on the CPU. */
                const int conversionResult = libyuv::ConvertToI420(
                    (uint8_t*)p->g_buff[v4l2_buf.index].start, 0,
                    buffer->MutableDataY(), buffer->StrideY(),
                    buffer->MutableDataU(), buffer->StrideU(),
                    buffer->MutableDataV(), buffer->StrideV(),
                    0, 0,
                    p->cam_w, p->cam_h,
                    buffer->width(), buffer->height(),
                    libyuv::kRotate0, libyuv::FOURCC_YUYV);

                // Alternative path, left for reference (buffer changed to uint8_t;
                // the unused VLA declaration is commented out here):
                // uint8_t yuv[3 * width * height / 2];
                // const int result = libyuv::YUY2ToI420(
                //     (uint8_t*)p->g_buff[v4l2_buf.index].start, 2 * width,
                //     yuv, width,
                //     yuv + width * height, width / 2,
                //     yuv + 5 * width * height / 4, width / 2,
                //     width, height);
                //
                // int YUY2ToI420(const uint8_t* src_yuy2, int src_stride_yuy2,
                //                uint8_t* dst_y, int dst_stride_y,
                //                uint8_t* dst_u, int dst_stride_u,
                //                uint8_t* dst_v, int dst_stride_v,
                //                int width, int height);

                // if (conversionResult >= 0) {
                //     printf("dma success \n");

                webrtc::VideoFrame videoFrame(buffer,
                                              webrtc::VideoRotation::kVideoRotation_0,
                                              rtc::TimeNanos());
                if ((p->cam_w == 0) && (p->cam_h == 0))
                {
                    _broadcaster.OnFrame(videoFrame);
                }
                else
                {
                    if (height == 0)
                    {
                        height = (videoFrame.height() * width) / videoFrame.width();
                    }
                    else if (width == 0)
                    {
                        width = (videoFrame.width() * height) / videoFrame.height();
                    }
                    int stride_y = width;
                    int stride_uv = (width + 1) / 2;
                    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer =
                        webrtc::I420Buffer::Create(width, height, stride_y, stride_uv, stride_uv);
                    scaled_buffer->ScaleFrom(*videoFrame.video_frame_buffer()->ToI420());
                    webrtc::VideoFrame frame = webrtc::VideoFrame(
                        scaled_buffer, webrtc::kVideoRotation_0, rtc::TimeNanos());
                    _broadcaster.OnFrame(frame);
                }

                if (_op->GetType() == RenderPosition::FRONT)
                {
                    _dst = std::chrono::duration_cast<std::chrono::milliseconds>(
                        std::chrono::system_clock::now().time_since_epoch()).count();
                    // printf("encode delay:%lld\r\n", _dst - _source);
                }

                /* Re-queue the buffer for the next capture */
                if (ioctl(p->cam_fd, VIDIOC_QBUF, &v4l2_buf))
                    printf("Failed to queue camera buffers: %s (%d)", strerror(errno), errno);
            }
        }
    }
    stop_streams();
    // NvBufSurf::NvDestroy(p->render_dmabuf_fd);
    close_cam();
}

void GSMLCapturer::Destroy()
{
    Stop();
}

void GSMLCapturer::AddOrUpdateSink(
    rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants)
{
    _broadcaster.AddOrUpdateSink(sink, wants);
}

void GSMLCapturer::RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
{
    _broadcaster.RemoveSink(sink);
}
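/* Hedged example (not in the original file): a minimal sink that could be
 * attached through AddOrUpdateSink() above to observe the frames Run() pushes
 * into _broadcaster. The class name and frame-count logging are illustrative. */
class FrameCountingSink : public rtc::VideoSinkInterface<webrtc::VideoFrame>
{
public:
    void OnFrame(const webrtc::VideoFrame& frame) override
    {
        ++_frames;
        printf("sink: frame %d (%dx%d)\n", _frames, frame.width(), frame.height());
    }

private:
    int _frames = 0;
};
// Usage sketch: capturer->AddOrUpdateSink(&sink, rtc::VideoSinkWants());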
bool GSMLCapturer::prepare_buffer()
{
    context_t* p = _op->_ctx0;
    NvBufSurf::NvCommonAllocateParams camparams = {0};
    int fd[V4L2_BUFFERS_NUM] = {0};

    /* Allocate global buffer context */
    p->g_buff = (nv_buffer*)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
    if (p->g_buff == NULL)
        printf("Failed to allocate global buffer context");

    camparams.memType = NVBUF_MEM_SURFACE_ARRAY;
    camparams.width = p->cam_w;
    camparams.height = p->cam_h;
    camparams.layout = NVBUF_LAYOUT_PITCH;
    camparams.colorFormat = get_nvbuff_color_fmt(p->cam_pixfmt);
    camparams.memtag = NvBufSurfaceTag_CAMERA;
    if (NvBufSurf::NvAllocate(&camparams, V4L2_BUFFERS_NUM, fd))
        printf("Failed to create NvBuffer");

    /* Create buffers and provide them to the camera */
    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        NvBufSurface* pSurf = NULL;
        p->g_buff[index].dmabuff_fd = fd[index];
        if (-1 == NvBufSurfaceFromFd(fd[index], (void**)(&pSurf)))
            printf("Failed to get NvBuffer parameters");
        if (p->cam_pixfmt == V4L2_PIX_FMT_GREY &&
            pSurf->surfaceList[0].pitch != pSurf->surfaceList[0].width)
            p->capture_dmabuf = false;

        /* TODO: add multi-planar support
           Currently only supports YUV422 interlaced single-planar */
        if (p->capture_dmabuf)
        {
            if (-1 == NvBufSurfaceMap(pSurf, 0, 0, NVBUF_MAP_READ_WRITE))
                printf("Failed to map buffer");
            p->g_buff[index].start = (unsigned char*)pSurf->surfaceList[0].mappedAddr.addr[0];
            p->g_buff[index].size = pSurf->surfaceList[0].dataSize;
        }
    }

    /* Create render buffer */
    camparams.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
    camparams.memtag = NvBufSurfaceTag_NONE;
    if (NvBufSurf::NvAllocate(&camparams, 1, &p->render_dmabuf_fd))
        printf("Failed to create NvBuffer");

    if (p->capture_dmabuf)
    {
        if (!request_camera_buff(p))
            printf("Failed to set up camera buff");
    }
    // else {
    //     if (!request_camera_buff_mmap(ctx))
    //         ERROR_RETURN("Failed to set up camera buff");
    // }
    // INFO("Succeed in preparing stream buffers");
    return true;
}
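/* Hedged sketch of the MMAP fallback referenced in the commented-out branch in
 * prepare_buffer() above. The project's request_camera_buff_mmap() is not shown
 * in this file; under standard V4L2 semantics a memory-mapped setup would look
 * roughly like this (function name is illustrative to avoid clashing with the
 * real member). */
static bool request_camera_buff_mmap_sketch(context_t* p)
{
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(rb));
    rb.count = V4L2_BUFFERS_NUM;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;
    if (ioctl(p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
        return false;

    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.index = index;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
            return false;

        /* Map the driver-owned buffer into this process */
        p->g_buff[index].size = buf.length;
        p->g_buff[index].start = (unsigned char*)mmap(
            NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
            p->cam_fd, buf.m.offset);
        if (p->g_buff[index].start == MAP_FAILED)
            return false;

        if (ioctl(p->cam_fd, VIDIOC_QBUF, &buf) < 0)
            return false;
    }
    return true;
}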
bool GSMLCapturer::request_camera_buff(context_t* p)
{
    /* Request camera v4l2 buffers */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(rb));
    rb.count = V4L2_BUFFERS_NUM;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_DMABUF;
    if (ioctl(p->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
        printf("Failed to request v4l2 buffers: %s (%d)", strerror(errno), errno);
    if (rb.count != V4L2_BUFFERS_NUM)
        printf("V4l2 buffer number is not as desired");

    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
    {
        struct v4l2_buffer buf;

        /* Query camera v4l2 buffer length */
        memset(&buf, 0, sizeof buf);
        buf.index = index;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_DMABUF;
        if (ioctl(p->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
            printf("Failed to query buff: %s (%d)", strerror(errno), errno);

        /* TODO: add support for multi-planar
           Enqueue empty v4l2 buffers into the camera capture plane */
        buf.m.fd = (unsigned long)p->g_buff[index].dmabuff_fd;
        if (buf.length != p->g_buff[index].size)
        {
            printf("Camera v4l2 buf length is not expected");
            p->g_buff[index].size = buf.length;
        }
        if (ioctl(p->cam_fd, VIDIOC_QBUF, &buf) < 0)
            printf("Failed to enqueue buffers: %s (%d)", strerror(errno), errno);
    }
    return true;
}

bool GSMLCapturer::stop_streams()
{
    enum v4l2_buf_type type;
    {
        context_t* p = _op->_ctx0;
        for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
        {
            if (munmap(p->g_buff[index].start, p->g_buff[index].size) == -1)
            {
                printf("munmap failed: %s (%d)", strerror(errno), errno);
            }
        }

        /* Stop v4l2 streaming */
        type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(p->cam_fd, VIDIOC_STREAMOFF, &type))
            printf("Failed to stop streaming: %s (%d)", strerror(errno), errno);
        // INFO("Camera video streaming off ...");
    }
    return true;
}

bool GSMLCapturer::start_streams()
{
    enum v4l2_buf_type type;
    context_t* p = _op->_ctx0;

    /* Start v4l2 streaming */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(p->cam_fd, VIDIOC_STREAMON, &type) < 0)
        printf("Failed to start streaming: %s (%d)", strerror(errno), errno);
    usleep(200);
    // INFO("Camera video streaming on ...");
    return true;
}

void GSMLCapturer::close_cam()
{
    {
        context_t* p = _op->_ctx0;
        if (p->g_buff != nullptr)
        {
            free(p->g_buff);
            p->g_buff = nullptr;
        }
        if (p->cam_fd > 0)
            close(p->cam_fd);
    }
    free(_op->_ctx0);
}

void GSMLCapturer::Stop()
{
    _run = false;
    _thread.join();
}
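/* Hedged usage sketch (not in the original file): wiring the capturer into a
 * WebRTC video track. `op` and `factory` come from elsewhere; the track label
 * and the exact CreateVideoTrack() signature depend on the WebRTC revision.
 *
 *   rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
 *       OpenGSMLCapture(op);
 *   rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
 *       factory->CreateVideoTrack("camera0", source.get());
 */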