Casper 7 months ago
parent
commit
f3ec9e961e

+ 0 - 2
test/test-export-yuv/USAGE.bash → test/test-export-yuv/README-usage.bash

@@ -1,5 +1,3 @@
 ## NOTE
 
-# 启动
-cd
 g++ -o zw-test0 zw-test0.cpp `pkg-config --cflags --libs opencv4` && ./zw-test0

+ 65 - 0
test/test-mwsdk-gmsl_camera/CHANGELOG.md

@@ -0,0 +1,65 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+## [Unreleased]
+## [1.0.0] - 2020-07-14
+- Adapted for Jetpack 4.4 GA
+- Refactored for better performance.
+
+## [0.1.4] - 2019-08-07
+- Fixed a bug in cameras_opencv_demo.cpp where setting the format to YUYV crashed the program.
+- Added version number support to both the dynamic libraries and the header file.
+- Removed redundant outputs.
+
+## [0.1.3] - 2019-06-18
+- Changed the directory structure for easier management.
+
+## [0.1.2] - 2019-04-13
+- Adjusted the folder structure to follow the C++ project standard.
+- Optimized the user compilation workflow.
+  
+## [0.1.1] - 2019-04-09
+
+### Changed
+
+- Added a timestamp interface to the SDK and its usage to example.cpp.
+- Modified the make logic.
+
+## [0.1.0] - 2019-03-08
+
+### Changed
+
+- Rewrote the SDK so that it no longer depends on Nv header files.
+- Modified example.cpp.
+- Modified folder structure.
+
+## [0.0.5] - 2019-02-20
+### Changed
+- Added an example launch command to README.md.
+
+## [0.0.4] - 2019-02-16
+### Changed
+- Made the SDK compatible with S2-pro.
+
+## [0.0.3] - 2019-01-17
+### Added
+- R5 synchronization support.
+
+### Changed
+- Switched from EGLRender to OpenCV display.
+### Fixed
+- Fixed camera freeze in long-duration tests.
+
+## [0.0.2] - 2018-12-20
+
+### Removed
+- Removed the SDK along with the example.
+
+## [0.0.1] - 2018-12-18
+### Added
+- Added control of multi-window parameters.
+- Fixed a build problem in the build.sh script.
+### Changed
+- Changed the make install path to /lib/.
+- Simplified the code structure.
+- Replaced the multimedia framework with a Xavier-compatible one.

+ 31 - 0
test/test-mwsdk-gmsl_camera/Makefile

@@ -0,0 +1,31 @@
+
+#================================================================
+#   Copyright (C) 2019 All rights reserved.
+#
+#   filename    : Makefile
+#   Author      : wjyang
+#   Date        : 2019-06-20
+#   Description : 
+#
+#================================================================
+PROJECT_TOP:=$(shell pwd)
+export PROJECT_TOP
+
+all: test
+
+help:
+	@echo "make test	: make sample for test."
+	@echo "make clean	: clean build project."
+
+test:
+	+@make -C samples/cameras_egl_demo
+	+@make -C samples/cameras_opencv_demo
+	+@make -C samples/cameras_sdk_demo
+	+@make -C samples/cameras_sdk_noopencv_demo
+
+clean:
+	+@make clean -C samples/cameras_egl_demo
+	+@make clean -C samples/cameras_opencv_demo
+	+@make clean -C samples/cameras_sdk_demo
+	+@make clean -C samples/cameras_sdk_noopencv_demo
+	@rm -rf $(PROJECT_TOP)/bin

+ 51 - 0
test/test-mwsdk-gmsl_camera/README.md

@@ -0,0 +1,51 @@
+**[BUILD]**
+
+```shell
+# The first build requires sudo
+sudo make -j
+```
+
+**[RUN GMSL CAMERA DEMO]**
+
+To use NVIDIA's native interfaces, refer to cameras_egl_demo.
+To use the miivii interfaces to read images and the current image timestamp, refer to cameras_sdk_demo.
+To grab camera images through the OpenCV interface while calling the miivii interface for the image timestamp, refer to cameras_opencv_demo.
+To use the miivii interfaces for images and timestamps without calling the OpenCV library, refer to cameras_sdk_noopencv_demo.
+
+```shell
+./bin/cameras_opencv_demo -s 1280x720 -d /dev/video0 -n 4-4 -r 30-30 -b 0xf-0xf0 -p 0-0-0-0-1-2-3-4
+./bin/cameras_sdk_demo -s 1280x720 -d /dev/video0 -n 4-4 -r 30-30 -b 0xf-0xf0 -p 0-0-0-0-1-2-3-4
+./bin/cameras_egl_demo -d /dev/video0 -s 1280x720
+```
+Parameter notes (see the annotated example after this list):
+-d: Set the V4L2 video device node.
+-n: Set the number of sync and async cameras. Example: [-n 2-4] means 2 sync cameras and 4 async cameras (8 sync cameras by default, i.e. [-n 8-0]).
+-r: Set the sync and async camera frequencies. Example: [-r 30-20] means the sync cameras run at 30 fps and the async cameras at 20 fps (sync frequency 30 by default, i.e. [-r 30-0]).
+-b: Set which cameras to trigger. Example: [-b 0x0f-0xf0] means the first bitmask selects the sync cameras to trigger and the second bitmask the async cameras (all 8 cameras are in sync mode by default, i.e. [-b 0xff-0]).
+-p: Set the angle within a cycle at which each async camera is triggered; not set by default.
+-h: Show help.
+
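+A breakdown of the cameras_sdk_demo command above (the bit-to-camera mapping is an assumption for illustration: bit i of each mask selects camera i):
+
+```shell
+# 4 sync and 4 async cameras (-n 4-4), both groups triggered at 30 fps (-r 30-30)
+# -b 0xf-0xf0: sync mask 0x0f would select cameras 0-3, async mask 0xf0 cameras 4-7
+# -p 0-0-0-0-1-2-3-4: trigger angle within the cycle for each of the 8 cameras
+./bin/cameras_sdk_demo -s 1280x720 -d /dev/video0 -n 4-4 -r 30-30 -b 0xf-0xf0 -p 0-0-0-0-1-2-3-4
+```
+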
+Starting synchronized cameras (example with three cameras):
+./bin/cameras_sdk_demo -d /dev/video0 -s 1280x720 -m 3 -r 30-0
+-d: Set the device node of the first camera to open.
+-s: Set the capture resolution, with a lowercase x as the separator.
+-m: Set the number of cameras to open.
+-r: Set the capture frequency of the externally triggered cameras; the value before "-" is the sync frequency and the value after it is the async frequency. In this example only sync cameras are opened, so only the sync frequency (before "-") is set, to 30.
+
+
+**[SDK CAPACITY]**
+
+The GMSL SDK supports custom output image resolutions as well
+ as multiple formats:
+ - UYVY
+ - YUYV
+ - YVYU
+ - YUV420M
+ - XBGR32
+ - ARGB32
+
+ The image format is set to ABGR32 by default.
+
+ Note that:
+ > YUV images should only be obtained with MVGmslGetImage(), while RGB
+ > images should only be obtained with MVGmslGetCvMat().
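+
+ A minimal usage sketch, following the cameras_opencv_demo sample in this commit (the header, config
+ struct fields, constructor, and GetGmslTimeStamp() call mirror that sample; the GStreamer pipeline
+ string, the fixed camera number, and the error handling are simplifying assumptions):
+
+```cpp
+#include <opencv2/opencv.hpp>
+#include "MvGmslCamera.h"
+
+int main() {
+    /* Default trigger configuration: 8 synchronized cameras at 30 fps (same defaults as the demos) */
+    struct sync_out_a_cfg_client_t cfg = {};
+    cfg.sync_camera_num = 8;
+    cfg.sync_freq = 30;
+    cfg.sync_camera_bit_draw = 0xff;
+
+    miivii::MvGmslCamera mvcam(cfg);
+
+    /* Grab frames from /dev/video0 through GStreamer, as cameras_opencv_demo does */
+    cv::VideoCapture cap("v4l2src device=/dev/video0 ! video/x-raw, width=1280, height=720, format=UYVY ! queue ! appsink",
+                         cv::CAP_GSTREAMER);
+    if (!cap.isOpened()) return 1;
+
+    unsigned char camera_no = 0;  /* /dev/video0 */
+    uint64_t timestamp = 0;
+    cv::Mat raw, bgr;
+    while (cap.read(raw)) {
+        /* The image must be read before the timestamp is queried */
+        mvcam.GetGmslTimeStamp(camera_no, timestamp);
+        cv::cvtColor(raw, bgr, cv::COLOR_YUV2BGR_UYVY);
+        cv::imshow("preview", bgr);
+        if (cv::waitKey(1) == 27) break;  /* ESC quits */
+    }
+    return 0;
+}
+```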

+ 9 - 0
test/test-mwsdk-gmsl_camera/bin/firmware_update/README.md

@@ -0,0 +1,9 @@
+li-isx031-update
+    1: Run the program to either erase or update:
+       Erase:  ./li-isx031-update e
+       Update: ./li-isx031-update u update_file_name
+    2: Enter the camera address and serializer address as prompted.
+        Camera index      :  0  1  2  3   4  5  6  7
+        Camera address    : 05 04 07 06  09 08 17 16
+        Serializer address: 66 68 34 96 100 32 48 98
+    3: Wait for the update or erase to finish.
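+
+    Example run (the firmware file name below is only a placeholder):
+        ./li-isx031-update e                         # erase the current firmware first
+        ./li-isx031-update u isx031_fw_update.bin    # then flash the new firmware, entering the addresses above when prompted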

+ 69 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_egl_demo/Makefile

@@ -0,0 +1,69 @@
+###############################################################################
+#
+# Copyright (c) 2016-2017, NVIDIA CORPORATION. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#  * Neither the name of NVIDIA CORPORATION nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+###############################################################################
+
+include ../../Rules.mk
+
+APP := cameras_egl_demo
+
+SRCS := \
+	camera_v4l2_cuda.cpp \
+	$(wildcard $(NV_CLASS_DIR)/*.cpp)
+
+OBJS := $(SRCS:.cpp=.o)
+#OBJS := $(notdir $(OBJS))
+
+OBJS += \
+	$(ALGO_CUDA_DIR)/NvAnalysis.o \
+	$(ALGO_CUDA_DIR)/NvCudaProc.o
+
+all: $(APP)
+
+$(NV_CLASS_DIR)/%.o: $(NV_CLASS_DIR)/%.cpp
+	$(AT)$(MAKE) -C $(NV_CLASS_DIR)
+
+$(ALGO_CUDA_DIR)/%.o: $(ALGO_CUDA_DIR)/%.cpp
+	$(AT)$(MAKE) -C $(ALGO_CUDA_DIR)
+
+$(ALGO_CUDA_DIR)/%.o: $(ALGO_CUDA_DIR)/%.cu
+	$(AT)$(MAKE) -C $(ALGO_CUDA_DIR)
+
+%.o: %.cpp
+	@echo "Compiling: $<"
+	$(CPP) $(CPPFLAGS) -c $<
+
+$(APP): $(OBJS)
+	@echo "Linking: $@"
+	$(CPP) -o $@ $(OBJS) $(CPPFLAGS) $(LDFLAGS)
+	@mkdir -p $(TOP_DIR)/bin
+	@cp -rf $(APP) $(TOP_DIR)/bin
+
+clean:
+	$(AT)rm -rf $(APP) $(OBJS)
+	$(AT)rm -rf *.o

+ 762 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_egl_demo/camera_v4l2_cuda.cpp

@@ -0,0 +1,762 @@
+/*
+ * Copyright (c) 2016-2018, NVIDIA CORPORATION. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *  * Neither the name of NVIDIA CORPORATION nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+#include <unistd.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <stdlib.h>
+#include <signal.h>
+#include <poll.h>
+
+#include "NvEglRenderer.h"
+#include "NvUtils.h"
+#include "NvCudaProc.h"
+
+#include "camera_v4l2_cuda.h"
+
+#define MJPEG_EOS_SEARCH_SIZE 4096
+
+static bool quit = false;
+
+using namespace std;
+
+static void
+print_usage(void) {
+    printf("\n\tUsage: cameras_egl_demo [OPTIONS]\n\n"
+           "\tExample: \n"
+           "\t./cameras_egl_demo -d /dev/video0 -s 1280x720\n\n"
+           "\tSupported options:\n"
+           "\t-d\t\tSet V4l2 video device node\n"
+           "\t-s\t\tSet output resolution of video device\n"
+           "\t-f\t\tSet output pixel format of video device (supports only YUYV/YVYU/UYVY/VYUY/GREY/MJPEG)\n"
+           "\t-r\t\tSet renderer frame rate (30 fps by default)\n"
+           "\t-n\t\tSave the n-th frame before VIC processing\n"
+           "\t-c\t\tEnable CUDA aglorithm (draw a black box in the upper left corner)\n"
+           "\t-v\t\tEnable verbose message\n"
+           "\t-h\t\tPrint this usage\n\n"
+           "\tNOTE: It runs infinitely until you terminate it with <ctrl+c>\n");
+}
+
+static bool
+parse_cmdline(context_t * ctx, int argc, char **argv)
+{
+    int c;
+
+    if (argc < 2)
+    {
+        print_usage();
+        exit(EXIT_SUCCESS);
+    }
+
+    while ((c = getopt(argc, argv, "d:s:f:r:n:cvh")) != -1)
+    {
+        switch (c)
+        {
+            case 'd':
+                ctx->cam_devname = optarg;
+                break;
+            case 's':
+                if (sscanf(optarg, "%dx%d",
+                            &ctx->cam_w, &ctx->cam_h) != 2)
+                {
+                    print_usage();
+                    return false;
+                }
+                break;
+            case 'f':
+                if (strcmp(optarg, "YUYV") == 0)
+                    ctx->cam_pixfmt = V4L2_PIX_FMT_YUYV;
+                else if (strcmp(optarg, "YVYU") == 0)
+                    ctx->cam_pixfmt = V4L2_PIX_FMT_YVYU;
+                else if (strcmp(optarg, "VYUY") == 0)
+                    ctx->cam_pixfmt = V4L2_PIX_FMT_VYUY;
+                else if (strcmp(optarg, "UYVY") == 0)
+                    ctx->cam_pixfmt = V4L2_PIX_FMT_UYVY;
+                else if (strcmp(optarg, "GREY") == 0)
+                    ctx->cam_pixfmt = V4L2_PIX_FMT_GREY;
+                else if (strcmp(optarg, "MJPEG") == 0)
+                    ctx->cam_pixfmt = V4L2_PIX_FMT_MJPEG;
+                else
+                {
+                    print_usage();
+                    return false;
+                }
+                sprintf(ctx->cam_file, "camera.%s", optarg);
+                break;
+            case 'r':
+                ctx->fps = strtol(optarg, NULL, 10);
+                break;
+            case 'n':
+                ctx->save_n_frame = strtol(optarg, NULL, 10);
+                break;
+            case 'c':
+                ctx->enable_cuda = true;
+                break;
+            case 'v':
+                ctx->enable_verbose = true;
+                break;
+            case 'h':
+                print_usage();
+                exit(EXIT_SUCCESS);
+                break;
+            default:
+                print_usage();
+                return false;
+        }
+    }
+
+    return true;
+}
+
+static void
+set_defaults(context_t * ctx)
+{
+    memset(ctx, 0, sizeof(context_t));
+
+    ctx->cam_devname = "/dev/video0";
+    ctx->cam_fd = -1;
+    ctx->cam_pixfmt = V4L2_PIX_FMT_UYVY;
+    ctx->cam_w = 1280;
+    ctx->cam_h = 720;
+    ctx->frame = 0;
+    ctx->save_n_frame = 0;
+
+    ctx->g_buff = NULL;
+    ctx->capture_dmabuf = true;
+    ctx->renderer = NULL;
+    ctx->fps = 30;
+
+    ctx->enable_cuda = false;
+    ctx->egl_image = NULL;
+    ctx->egl_display = EGL_NO_DISPLAY;
+
+    ctx->enable_verbose = false;
+}
+
+static nv_color_fmt nvcolor_fmt[] =
+{
+    /* TODO: add more pixel format mapping */
+    {V4L2_PIX_FMT_UYVY, NVBUF_COLOR_FORMAT_UYVY},
+    {V4L2_PIX_FMT_VYUY, NVBUF_COLOR_FORMAT_VYUY},
+    {V4L2_PIX_FMT_YUYV, NVBUF_COLOR_FORMAT_YUYV},
+    {V4L2_PIX_FMT_YVYU, NVBUF_COLOR_FORMAT_YVYU},
+    {V4L2_PIX_FMT_GREY, NVBUF_COLOR_FORMAT_GRAY8},
+    {V4L2_PIX_FMT_YUV420M, NVBUF_COLOR_FORMAT_YUV420},
+};
+
+static NvBufSurfaceColorFormat
+get_nvbuff_color_fmt(unsigned int v4l2_pixfmt)
+{
+    unsigned i;
+
+    for (i = 0; i < sizeof(nvcolor_fmt) / sizeof(nvcolor_fmt[0]); i++)
+    {
+        if (v4l2_pixfmt == nvcolor_fmt[i].v4l2_pixfmt)
+            return nvcolor_fmt[i].nvbuff_color;
+    }
+
+    return NVBUF_COLOR_FORMAT_INVALID;
+}
+
+static bool
+save_frame_to_file(context_t * ctx, struct v4l2_buffer * buf)
+{
+    int file;
+
+    file = open(ctx->cam_file, O_CREAT | O_WRONLY | O_APPEND | O_TRUNC,
+            S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
+
+    if (-1 == file)
+        ERROR_RETURN("Failed to open file for frame saving");
+
+    if (-1 == write(file, ctx->g_buff[buf->index].start,
+                ctx->g_buff[buf->index].size))
+    {
+        close(file);
+        ERROR_RETURN("Failed to write frame into file");
+    }
+
+    close(file);
+
+    return true;
+}
+
+static bool
+nvbuff_do_clearchroma (int dmabuf_fd)
+{
+  int ret = 0;
+  unsigned i;
+
+  NvBufSurface *pSurf = NULL;
+  if (-1 == NvBufSurfaceFromFd(dmabuf_fd, (void**)(&pSurf)))
+    ERROR_RETURN("%s: NvBufSurfaceFromFd Failed \n", __func__);
+
+  for (i = 1; i < pSurf->surfaceList[0].planeParams.num_planes; i++) {
+    ret = NvBufSurfaceMemSet(pSurf, 0, i, 0x80);
+    if (ret != 0)
+      ERROR_RETURN("%s: NvBufSurfaceMemSet Failed \n", __func__);
+  }
+
+  return true;
+}
+
+static bool
+camera_initialize(context_t * ctx)
+{
+    struct v4l2_format fmt;
+
+    /* Open camera device */
+    ctx->cam_fd = open(ctx->cam_devname, O_RDWR);
+    if (ctx->cam_fd == -1)
+        ERROR_RETURN("Failed to open camera device %s: %s (%d)",
+                ctx->cam_devname, strerror(errno), errno);
+
+    /* Set camera output format */
+    memset(&fmt, 0, sizeof(fmt));
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.width = ctx->cam_w;
+    fmt.fmt.pix.height = ctx->cam_h;
+    fmt.fmt.pix.pixelformat = ctx->cam_pixfmt;
+    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
+    if (ioctl(ctx->cam_fd, VIDIOC_S_FMT, &fmt) < 0)
+        ERROR_RETURN("Failed to set camera output format: %s (%d)",
+                strerror(errno), errno);
+
+    /* Get the real format in case the desired is not supported */
+    memset(&fmt, 0, sizeof fmt);
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(ctx->cam_fd, VIDIOC_G_FMT, &fmt) < 0)
+        ERROR_RETURN("Failed to get camera output format: %s (%d)",
+                strerror(errno), errno);
+    if (fmt.fmt.pix.width != ctx->cam_w ||
+            fmt.fmt.pix.height != ctx->cam_h ||
+            fmt.fmt.pix.pixelformat != ctx->cam_pixfmt)
+    {
+        WARN("The desired format is not supported");
+        ctx->cam_w = fmt.fmt.pix.width;
+        ctx->cam_h = fmt.fmt.pix.height;
+        ctx->cam_pixfmt = fmt.fmt.pix.pixelformat;
+    }
+
+    struct v4l2_streamparm streamparm;
+    memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
+    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    ioctl (ctx->cam_fd, VIDIOC_G_PARM, &streamparm);
+
+    INFO("Camera ouput format: (%d x %d)  stride: %d, imagesize: %d, frate: %u / %u",
+            fmt.fmt.pix.width,
+            fmt.fmt.pix.height,
+            fmt.fmt.pix.bytesperline,
+            fmt.fmt.pix.sizeimage,
+            streamparm.parm.capture.timeperframe.denominator,
+            streamparm.parm.capture.timeperframe.numerator);
+
+    return true;
+}
+
+static bool
+display_initialize(context_t * ctx)
+{
+    /* Create EGL renderer */
+    ctx->renderer = NvEglRenderer::createEglRenderer("renderer0",
+            ctx->cam_w / 4, ctx->cam_h / 4, (1920 - ctx->cam_w / 4) / 2, (1080 - ctx->cam_h / 4) / 2);
+    if (!ctx->renderer)
+        ERROR_RETURN("Failed to create EGL renderer");
+    ctx->renderer->setFPS(ctx->fps);
+
+    if (ctx->enable_cuda)
+    {
+        /* Get default EGL display */
+        ctx->egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+        if (ctx->egl_display == EGL_NO_DISPLAY)
+            ERROR_RETURN("Failed to get EGL display connection");
+
+        /* Init EGL display connection */
+        if (!eglInitialize(ctx->egl_display, NULL, NULL))
+            ERROR_RETURN("Failed to initialize EGL display connection");
+    }
+
+    return true;
+}
+
+static bool
+init_components(context_t * ctx)
+{
+    if (!camera_initialize(ctx))
+        ERROR_RETURN("Failed to initialize camera device");
+
+    if (!display_initialize(ctx))
+        ERROR_RETURN("Failed to initialize display");
+
+    INFO("Initialize v4l2 components successfully");
+    return true;
+}
+
+static bool
+request_camera_buff(context_t *ctx)
+{
+    /* Request camera v4l2 buffer */
+    struct v4l2_requestbuffers rb;
+    memset(&rb, 0, sizeof(rb));
+    rb.count = V4L2_BUFFERS_NUM;
+    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    rb.memory = V4L2_MEMORY_DMABUF;
+    if (ioctl(ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
+        ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
+                     strerror(errno), errno);
+    if (rb.count != V4L2_BUFFERS_NUM)
+        ERROR_RETURN("V4l2 buffer number is not as desired");
+
+    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
+    {
+        struct v4l2_buffer buf;
+
+        // Query camera v4l2 buf length
+        memset(&buf, 0, sizeof buf);
+        buf.index = index;
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_DMABUF;
+
+        if (ioctl(ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
+            ERROR_RETURN("Failed to query buff: %s (%d)",
+                         strerror(errno), errno);
+
+        // TODO add support for multi-planer
+        // Enqueue empty v4l2 buff into camera capture plane
+        buf.m.fd = (unsigned long) ctx->g_buff[index].dmabuff_fd;
+        if (buf.length != ctx->g_buff[index].size)
+        {
+            WARN("Camera v4l2 buf length is not expected");
+            ctx->g_buff[index].size = buf.length;
+        }
+
+        if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
+            ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
+                         strerror(errno), errno);
+    }
+
+    return true;
+}
+
+static bool
+request_camera_buff_mmap(context_t *ctx)
+{
+    /* Request camera v4l2 buffer */
+    struct v4l2_requestbuffers rb;
+    memset(&rb, 0, sizeof(rb));
+    rb.count = V4L2_BUFFERS_NUM;
+    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    rb.memory = V4L2_MEMORY_MMAP;
+    if (ioctl(ctx->cam_fd, VIDIOC_REQBUFS, &rb) < 0)
+        ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)",
+                     strerror(errno), errno);
+    if (rb.count != V4L2_BUFFERS_NUM)
+        ERROR_RETURN("V4l2 buffer number is not as desired");
+
+    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
+    {
+        struct v4l2_buffer buf;
+
+        // Query camera v4l2 buf length
+        memset(&buf, 0, sizeof buf);
+        buf.index = index;
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+        buf.memory = V4L2_MEMORY_MMAP;
+        if (ioctl(ctx->cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
+            ERROR_RETURN("Failed to query buff: %s (%d)",
+                         strerror(errno), errno);
+
+        ctx->g_buff[index].size = buf.length;
+        ctx->g_buff[index].start = (unsigned char *)
+                mmap(NULL /* start anywhere */,
+                     buf.length,
+                     PROT_READ | PROT_WRITE /* required */,
+                     MAP_SHARED /* recommended */,
+                     ctx->cam_fd, buf.m.offset);
+        if (MAP_FAILED == ctx->g_buff[index].start)
+            ERROR_RETURN("Failed to map buffers");
+
+        if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &buf) < 0)
+            ERROR_RETURN("Failed to enqueue buffers: %s (%d)",
+                         strerror(errno), errno);
+    }
+
+    return true;
+}
+
+static bool
+prepare_buffers_mjpeg(context_t * ctx)
+{
+    NvBufSurf::NvCommonAllocateParams params = {0};
+
+    /* Allocate global buffer context */
+    ctx->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
+    if (ctx->g_buff == NULL)
+        ERROR_RETURN("Failed to allocate global buffer context");
+    memset(ctx->g_buff, 0, V4L2_BUFFERS_NUM * sizeof(nv_buffer));
+
+    params.memType = NVBUF_MEM_SURFACE_ARRAY;
+    params.width = ctx->cam_w;
+    params.height = ctx->cam_h;
+    params.layout = NVBUF_LAYOUT_PITCH;
+
+    params.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
+    params.memtag = NvBufSurfaceTag_NONE;
+
+    /* Create Render buffer */
+    if (NvBufSurf::NvAllocate(&params, 1, &ctx->render_dmabuf_fd))
+        ERROR_RETURN("Failed to create NvBuffer");
+
+    ctx->capture_dmabuf = false;
+    if (!request_camera_buff_mmap(ctx))
+        ERROR_RETURN("Failed to set up camera buff");
+
+    INFO("Succeed in preparing mjpeg buffers");
+    return true;
+}
+
+static bool
+prepare_buffers(context_t * ctx)
+{
+    NvBufSurf::NvCommonAllocateParams camparams = {0};
+    int fd[V4L2_BUFFERS_NUM] = {0};
+
+    /* Allocate global buffer context */
+    ctx->g_buff = (nv_buffer *)malloc(V4L2_BUFFERS_NUM * sizeof(nv_buffer));
+    if (ctx->g_buff == NULL)
+        ERROR_RETURN("Failed to allocate global buffer context");
+
+    camparams.memType = NVBUF_MEM_SURFACE_ARRAY;
+    camparams.width = ctx->cam_w;
+    camparams.height = ctx->cam_h;
+    camparams.layout = NVBUF_LAYOUT_PITCH;
+    camparams.colorFormat = get_nvbuff_color_fmt(ctx->cam_pixfmt);
+    camparams.memtag = NvBufSurfaceTag_CAMERA;
+    if (NvBufSurf::NvAllocate(&camparams, V4L2_BUFFERS_NUM, fd))
+        ERROR_RETURN("Failed to create NvBuffer");
+    /* Create buffer and provide it with camera */
+    for (unsigned int index = 0; index < V4L2_BUFFERS_NUM; index++)
+    {
+        NvBufSurface *pSurf = NULL;
+
+        ctx->g_buff[index].dmabuff_fd = fd[index];
+
+        if (-1 == NvBufSurfaceFromFd(fd[index], (void**)(&pSurf)))
+            ERROR_RETURN("Failed to get NvBuffer parameters");
+
+        if (ctx->cam_pixfmt == V4L2_PIX_FMT_GREY &&
+            pSurf->surfaceList[0].pitch != pSurf->surfaceList[0].width)
+            ctx->capture_dmabuf = false;
+
+        /* TODO: add multi-planar support
+           Currently only supports YUV422 interlaced single-planar */
+        if (ctx->capture_dmabuf) {
+            if (-1 == NvBufSurfaceMap (pSurf, 0, 0, NVBUF_MAP_READ_WRITE))
+                ERROR_RETURN("Failed to map buffer");
+            ctx->g_buff[index].start = (unsigned char *)pSurf->surfaceList[0].mappedAddr.addr[0];
+            ctx->g_buff[index].size = pSurf->surfaceList[0].dataSize;
+        }
+    }
+
+    camparams.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
+    camparams.memtag = NvBufSurfaceTag_NONE;
+    /* Create Render buffer */
+    if (NvBufSurf::NvAllocate(&camparams, 1, &ctx->render_dmabuf_fd))
+        ERROR_RETURN("Failed to create NvBuffer");
+
+    if (ctx->capture_dmabuf) {
+        if (!request_camera_buff(ctx))
+            ERROR_RETURN("Failed to set up camera buff");
+    } else {
+        if (!request_camera_buff_mmap(ctx))
+            ERROR_RETURN("Failed to set up camera buff");
+    }
+
+    INFO("Succeed in preparing stream buffers");
+    return true;
+}
+
+static bool
+start_stream(context_t * ctx)
+{
+    enum v4l2_buf_type type;
+
+    // Start v4l2 streaming
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(ctx->cam_fd, VIDIOC_STREAMON, &type) < 0)
+        ERROR_RETURN("Failed to start streaming: %s (%d)",
+                     strerror(errno), errno);
+
+    usleep(200);
+
+    INFO("Camera video streaming on ...");
+    return true;
+}
+
+static void
+signal_handle(int signum)
+{
+    printf("Quit due to exit command from user!\n");
+    quit = true;
+}
+
+static bool
+cuda_postprocess(context_t *ctx, int fd)
+{
+    if (ctx->enable_cuda)
+    {
+        NvBufSurface *pSurf = NULL;
+        /* Create EGLImage from dmabuf fd */
+        if (-1 == NvBufSurfaceFromFd(fd, (void**)(&pSurf)))
+            ERROR_RETURN("Failed to get NvBufSurface from FD");
+        NvBufSurfaceMapEglImage(pSurf, 0);
+        ctx->egl_image = pSurf->surfaceList[0].mappedAddr.eglImage;
+        if (ctx->egl_image == NULL)
+            ERROR_RETURN("Failed to map dmabuf fd (0x%X) to EGLImage",
+                         ctx->render_dmabuf_fd);
+
+        // Running algo process with EGLImage via GPU multi cores
+        HandleEGLImage(&ctx->egl_image);
+
+        // Destroy EGLImage
+        NvBufSurfaceUnMapEglImage(pSurf, 0);
+        ctx->egl_image = NULL;
+    }
+
+    return true;
+}
+
+static bool
+start_capture(context_t * ctx)
+{
+    struct sigaction sig_action;
+    struct pollfd fds[1];
+    NvBufSurf::NvCommonTransformParams transform_params = {0};
+
+    // Ensure a clean shutdown if user types <ctrl+c>
+    sig_action.sa_handler = signal_handle;
+    sigemptyset(&sig_action.sa_mask);
+    sig_action.sa_flags = 0;
+    sigaction(SIGINT, &sig_action, NULL);
+
+    if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
+        ctx->jpegdec = NvJPEGDecoder::createJPEGDecoder("jpegdec");
+
+    /* Init the NvBufferTransformParams */
+    transform_params.src_top = 0;
+    transform_params.src_left = 0;
+    transform_params.src_width = ctx->cam_w;
+    transform_params.src_height = ctx->cam_h;
+    transform_params.dst_top = 0;
+    transform_params.dst_left = 0;
+    transform_params.dst_width = ctx->cam_w;
+    transform_params.dst_height = ctx->cam_h;
+    transform_params.flag = NVBUFSURF_TRANSFORM_FILTER;
+    transform_params.flip = NvBufSurfTransform_None;
+    transform_params.filter = NvBufSurfTransformInter_Algo3;
+
+    // Enable render profiling information
+    ctx->renderer->enableProfiling();
+
+    fds[0].fd = ctx->cam_fd;
+    fds[0].events = POLLIN;
+    while (poll(fds, 1, 5000) > 0 && !quit)
+    {
+        if (fds[0].revents & POLLIN) {
+            struct v4l2_buffer v4l2_buf;
+
+            // Dequeue camera buff
+            memset(&v4l2_buf, 0, sizeof(v4l2_buf));
+            v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+            if (ctx->capture_dmabuf)
+                v4l2_buf.memory = V4L2_MEMORY_DMABUF;
+            else
+                v4l2_buf.memory = V4L2_MEMORY_MMAP;
+            if (ioctl(ctx->cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
+                ERROR_RETURN("Failed to dequeue camera buff: %s (%d)",
+                             strerror(errno), errno);
+
+            ctx->frame++;
+            printf("frame No : %d\n", ctx->frame);
+            if (ctx->frame == ctx->save_n_frame)
+                save_frame_to_file(ctx, &v4l2_buf);
+
+            if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG) {
+                int fd = 0;
+                uint32_t width, height, pixfmt;
+                unsigned int i = 0;
+                unsigned int eos_search_size = MJPEG_EOS_SEARCH_SIZE;
+                unsigned int bytesused = v4l2_buf.bytesused;
+                uint8_t *p;
+
+                // v4l2_buf.bytesused may have padding bytes for alignment
+                // Search for EOF to get exact size
+                if (eos_search_size > bytesused)
+                    eos_search_size = bytesused;
+                for (i = 0; i < eos_search_size; i++) {
+                    p =(uint8_t *)(ctx->g_buff[v4l2_buf.index].start + bytesused);
+                    if ((*(p-2) == 0xff) && (*(p-1) == 0xd9)) {
+                        break;
+                    }
+                    bytesused--;
+                }
+
+                if (ctx->jpegdec->decodeToFd(fd, ctx->g_buff[v4l2_buf.index].start,
+                    bytesused, pixfmt, width, height) < 0)
+                    ERROR_RETURN("Cannot decode MJPEG");
+
+                /* Convert the decoded buffer to YUV420P */
+                if (NvBufSurf::NvTransform(&transform_params, fd, ctx->render_dmabuf_fd))
+                    ERROR_RETURN("Failed to convert the buffer");
+            } else {
+                NvBufSurface *pSurf = NULL;
+                if (-1 == NvBufSurfaceFromFd(ctx->g_buff[v4l2_buf.index].dmabuff_fd,
+                        (void**)(&pSurf)))
+                    ERROR_RETURN("Cannot get NvBufSurface from fd");
+                if (ctx->capture_dmabuf) {
+                    /* Cache sync for VIC operation since the data is from CPU */
+                    if (-1 == NvBufSurfaceSyncForDevice (pSurf, 0, 0))
+                        ERROR_RETURN("Cannot sync output buffer");
+                } else {
+                    /* Copies raw buffer plane contents to an NvBufsurface plane */
+                    if (-1 == Raw2NvBufSurface (ctx->g_buff[v4l2_buf.index].start, 0, 0,
+                             ctx->cam_w, ctx->cam_h, pSurf))
+                        ERROR_RETURN("Cannot copy raw buffer to NvBufsurface plane");
+                }
+
+                /*  Convert the camera buffer from YUV422 to YUV420P */
+                if (NvBufSurf::NvTransform(&transform_params, ctx->g_buff[v4l2_buf.index].dmabuff_fd, ctx->render_dmabuf_fd))
+                    ERROR_RETURN("Failed to convert the buffer");
+
+                if (ctx->cam_pixfmt == V4L2_PIX_FMT_GREY) {
+                    if(!nvbuff_do_clearchroma(ctx->render_dmabuf_fd))
+                        ERROR_RETURN("Failed to clear chroma");
+                }
+            }
+            cuda_postprocess(ctx, ctx->render_dmabuf_fd);
+
+            ctx->renderer->render(ctx->render_dmabuf_fd);
+
+            // Enqueue camera buff
+            if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &v4l2_buf))
+                ERROR_RETURN("Failed to queue camera buffers: %s (%d)",
+                             strerror(errno), errno);
+        }
+    }
+
+    // Print profiling information when streaming stops.
+    ctx->renderer->printProfilingStats();
+
+    if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
+        delete ctx->jpegdec;
+
+    return true;
+}
+
+static bool
+stop_stream(context_t * ctx)
+{
+    enum v4l2_buf_type type;
+
+    /* Stop v4l2 streaming */
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(ctx->cam_fd, VIDIOC_STREAMOFF, &type))
+        ERROR_RETURN("Failed to stop streaming: %s (%d)",
+                strerror(errno), errno);
+
+    INFO("Camera video streaming off ...");
+    return true;
+}
+
+int
+main(int argc, char *argv[])
+{
+    context_t ctx;
+    int error = 0;
+
+    set_defaults(&ctx);
+
+    CHECK_ERROR(parse_cmdline(&ctx, argc, argv), cleanup,
+                "Invalid options specified");
+
+    CHECK_ERROR(init_components(&ctx), cleanup,
+                "Failed to initialize v4l2 components");
+
+    if (ctx.cam_pixfmt == V4L2_PIX_FMT_MJPEG) {
+        CHECK_ERROR(prepare_buffers_mjpeg(&ctx), cleanup,
+                "Failed to prepare v4l2 buffs");
+    } else {
+        CHECK_ERROR(prepare_buffers(&ctx), cleanup,
+                    "Failed to prepare v4l2 buffs");
+    }
+
+    CHECK_ERROR(start_stream(&ctx), cleanup,
+                "Failed to start streaming");
+
+    CHECK_ERROR(start_capture(&ctx), cleanup,
+            "Failed to start capturing");
+
+    CHECK_ERROR(stop_stream(&ctx), cleanup,
+                "Failed to stop streaming");
+
+    cleanup:
+    if (ctx.cam_fd > 0)
+        close(ctx.cam_fd);
+
+    if (ctx.renderer != NULL)
+        delete ctx.renderer;
+
+    if (ctx.egl_display && !eglTerminate(ctx.egl_display))
+        printf("Failed to terminate EGL display connection\n");
+
+    if (ctx.g_buff != NULL)
+    {
+        for (unsigned i = 0; i < V4L2_BUFFERS_NUM; i++) {
+            if (ctx.g_buff[i].dmabuff_fd)
+                NvBufSurf::NvDestroy(ctx.g_buff[i].dmabuff_fd);
+            if (ctx.cam_pixfmt == V4L2_PIX_FMT_MJPEG)
+                munmap(ctx.g_buff[i].start, ctx.g_buff[i].size);
+        }
+        free(ctx.g_buff);
+    }
+
+    NvBufSurf::NvDestroy(ctx.render_dmabuf_fd);
+
+    if (error)
+        printf("App run failed\n");
+    else
+        printf("App run was successful\n");
+
+    return -error;
+}

+ 104 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_egl_demo/camera_v4l2_cuda.h

@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2016,2017, NVIDIA CORPORATION. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *  * Neither the name of NVIDIA CORPORATION nor the names of its
+ *    contributors may be used to endorse or promote products derived
+ *    from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <queue>
+#include "NvJpegDecoder.h"
+#include "NvBufSurface.h"
+
+#define V4L2_BUFFERS_NUM    4
+
+#define INFO(fmt, ...) \
+    if (ctx->enable_verbose) \
+        printf("INFO: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__);
+
+#define WARN(fmt, ...) \
+        printf("WARN: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__);
+
+#define CHECK_ERROR(cond, label, fmt, ...) \
+    if (!cond) { \
+        error = 1; \
+        printf("ERROR: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__); \
+        goto label; \
+    }
+
+#define ERROR_RETURN(fmt, ...) \
+    do { \
+        printf("ERROR: %s(): (line:%d) " fmt "\n", __FUNCTION__, __LINE__, ##__VA_ARGS__); \
+        return false; \
+    } while(0)
+
+typedef struct
+{
+    // Hold the user accessible pointer
+    unsigned char * start;
+    // Hold the memory length
+    unsigned int size;
+    // Hold the file descriptor of NvBuffer
+    int dmabuff_fd;
+} nv_buffer;
+
+typedef struct
+{
+    // camera v4l2 context
+    const char * cam_devname;
+    char cam_file[16];
+    int cam_fd;
+    unsigned int cam_pixfmt;
+    unsigned int cam_w;
+    unsigned int cam_h;
+    unsigned int frame;
+    unsigned int save_n_frame;
+
+    // Global buffer ptr
+    nv_buffer * g_buff;
+    bool capture_dmabuf;
+
+    // EGL renderer
+    NvEglRenderer *renderer;
+    int render_dmabuf_fd;
+    int fps;
+
+    // CUDA processing
+    bool enable_cuda;
+    EGLDisplay egl_display;
+    EGLImageKHR egl_image;
+
+    // MJPEG decoding
+    NvJPEGDecoder *jpegdec;
+
+    // Verbose option
+    bool enable_verbose;
+
+} context_t;
+
+// Correlate v4l2 pixel format and NvBuffer color format
+typedef struct
+{
+    unsigned int v4l2_pixfmt;
+    NvBufSurfaceColorFormat nvbuff_color;
+} nv_color_fmt;

+ 53 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_opencv_demo/Makefile

@@ -0,0 +1,53 @@
+###############################################################################
+#
+# Copyright (c) 2016-2017, NVIDIA CORPORATION. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#  * Neither the name of NVIDIA CORPORATION nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+###############################################################################
+
+include ../../Rules.mk
+
+EXAMPLE := cameras_opencv_demo
+
+EXAMPLE_SRC := \
+	 cameras_opencv_demo.cpp
+
+EXAMPLE_OBJS := $(EXAMPLE_SRC:.cpp=.o)
+CPPFLAGS += -I "/opt/miivii/include" `pkg-config --cflags opencv4`
+LDFLAGS += -L"/opt/miivii/lib" `pkg-config --libs opencv4`
+
+all: $(EXAMPLE)
+
+%.o: %.cpp
+	$(CPP) $(CPPFLAGS) -c $<
+
+$(EXAMPLE): $(EXAMPLE_OBJS)
+	@echo $(TOP_DIR)/source
+	$(CPP) -o $@ $(EXAMPLE_OBJS) -L"$(TOP_DIR)/source" -lmvgmslcamera $(CPPFLAGS) $(LDFLAGS)
+	@mkdir -p $(TOP_DIR)/bin
+	@cp -rf $(EXAMPLE) $(TOP_DIR)/bin
+clean:
+	$(AT)rm -rf $(OBJS) $(LIB_OBJS) $(EXAMPLE) $(EXAMPLE_OBJS)

+ 179 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_opencv_demo/cameras_opencv_demo.cpp

@@ -0,0 +1,179 @@
+#include <opencv2/core.hpp>
+#include <opencv2/videoio.hpp>
+#include <opencv2/highgui.hpp>
+#include <opencv2/imgproc.hpp>  // cv::Canny()
+#include <iostream>
+#include <csignal>
+#include <fstream>
+#include <zconf.h>
+#include <string>
+#include <chrono>
+
+#include "MvGmslCamera.h"
+
+using namespace cv;
+using std::string;
+using std::cout;
+using std::cerr;
+using std::endl;
+
+
+struct sample_context {
+    string cam_devname;
+    string pix_fmt;
+    unsigned int cam_w{1280};
+    unsigned int cam_h{720};
+    unsigned char camera_no;
+    struct sync_out_a_cfg_client_t stCameraCfgSend{};
+};
+
+static void
+print_usage(void) {
+    printf("\n\tUsage: example [OPTIONS]\n\n"
+           "\tExample: \n"
+           "\t./cameras_opencv_demo -d /dev/video0 -s 1280x720\n\n"
+           "\tSupported options:\n"
+           "\t-d\t\tSet V4l2 video device node\n"
+           "\t-s\t\tSet output resolution of video device\n"
+           "\t-n\t\tSet sync and async camera no. for example: [-n 2-4] the forward one is sync cameras no 2,the after one is async cameras no 4 (8 sync cameras is setted by default.like [-n 8-0])\n"
+           "\t-r\t\tSet sync and async camera freq for example: [-r 30-20] the forward one is sync cameras freq 30,the after one is async cameras freq 20(sync freq 30 is setted by default.like [-r 30-0]) \n"
+           "\t-b\t\tSet which cameras you want to trigger.example: [-b 0x0f-0xf0] the forward one is sync cameras which you want trigger,the after one is async cameras which you want trigger(all 8 cameras is setted sync model by default.like[-b 0xff-0])\n"
+           "\t-p\t\tSet async cameras is triggered at which angle in a circle,not set by default.\n"
+           "\t-h\t\tPrint this usage\n\n"
+           "\tNOTE: It runs infinitely until you terminate it with <ctrl+c>\n");
+}
+
+static bool
+parse_cmdline(struct sample_context *ctx, int argc, char **argv) {
+    int c;
+    int sync_camera_bit_draw = 0, async_camera_bit_draw = 0;
+
+    if (argc < 2) {
+        print_usage();
+        exit(EXIT_SUCCESS);
+    }
+
+    while ((c = getopt(argc, argv, "d:s:r:n:b:f:p:h")) != -1) {
+        switch (c) {
+            case 'd':
+                ctx->cam_devname = optarg;
+                sscanf(ctx->cam_devname.c_str(), "/dev/video%hhu", &ctx->camera_no);
+                break;
+            case 's':
+                if (sscanf(optarg, "%ux%u", &ctx->cam_w, &ctx->cam_h) != 2) {
+                    print_usage();
+                    return false;
+                }
+                break;
+            case 'r':
+                if (sscanf(optarg, "%hhu-%hhu", &ctx->stCameraCfgSend.sync_freq, &ctx->stCameraCfgSend.async_freq) !=
+                    2) {
+                    print_usage();
+                    return false;
+                }
+                printf("sync_freq : %d async_freq:%d\n", ctx->stCameraCfgSend.sync_freq,
+                       ctx->stCameraCfgSend.async_freq);
+                break;
+            case 'n':
+                if (sscanf(optarg, "%hhu-%hhu", &ctx->stCameraCfgSend.sync_camera_num,
+                           &ctx->stCameraCfgSend.async_camera_num) != 2) {
+                    print_usage();
+                    return false;
+                }
+                printf("sync_camera_num : %d async_camera_num:%d\n", (ctx->stCameraCfgSend.sync_camera_num),
+                       (ctx->stCameraCfgSend.async_camera_num));
+                break;
+            case 'b':
+                if (sscanf(optarg, "%x-%x", &sync_camera_bit_draw, &async_camera_bit_draw) != 2) {
+                    print_usage();
+                    return false;
+                }
+                ctx->stCameraCfgSend.sync_camera_bit_draw = (unsigned char) sync_camera_bit_draw;
+                ctx->stCameraCfgSend.async_camera_bit_draw = (unsigned char) async_camera_bit_draw;
+                printf("sync_camera_bit_draw : %d async_camera_bit_draw:%d\n",
+                       ctx->stCameraCfgSend.sync_camera_bit_draw, ctx->stCameraCfgSend.async_camera_bit_draw);
+                break;
+            case 'p':
+                if (sscanf(optarg, "%hhu-%hhu-%hhu-%hhu-%hhu-%hhu-%hhu-%hhu",
+                           &ctx->stCameraCfgSend.async_camera_pos[0], &ctx->stCameraCfgSend.async_camera_pos[1],
+                           &ctx->stCameraCfgSend.async_camera_pos[2],
+                           &ctx->stCameraCfgSend.async_camera_pos[3], &ctx->stCameraCfgSend.async_camera_pos[4],
+                           &ctx->stCameraCfgSend.async_camera_pos[5], &ctx->stCameraCfgSend.async_camera_pos[6],
+                           &ctx->stCameraCfgSend.async_camera_pos[7]) != 8) {
+                    print_usage();
+                    return false;
+                }
+                printf("pos:[0]:%hhu [1]:%hhu [2]:%hhu [3]:%hhu [4]:%hhu [5]:%hhu [6]:%hhu [7]:%hhu \n",
+                       ctx->stCameraCfgSend.async_camera_pos[0], ctx->stCameraCfgSend.async_camera_pos[1],
+                       ctx->stCameraCfgSend.async_camera_pos[2],
+                       ctx->stCameraCfgSend.async_camera_pos[3], ctx->stCameraCfgSend.async_camera_pos[4],
+                       ctx->stCameraCfgSend.async_camera_pos[5], ctx->stCameraCfgSend.async_camera_pos[6],
+                       ctx->stCameraCfgSend.async_camera_pos[7]);
+                break;
+            case 'f':
+                ctx->pix_fmt = optarg;
+                break;
+            case 'h':
+                print_usage();
+                exit(EXIT_SUCCESS);
+                break;
+            default:
+                print_usage();
+                return false;
+        }
+    }
+    return true;
+}
+
+int main(int argc, char *argv[]) {
+    struct sample_context ctx = {};
+    ctx.stCameraCfgSend.async_camera_num = 0;
+    ctx.stCameraCfgSend.async_freq = 0;
+    ctx.stCameraCfgSend.async_camera_bit_draw = 0;
+    ctx.stCameraCfgSend.sync_camera_num = 8;
+    ctx.stCameraCfgSend.sync_freq = 30;
+    ctx.stCameraCfgSend.sync_camera_bit_draw = 0xff;
+    ctx.cam_devname = "/dev/video0";
+    ctx.cam_w = 1280;
+    ctx.cam_h = 720;
+    ctx.pix_fmt = "UYVY";
+
+    if (!parse_cmdline(&ctx, argc, argv)) {
+        return -1;
+    }
+
+    char Gstring[256];
+    sprintf(Gstring, "v4l2src device=%s ! video/x-raw, width=%d, height=%d, format=UYVY ! queue ! appsink",
+            ctx.cam_devname.c_str(), ctx.cam_w, ctx.cam_h);
+    miivii::MvGmslCamera mvcam(ctx.stCameraCfgSend);
+    cout << "Opening camera..." << endl;
+    VideoCapture capture(Gstring, CAP_GSTREAMER);
+    if (!capture.isOpened()) {
+        cerr << "ERROR: Can't initialize camera capture" << endl;
+        return 1;
+    }
+    Mat frame;
+    Mat frameRaw;
+    bool stop = false;
+    uint64_t timestamp = 0;
+    unsigned int colorConvert;
+    if (ctx.pix_fmt == "YUYV") {
+        colorConvert = COLOR_YUV2BGR_YUYV;
+    } else {
+        colorConvert = COLOR_YUV2BGR_UYVY;
+    }
+
+    while (!stop) {
+        capture.read(frameRaw);
+        /* The image must be acquired before the timestamp is queried */
+        mvcam.GetGmslTimeStamp(ctx.camera_no, timestamp);
+        cvtColor(frameRaw, frame, colorConvert);
+        namedWindow("Video" + std::to_string(ctx.camera_no), 0);
+        resizeWindow("Video" + std::to_string(ctx.camera_no), ctx.cam_w, ctx.cam_h);
+        imshow("Video" + std::to_string(ctx.camera_no), frame);
+        std::cout << "Timestamp from MIIVII  :  " << timestamp << std::endl;
+        if (waitKey(1) == 27) {
+            stop = true;
+        }
+    }
+}

+ 53 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_sdk_demo/Makefile

@@ -0,0 +1,53 @@
+###############################################################################
+#
+# Copyright (c) 2016-2017, NVIDIA CORPORATION. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#  * Neither the name of NVIDIA CORPORATION nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+###############################################################################
+
+include ../../Rules.mk
+
+EXAMPLE := cameras_sdk_demo
+
+EXAMPLE_SRC := \
+	 cameras_sdk_demo.cpp
+
+EXAMPLE_OBJS := $(EXAMPLE_SRC:.cpp=.o)
+CPPFLAGS += -I "/opt/miivii/include" `pkg-config --cflags opencv4`
+LDFLAGS += -L"/opt/miivii/lib" `pkg-config --libs opencv4`
+
+all: $(EXAMPLE)
+
+%.o: %.cpp
+	$(CPP) $(CPPFLAGS) -c $<
+
+$(EXAMPLE): $(EXAMPLE_OBJS)
+	@echo $(TOP_DIR)/source
+	$(CPP) -o $@ $(EXAMPLE_OBJS) -L"$(TOP_DIR)/source" -lmvgmslcamera $(CPPFLAGS) $(LDFLAGS)
+	@mkdir -p $(TOP_DIR)/bin
+	@cp -rf $(EXAMPLE) $(TOP_DIR)/bin
+clean:
+	$(AT)rm -rf $(OBJS) $(LIB_OBJS) $(EXAMPLE) $(EXAMPLE_OBJS)

+ 294 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_sdk_demo/cameras_sdk_demo.cpp

@@ -0,0 +1,294 @@
+//
+// Created by alex on 18-12-8.
+//
+//
+#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <string>
+#include <iostream>
+#include <zconf.h>
+#include <csignal>
+#include <thread>
+#include "MvGmslCamera.h"
+#include <fstream>
+#include <chrono>
+
+using namespace std::chrono;
+
+using std::string;
+sig_atomic_t exitRequested = 0;
+uint camera_num = 1;
+
+struct sync_out_a_cfg_client_t stCameraCfgSend = {};
+
+char dev_node[32] = "/dev/video0";          /* default first device opened when -d is not given */
+std::string camera_fmt_str = "UYVY";        /* default capture image format */
+std::string output_fmt_str = "BGRA32";      /* default output image format; see README for supported types */
+uint cam_w = 1280;                          /* default capture width when -s is not given */
+uint cam_h = 720;                           /* default capture height when -s is not given */
+uint out_w = 640;                           /* on-screen output video width */
+uint out_h = 360;                           /* on-screen output video height */
+uint64_t timestampbefore[8] = {0};          /* capture timestamp of the previous frame */
+uint64_t LinuxGetFrameTimeBefore[8] = {0};  /* system time when the previous frame was captured */
+std::string g_camera_dev = "NONE";          /* default camera name */
+
+void handler(int) {
+    std::cout << "will exit..." << std::endl;
+    exitRequested = true;
+}
+/* Records timestamps at image capture; timing information whose timestamp interval does not match the frame rate is written to /tmp/cameras_sdk_demo.log, and a separate log file is generated per device */
+void CheckTimeStampLog(uint64_t timestamp,uint8_t camera_no)
+{
+    uint64_t FrameInterval = 0;
+    char buffer[256] = {0};
+    uint64_t LinuxFrameInterval{};
+    struct timeval cur_time;
+    uint64_t LinuxGetFrameTime{};
+    uint64_t time_interval{};
+    uint64_t FrameTransferDelay{};
+    FILE * file_diff = NULL;
+    char file_name[100] = {0};
+    if(0 == timestamp)
+    {
+        /*camera Data is not available during camera preparation*/
+        return;
+    }
+    gettimeofday(&cur_time, NULL);
+    LinuxGetFrameTime = cur_time.tv_sec * 1000000000 + cur_time.tv_usec * 1000;
+    FrameInterval = timestamp - timestampbefore[camera_no];
+    LinuxFrameInterval = LinuxGetFrameTime - LinuxGetFrameTimeBefore[camera_no];
+    LinuxGetFrameTimeBefore[camera_no] = LinuxGetFrameTime;
+    FrameTransferDelay = LinuxGetFrameTime - timestamp;
+    if (stCameraCfgSend.sync_freq != 0)
+        time_interval = 1000000000 / stCameraCfgSend.sync_freq;
+    else
+        time_interval = 1000000000 / stCameraCfgSend.async_freq;
+    if(timestampbefore[camera_no] == 0) { //first frame
+        FrameInterval =  time_interval;
+        LinuxFrameInterval = time_interval;
+    }
+    timestampbefore[camera_no] = timestamp;
+/*    if((FrameInterval > (time_interval + 15000000) || FrameInterval < (time_interval - 15000000)) 
+        || (LinuxFrameInterval > (time_interval + 15000000) || LinuxFrameInterval < (time_interval - 15000000)) 
+        || FrameTransferDelay < 70000000 || FrameTransferDelay > 90000000)
+    {
+        printf("camera_no==========%d\n",camera_no);
+        printf("timestamp==========%ld\n",timestamp/1000000);
+        printf("FrameInterva===-------------=======%ld\n",FrameInterval/1000000);
+        printf("LinuxGetFrameTime======------------====%ld\n",LinuxGetFrameTime/1000000);
+        printf("LinuxFrameInterval======---------------====%ld\n",LinuxFrameInterval/1000000);
+        printf("FrameTransferDelay======-----------------------------------====%ld\n",FrameTransferDelay/1000000);
+    }*/
+    if(((FrameInterval > (time_interval + 12000000) || FrameInterval < (time_interval - 12000000))) || (FrameInterval == 0))
+    {
+        sprintf(file_name,"/tmp/cameras_sdk_demo_video%d.log",camera_no);
+        file_diff = fopen(file_name,"a+");
+        sprintf(buffer,"Timestamp : %ld FrameInterval  :  %ld FrameTransferDelay : %ld LinuxGetFrameTime : %ld LinuxFrameInterval : %ld\n"
+                        ,timestamp,FrameInterval,FrameTransferDelay,LinuxGetFrameTime,LinuxFrameInterval);
+        fwrite(buffer,sizeof(char),strlen(buffer),file_diff);
+        fflush(file_diff);
+        fclose(file_diff);
+    }
+    if(atoi(getenv("CHECK_TIME")))
+    {
+        printf("Timestamp : %ld FrameInterval : %ld FrameTransferDelay : %ld   LinuxGetFrameTime : %ld LinuxFrameInterval : %ld\n"
+                        ,timestamp,FrameInterval,FrameTransferDelay,LinuxGetFrameTime,LinuxFrameInterval);
+    }
+}
+/* Description of the demo's command-line parameters; see the README for details */
+static void
+print_usage(void) {
+    printf("\n\tUsage: example [OPTIONS]\n\n"
+           "\tExample: \n"
+           "\t./cameras_sdk_demo -d /dev/video0 -s 1280x720\n\n"
+           "\tSupported options:\n"
+           "\t-d\t\tSet V4l2 video device node\n"
+           "\t-m\t\tSet V4l2 video num\n"
+           "\t-s\t\tSet output resolution of video device\n"
+           "\t-n\t\tSet sync and async camera no. for example: [-n 2-4] the forward one is sync cameras no 2,the after one is async cameras no 4 (8 sync cameras is setted by default.like [-n 8-0])\n"
+           "\t-r\t\tSet sync and async camera freq for example: [-r 30-20] the forward one is sync cameras freq 30,the after one is async cameras freq 20(sync freq 30 is setted by default.like [-r 30-0]) \n"
+           "\t-b\t\tSet which cameras you want to trigger.example: [-b 0x0f-0xf0] the forward one is sync cameras which you want trigger,the after one is async cameras which you want trigger(all 8 cameras is setted sync model by default.like[-b 0xff-0])\n"
+           "\t-p\t\tSet async cameras is triggered at which angle in a circle,not set by default.\n"
+           "\t-c\t\tEnter the name of the camera that requires frame counting .example:isx031\n"
+           "\t-h\t\tPrint this usage\n\n"
+           "\tNOTE: It runs infinitely until you terminate it with <ctrl+c>\n");
+}
+/* Parsing of the demo's command-line parameters; see the README for details */
+static bool
+parse_cmdline(int argc, char **argv) {
+    int c;
+    unsigned int tmp_w;
+    unsigned int tmp_h;
+
+    int sync_camera_bit_draw = 0,async_camera_bit_draw = 0;
+
+    if (argc < 2) {
+        print_usage();
+        exit(EXIT_SUCCESS);
+    }
+
+    while ((c = getopt(argc, argv, "d:s:r:n:b:f:p:m:c:h")) != -1) {
+        switch (c) {
+            case 'd':
+                strcpy(dev_node, optarg);
+                break;
+            case 's':
+                if (sscanf(optarg, "%dx%d",
+                           &tmp_w, &tmp_h) != 2) {
+                    return false;
+                }
+                cam_w = tmp_w;
+                cam_h = tmp_h;
+                break;
+            case 'f':
+                camera_fmt_str = optarg;
+                break;
+            case 'm':
+                camera_num = strtol(optarg, NULL, 10);
+                break;
+            case 'r':
+                if (sscanf(optarg, "%hhu-%hhu", &stCameraCfgSend.sync_freq, &stCameraCfgSend.async_freq) != 2) {
+                    print_usage();
+                    return false;
+                }
+                printf("sync_freq : %d async_freq:%d\n", stCameraCfgSend.sync_freq, stCameraCfgSend.async_freq);
+                break;
+            case 'n':
+                if (sscanf(optarg, "%hhu-%hhu", &stCameraCfgSend.sync_camera_num, &stCameraCfgSend.async_camera_num) != 2) {
+                    print_usage();
+                    return false;
+                }
+                printf("sync_camera_num : %d async_camera_num:%d\n", stCameraCfgSend.sync_camera_num, stCameraCfgSend.async_camera_num);
+                break;
+            case 'b':
+                if (sscanf(optarg, "%x-%x", &sync_camera_bit_draw, &async_camera_bit_draw) != 2) {
+                    print_usage();
+                    return false;
+                }
+                stCameraCfgSend.sync_camera_bit_draw = (unsigned char) sync_camera_bit_draw;
+                stCameraCfgSend.async_camera_bit_draw = (unsigned char) async_camera_bit_draw;
+                printf("sync_camera_bit_draw : %d async_camera_bit_draw:%d\n",
+                       stCameraCfgSend.sync_camera_bit_draw, stCameraCfgSend.async_camera_bit_draw);
+                break;
+            case 'p':
+                if (sscanf(optarg, "%hhu-%hhu-%hhu-%hhu-%hhu-%hhu-%hhu-%hhu",
+                           &stCameraCfgSend.async_camera_pos[0], &stCameraCfgSend.async_camera_pos[1],
+                           &stCameraCfgSend.async_camera_pos[2], &stCameraCfgSend.async_camera_pos[3],
+                           &stCameraCfgSend.async_camera_pos[4], &stCameraCfgSend.async_camera_pos[5],
+                           &stCameraCfgSend.async_camera_pos[6], &stCameraCfgSend.async_camera_pos[7]) != 8) {
+                    print_usage();
+                    return false;
+                }
+                printf("pos:[0]:%hhu [1]:%hhu [2]:%hhu [3]:%hhu [4]:%hhu [5]:%hhu [6]:%hhu [7]:%hhu \n",
+                       stCameraCfgSend.async_camera_pos[0], stCameraCfgSend.async_camera_pos[1],
+                       stCameraCfgSend.async_camera_pos[2], stCameraCfgSend.async_camera_pos[3],
+                       stCameraCfgSend.async_camera_pos[4], stCameraCfgSend.async_camera_pos[5],
+                       stCameraCfgSend.async_camera_pos[6], stCameraCfgSend.async_camera_pos[7]);
+                break;
+            case 'c':
+                g_camera_dev = optarg;
+                break;
+            case 'h':
+                print_usage();
+                exit(EXIT_SUCCESS);
+                break;
+            default:
+                print_usage();
+                return false;
+        }
+    }
+    return true;
+}
+/* Demo entry point: opens one display window per camera and repeatedly calls GetImageCvMat and GetImagePtr to fetch images and timestamps and show them in the windows. */
+int main(int argc, char *argv[]) {
+
+    camera_context_t ctx[8] = {};
+
+    stCameraCfgSend.async_camera_num = 0;
+    stCameraCfgSend.async_freq = 0;
+    stCameraCfgSend.async_camera_bit_draw = 0;
+    stCameraCfgSend.sync_camera_num = 8;
+    stCameraCfgSend.sync_freq = 30;
+    stCameraCfgSend.sync_camera_bit_draw = 0xff;
+
+    if (!parse_cmdline(argc, argv)) {
+        return -1;
+    }
+    char dev_node_tmp = dev_node[10];
+    for(int i = 0; i < camera_num; i++){
+        dev_node[10] = dev_node_tmp + i;
+        ctx[i].dev_node = dev_node;
+        ctx[i].camera_fmt_str = camera_fmt_str;
+        ctx[i].output_fmt_str = output_fmt_str;
+        ctx[i].cam_w = cam_w;
+        ctx[i].cam_h = cam_h;
+        ctx[i].out_w = out_w;
+        ctx[i].out_h = out_h;
+    }
+    miivii::MvGmslCamera mvcam(ctx, camera_num, stCameraCfgSend);
+
+    std::string windowName("DisplayCamera ");
+    for (uint32_t i = 0; i < camera_num; i++) {
+        cv::namedWindow(windowName + std::to_string(i), cv::WindowFlags::WINDOW_AUTOSIZE);
+        cv::moveWindow(windowName + std::to_string(i), 200 * i, 200 * i);
+    }
+    cv::Mat outMat[camera_num];
+    uint8_t *outbuf[camera_num];
+    cv::Mat imgbuf[camera_num];
+    signal(SIGINT, &handler);
+    bool quit = false;
+    uint64_t timestamp;
+    while (!quit) {
+        if (exitRequested) {
+            quit = true;
+            break;
+        }
+        uint8_t camera_no = dev_node[10] - 0x30;
+        /*use cv data to get image*/
+        if (mvcam.GetImageCvMat(outMat, timestamp, camera_no, g_camera_dev)) {
+            for (uint32_t i = 0; i < camera_num; i++) {
+                if ( ctx[i].output_fmt_str == "UYVY") {
+                    cv::cvtColor(outMat[i], outMat[i], cv::COLOR_YUV2BGR_UYVY);
+                    cv::imshow(windowName + std::to_string(i), outMat[i]);
+                } else if (ctx[i].output_fmt_str == "ABGR32") {
+                    cv::cvtColor(outMat[i], outMat[i], cv::COLOR_RGBA2BGR);
+                    cv::imshow(windowName + std::to_string(i), outMat[i]);
+                } else if (ctx[i].output_fmt_str == "BGRA32") {
+                    cv::cvtColor(outMat[i], outMat[i], cv::COLOR_BGRA2BGR);
+                    cv::imshow(windowName + std::to_string(i), outMat[i]);
+                }
+                std::cout << "cv_" << std::endl;
+                CheckTimeStampLog(timestamp,dev_node_tmp - 0x30 + i);
+            }
+        } else {
+            std::cerr << "Can't get image from camera." << std::endl;
+        }
+        if (cv::waitKey(1) == 27) {// Wait for 'esc' key press to exit
+            break;
+        }
+        /*use raw data to get image*/
+        if (mvcam.GetImagePtr(outbuf, timestamp, camera_no, g_camera_dev)) {
+            for (uint32_t i = 0; i < camera_num; i++) {
+                if ( ctx[i].output_fmt_str == "UYVY") {
+                    imgbuf[i] = cv::Mat(out_h, out_w, CV_8UC2, outbuf[i]);
+                    cv::Mat mrgba(out_h, out_w, CV_8UC3);
+                    cv::cvtColor(imgbuf[i], mrgba, cv::COLOR_YUV2BGR_UYVY);
+                    cv::imshow(windowName + std::to_string(i), mrgba);
+                } else if (ctx[i].output_fmt_str == "ABGR32") {
+                    imgbuf[i] = cv::Mat(out_h, out_w , CV_8UC4, outbuf[i]);
+                    cv::cvtColor(imgbuf[i], imgbuf[i], cv::COLOR_RGBA2BGR);
+                    cv::imshow(windowName + std::to_string(i), imgbuf[i]);
+                } else if (ctx[i].output_fmt_str == "BGRA32") {
+                    imgbuf[i] = cv::Mat(out_h, out_w , CV_8UC4, outbuf[i]);
+                    cv::cvtColor(imgbuf[i], imgbuf[i], cv::COLOR_BGRA2BGR);
+                    cv::imshow(windowName + std::to_string(i), imgbuf[i]);
+                }
+                std::cout << "mvcam" << std::endl;
+                CheckTimeStampLog(timestamp,dev_node_tmp - 0x30 + i);
+            }
+        } else {
+            std::cerr << "Can't get image from camera." << std::endl;
+        }
+        if (cv::waitKey(1) == 27) {// Wait for 'esc' key press to exit
+            break;
+        }
+    }
+    return 0;
+}
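
Note: the CheckTimeStampLog helper above boils down to comparing each measured frame interval against the expected period of 1e9/freq nanoseconds with a 5 ms tolerance, and logging the outliers. A minimal standalone sketch of just that check, assuming this is all the logging decision depends on (the function name and the main driver below are illustrative, not part of the demo):

```cpp
#include <cstdint>
#include <cstdio>

// Returns true when a measured frame interval deviates from the expected
// 1e9/freq_hz ns period by more than 5 ms, mirroring the tolerance used
// in CheckTimeStampLog above. Names here are illustrative only.
static bool FrameIntervalOutOfRange(uint64_t interval_ns, unsigned freq_hz) {
    if (freq_hz == 0) return true;                 // no valid frequency configured
    const uint64_t expected_ns  = 1000000000ull / freq_hz;
    const uint64_t tolerance_ns = 5000000ull;      // 5 ms
    return interval_ns == 0 ||
           interval_ns > expected_ns + tolerance_ns ||
           interval_ns < expected_ns - tolerance_ns;
}

int main() {
    // 30 Hz camera: ~33.3 ms expected; 40 ms is flagged (1), 34 ms is not (0).
    std::printf("%d %d\n", FrameIntervalOutOfRange(40000000, 30),
                           FrameIntervalOutOfRange(34000000, 30));
    return 0;
}
```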

+ 54 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_sdk_noopencv_demo/Makefile

@@ -0,0 +1,54 @@
+###############################################################################
+#
+# Copyright (c) 2016-2017, NVIDIA CORPORATION. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in the
+#    documentation and/or other materials provided with the distribution.
+#  * Neither the name of NVIDIA CORPORATION nor the names of its
+#    contributors may be used to endorse or promote products derived
+#    from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+###############################################################################
+
+include ../../Rules.mk
+
+EXAMPLE := cameras_sdk_noopencv_demo
+
+EXAMPLE_SRC := \
+	 cameras_sdk_noopencv_demo.cpp
+
+EXAMPLE_OBJS := $(EXAMPLE_SRC:.cpp=.o)
+
+CPPFLAGS += -I "/opt/miivii/include"  -D MIIVII_NO_OPENCV
+LDFLAGS += -L /opt/miivii/lib
+
+all: $(EXAMPLE)
+
+%.o: %.cpp
+	$(CPP) $(CPPFLAGS) -c $<
+
+$(EXAMPLE): $(EXAMPLE_OBJS)
+	@echo $(TOP_DIR)/source
+	$(CPP) -o $@ $(EXAMPLE_OBJS) -L "$(TOP_DIR)/source" -lmvgmslcamera_noopencv $(CPPFLAGS) $(LDFLAGS)
+	@mkdir -p $(TOP_DIR)/bin
+	@cp -rf $(EXAMPLE) $(TOP_DIR)/bin
+clean:
+	$(AT)rm -rf $(OBJS) $(LIB_OBJS) $(EXAMPLE) $(EXAMPLE_OBJS)
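
Note: the difference from the OpenCV samples is that this Makefile compiles with `-D MIIVII_NO_OPENCV` and links `-lmvgmslcamera_noopencv`, so the resulting binary has no OpenCV dependency. How the MiiVii headers consume that macro is not part of this commit; the sketch below only illustrates the general compile-time switch such a define enables, and every name in it is hypothetical rather than taken from the MiiVii API:

```cpp
// Hypothetical illustration of a MIIVII_NO_OPENCV-style switch: the same
// call site compiles against either a raw-buffer sink or an OpenCV sink.
// None of these names come from the MiiVii SDK.
#include <cstdint>
#include <cstdio>

#ifdef MIIVII_NO_OPENCV
// Raw-buffer path: consume the frame bytes without cv::Mat.
static void ConsumeFrame(const uint8_t *data, int width, int height) {
    std::printf("got %dx%d frame at %p\n", width, height,
                static_cast<const void *>(data));
}
#else
#include <opencv2/opencv.hpp>
// OpenCV path: wrap the buffer in a cv::Mat and display it.
static void ConsumeFrame(const uint8_t *data, int width, int height) {
    cv::Mat img(height, width, CV_8UC4, const_cast<uint8_t *>(data));
    cv::imshow("frame", img);
    cv::waitKey(1);
}
#endif

int main() {
    uint8_t dummy[2 * 2 * 4] = {0};    // 2x2 ABGR32 test buffer
    ConsumeFrame(dummy, 2, 2);
    return 0;
}
```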

+ 251 - 0
test/test-mwsdk-gmsl_camera/samples/cameras_sdk_noopencv_demo/cameras_sdk_noopencv_demo.cpp

@@ -0,0 +1,251 @@
+//
+// Created by alex on 18-12-8.
+//
+//
+#include <string.h>
+#include <iostream>
+#include <zconf.h>
+#include <csignal>
+#include <thread>
+#include "MvGmslCamera.h"
+#include <fstream>
+#include <chrono>
+#include <cstdio>      /* printf, sprintf, fopen */
+#include <cstdlib>     /* getenv, atoi, strtol, exit */
+#include <unistd.h>    /* getopt */
+#include <sys/time.h>  /* gettimeofday */
+
+using namespace std::chrono;
+
+using std::string;
+volatile sig_atomic_t exitRequested = 0;
+uint camera_num = 1;
+
+struct sync_out_a_cfg_client_t stCameraCfgSend = {};
+
+char dev_node[32] = "/dev/video0";/* Default device opened when -d is not given */
+std::string camera_fmt_str = "UYVY";/* Default capture pixel format */
+std::string output_fmt_str = "ABGR32";/* Default output pixel format; see README for supported types */
+uint cam_w = 1280;/* Default capture width when -s is not given */
+uint cam_h = 720;/* Default capture height when -s is not given */
+uint out_w = 640;/* Width of the on-screen output video */
+uint out_h = 360;/* Height of the on-screen output video */
+uint64_t timestampbefore[8] = {0};/* Capture timestamp of the previous frame, per camera */
+uint64_t LinuxGetFrameTimeBefore[8] = {0};/* System time when the previous frame was fetched, per camera */
+std::string g_camera_dev = "NONE";/* Default camera name */
+
+void handler(int) {
+    std::cout << "will exit..." << std::endl;
+    exitRequested = true;
+}
+
+/* Records the capture timestamp of each frame; whenever the frame interval does not match the configured frame rate, the timing details are appended to /tmp/cameras_sdk_demo_video<N>.log, one log file per device. */
+void CheckTimeStampLog(uint64_t timestamp, uint8_t camera_no) {
+    uint64_t FrameInterval = 0;
+    char buffer[256] = {0};
+    uint64_t LinuxFrameInterval{};
+    struct timeval cur_time;
+    uint64_t LinuxGetFrameTime{};
+    uint64_t time_interval{};
+    uint64_t FrameTransferDelay{};
+    FILE *file_diff = NULL;
+    char file_name[100] = {0};
+    if (0 == timestamp) {
+        /*camera Data is not available during camera preparation*/
+        return;
+    }
+    gettimeofday(&cur_time, NULL);
+    LinuxGetFrameTime = (uint64_t)cur_time.tv_sec * 1000000000ULL + (uint64_t)cur_time.tv_usec * 1000ULL;
+    FrameInterval = timestamp - timestampbefore[camera_no];
+    LinuxFrameInterval = LinuxGetFrameTime - LinuxGetFrameTimeBefore[camera_no];
+    LinuxGetFrameTimeBefore[camera_no] = LinuxGetFrameTime;
+    FrameTransferDelay = LinuxGetFrameTime - timestamp;
+    if (stCameraCfgSend.sync_freq != 0) {
+        time_interval = 1000000000 / stCameraCfgSend.sync_freq;
+    } else if (stCameraCfgSend.async_freq != 0) {
+        time_interval = 1000000000 / stCameraCfgSend.async_freq;
+    } else {
+        return; /* neither frequency configured; cannot derive an expected interval */
+    }
+    if (timestampbefore[camera_no] == 0) { //first frame
+        FrameInterval = time_interval;
+        LinuxFrameInterval = time_interval;
+    }
+    timestampbefore[camera_no] = timestamp;
+/*    if((FrameInterval > (time_interval + 5000000) || FrameInterval < (time_interval - 5000000)))
+    {
+        printf("camera_no==========%d\n",camera_no);
+        printf("timestamp==========%ld\n",timestamp/1000000);
+        printf("FrameInterva===-------------=======%ld\n",FrameInterval/1000000);
+        printf("LinuxGetFrameTime======------------------====%ld\n",LinuxGetFrameTime/1000000);
+        printf("LinuxFrameInterval======-----------------------------------====%ld\n",LinuxFrameInterval/1000000);
+    }*/
+    if (((FrameInterval > (time_interval + 5000000) || FrameInterval < (time_interval - 5000000)) ||
+         FrameInterval == 0)) {
+        sprintf(file_name, "/tmp/cameras_sdk_demo_video%d.log", camera_no);
+        file_diff = fopen(file_name, "a+");
+        sprintf(buffer,
+                "Timestamp : %ld FrameInterval  :  %ld FrameTransferDelay : %ld LinuxGetFrameTime : %ld LinuxFrameInterval : %ld\n",
+                timestamp, FrameInterval, FrameTransferDelay, LinuxGetFrameTime, LinuxFrameInterval);
+        fwrite(buffer, sizeof(char), strlen(buffer), file_diff);
+        fflush(file_diff);
+        fclose(file_diff);
+    }
+    const char *check_time = getenv("CHECK_TIME");
+    if (check_time != NULL && atoi(check_time)) {
+        printf("Timestamp : %ld FrameInterval : %ld FrameTransferDelay : %ld   LinuxGetFrameTime : %ld LinuxFrameInterval : %ld\n",
+               timestamp, FrameInterval, FrameTransferDelay, LinuxGetFrameTime, LinuxFrameInterval);
+    }
+}
+
+/* Prints the demo's command-line options; see README for a detailed explanation. */
+static void print_usage(void) {
+    printf("\n\tUsage: example [OPTIONS]\n\n"
+           "\tExample: \n"
+           "\t./cameras_sdk_demo -d /dev/video0 -s 1280x720\n\n"
+           "\tSupported options:\n"
+           "\t-d\t\tSet V4l2 video device node\n"
+           "\t-m\t\tSet V4l2 video num\n"
+           "\t-s\t\tSet output resolution of video device\n"
+           "\t-n\t\tSet sync and async camera no. for example: [-n 2-4] the forward one is sync cameras no 2,the after one is async cameras no 4 (8 sync cameras is setted by default.like [-n 8-0])\n"
+           "\t-r\t\tSet sync and async camera freq for example: [-r 30-20] the forward one is sync cameras freq 30,the after one is async cameras freq 20(sync freq 30 is setted by default.like [-r 30-0]) \n"
+           "\t-b\t\tSet which cameras you want to trigger.example: [-b 0x0f-0xf0] the forward one is sync cameras which you want trigger,the after one is async cameras which you want trigger(all 8 cameras is setted sync model by default.like[-b 0xff-0])\n"
+           "\t-p\t\tSet async cameras is triggered at which angle in a circle,not set by default.\n"
+           "\t-c\t\tEnter the name of the camera that requires frame counting .example:isx031\n"
+           "\t-h\t\tPrint this usage\n\n"
+           "\tNOTE: It runs infinitely until you terminate it with <ctrl+c>\n");
+}
+
+/* Parses the demo's command-line options; see README for a detailed explanation. */
+static bool parse_cmdline(int argc, char **argv) {
+    int c;
+    unsigned int tmp_w;
+    unsigned int tmp_h;
+
+    unsigned int sync_camera_bit_draw = 0, async_camera_bit_draw = 0;
+
+    if (argc < 2) {
+        print_usage();
+        exit(EXIT_SUCCESS);
+    }
+
+    while ((c = getopt(argc, argv, "d:s:r:n:b:f:p:m:c:h")) != -1) {
+        switch (c) {
+            case 'd':
+                strcpy(dev_node, optarg);
+                break;
+            case 's':
+                if (sscanf(optarg, "%dx%d",
+                           &tmp_w, &tmp_h) != 2) {
+                    return false;
+                }
+                cam_w = tmp_w;
+                cam_h = tmp_h;
+                break;
+            case 'f':
+                camera_fmt_str = optarg;
+                break;
+            case 'm':
+                camera_num = strtol(optarg, NULL, 10);
+                break;
+            case 'r':
+                if (sscanf(optarg, "%hhu-%hhu", &stCameraCfgSend.sync_freq, &stCameraCfgSend.async_freq) != 2) {
+                    print_usage();
+                    return false;
+                }
+                printf("sync_freq : %d async_freq:%d\n", stCameraCfgSend.sync_freq, stCameraCfgSend.async_freq);
+                break;
+            case 'n':
+                if (sscanf(optarg, "%hhu-%hhu", &stCameraCfgSend.sync_camera_num, &stCameraCfgSend.async_camera_num) !=
+                    2) {
+                    print_usage();
+                    return false;
+                }
+                printf("sync_camera_num : %d async_camera_num:%d\n", (stCameraCfgSend.sync_camera_num),
+                       (stCameraCfgSend.async_camera_num));
+                break;
+            case 'b':
+                if (sscanf(optarg, "%x-%x", &sync_camera_bit_draw, &async_camera_bit_draw) != 2) {
+                    print_usage();
+                    return false;
+                }
+                stCameraCfgSend.sync_camera_bit_draw = (unsigned char) sync_camera_bit_draw;
+                stCameraCfgSend.async_camera_bit_draw = (unsigned char) async_camera_bit_draw;
+                printf("sync_camera_bit_draw : %d async_camera_bit_draw:%d\n", stCameraCfgSend.sync_camera_bit_draw,
+                       stCameraCfgSend.async_camera_bit_draw);
+                break;
+            case 'p':
+                if (sscanf(optarg, "%hhu-%hhu-%hhu-%hhu-%hhu-%hhu-%hhu-%hhu",
+                           &stCameraCfgSend.async_camera_pos[0], &stCameraCfgSend.async_camera_pos[1],
+                           &stCameraCfgSend.async_camera_pos[2],
+                           &stCameraCfgSend.async_camera_pos[3], &stCameraCfgSend.async_camera_pos[4],
+                           &stCameraCfgSend.async_camera_pos[5], &stCameraCfgSend.async_camera_pos[6],
+                           &stCameraCfgSend.async_camera_pos[7]) != 8) {
+                    print_usage();
+                    return false;
+                }
+                printf("pos:[0]:%hhu [1]:%hhu [2]:%hhu [3]:%hhu [4]:%hhu [5]:%hhu [6]:%hhu [7]:%hhu \n",
+                       stCameraCfgSend.async_camera_pos[0], stCameraCfgSend.async_camera_pos[1],
+                       stCameraCfgSend.async_camera_pos[2],
+                       stCameraCfgSend.async_camera_pos[3], stCameraCfgSend.async_camera_pos[4],
+                       stCameraCfgSend.async_camera_pos[5], stCameraCfgSend.async_camera_pos[6],
+                       stCameraCfgSend.async_camera_pos[7]);
+                break;
+            case 'c':
+                g_camera_dev = optarg;
+                break;
+            case 'h':
+                print_usage();
+                exit(EXIT_SUCCESS);
+                break;
+            default:
+                print_usage();
+                return false;
+        }
+    }
+    return true;
+}
+
+/* Demo entry point: repeatedly calls GetImagePtr to fetch an image and timestamp from each camera and checks the timestamps (no OpenCV display in this sample). */
+int main(int argc, char *argv[]) {
+
+    camera_context_t ctx[8] = {};
+
+    stCameraCfgSend.async_camera_num = 0;
+    stCameraCfgSend.async_freq = 0;
+    stCameraCfgSend.async_camera_bit_draw = 0;
+    stCameraCfgSend.sync_camera_num = 8;
+    stCameraCfgSend.sync_freq = 30;
+    stCameraCfgSend.sync_camera_bit_draw = 0xff;
+
+    if (!parse_cmdline(argc, argv)) {
+        return -1;
+    }
+    char dev_node_tmp = dev_node[10];
+    for (int i = 0; i < camera_num; i++) {
+        dev_node[10] = dev_node_tmp + i;
+        ctx[i].dev_node = dev_node;
+        ctx[i].camera_fmt_str = camera_fmt_str;
+        ctx[i].output_fmt_str = output_fmt_str;
+        ctx[i].cam_w = cam_w;
+        ctx[i].cam_h = cam_h;
+        ctx[i].out_w = out_w;
+        ctx[i].out_h = out_h;
+    }
+    miivii::MvGmslCamera mvcam(ctx, camera_num, stCameraCfgSend);
+
+    std::string windowName("DisplayCamera ");
+    uint8_t *outbuf[camera_num];
+    signal(SIGINT, &handler);
+    bool quit = false;
+    uint64_t timestamp;
+    while (!quit) {
+        if (exitRequested) {
+            quit = true;
+            break;
+        }
+        uint8_t camera_no = dev_node[10] - 0x30;
+        /*use raw data to get image*/
+        if (mvcam.GetImagePtr(outbuf, timestamp, camera_no, g_camera_dev)) {
+            for (uint32_t i = 0; i < camera_num; i++) {
+                CheckTimeStampLog(timestamp, dev_node_tmp - 0x30 + i);
+            }
+        } else {
+            std::cerr << "Can't get image from camera." << std::endl;
+        }
+    }
+    return 0;
+}
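
Note: both demos derive the per-camera device nodes by patching the trailing character of "/dev/video0" in place (`dev_node[10] = dev_node_tmp + i`), which only holds for single-digit indices and an 11-character base path. A hedged sketch of a more general helper, hypothetical and not part of the samples or the SDK:

```cpp
#include <cstdio>
#include <string>

// Hypothetical helper: builds "/dev/videoN" for any index instead of
// overwriting a single character, so it also works for N >= 10.
static std::string MakeDevNode(unsigned index) {
    char buf[32];
    std::snprintf(buf, sizeof(buf), "/dev/video%u", index);
    return std::string(buf);
}

int main() {
    for (unsigned i = 0; i < 12; ++i)
        std::printf("%s\n", MakeDevNode(i).c_str());
    return 0;
}
```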