#include "sora/hwenc_jetson/jetson_v4l2_capturer.h"

// C
#include <stdio.h>
#include <string.h>
#include <time.h>

// C++
#include <new>
#include <string>

// Linux
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <unistd.h>

// WebRTC
#include <api/scoped_refptr.h>
#include <api/video/i420_buffer.h>
#include <media/base/video_common.h>
#include <modules/video_capture/video_capture.h>
#include <modules/video_capture/video_capture_factory.h>
#include <rtc_base/logging.h>
#include <rtc_base/ref_counted_object.h>
#include <third_party/libyuv/include/libyuv.h>

// L4T Multimedia API
#include <NvBufSurface.h>

#include "sora/hwenc_jetson/jetson_buffer.h"
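
// V4L2 may pad an MJPEG buffer past the JPEG end-of-image (EOI) marker.
// OnCaptured() scans backwards through at most this many bytes to find the
// EOI marker (0xffd9) and recover the exact compressed size.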
#define MJPEG_EOS_SEARCH_SIZE 4096

namespace sora {
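
// A minimal usage sketch (the field values are illustrative, not required):
//
//   sora::V4L2VideoCapturerConfig config;
//   config.width = 1280;
//   config.height = 720;
//   config.framerate = 30;
//   auto capturer = sora::JetsonV4L2Capturer::Create(config);
//   // Returns nullptr if no V4L2 device could be opened and started with
//   // the requested settings.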
rtc::scoped_refptr<JetsonV4L2Capturer> JetsonV4L2Capturer::Create(
    const V4L2VideoCapturerConfig& config) {
  rtc::scoped_refptr<JetsonV4L2Capturer> capturer;
  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> device_info(
      webrtc::VideoCaptureFactory::CreateDeviceInfo());
  if (!device_info) {
    RTC_LOG(LS_ERROR) << "Failed to CreateDeviceInfo";
    return nullptr;
  }

  LogDeviceList(device_info.get());

  for (int i = 0; i < device_info->NumberOfDevices(); ++i) {
    capturer = Create(device_info.get(), config, i);
    if (capturer) {
      RTC_LOG(LS_INFO) << "Created capturer for device " << i;
      return capturer;
    }
  }
  RTC_LOG(LS_ERROR) << "Failed to create JetsonV4L2Capturer";
  return nullptr;
}

void JetsonV4L2Capturer::LogDeviceList(
    webrtc::VideoCaptureModule::DeviceInfo* device_info) {
  for (int i = 0; i < device_info->NumberOfDevices(); ++i) {
    char device_name[256];
    char unique_name[256];
    if (device_info->GetDeviceName(static_cast<uint32_t>(i), device_name,
                                   sizeof(device_name), unique_name,
                                   sizeof(unique_name)) != 0) {
      RTC_LOG(LS_WARNING) << "Failed to GetDeviceName(" << i << ")";
      continue;
    }
    RTC_LOG(LS_INFO) << "GetDeviceName(" << i
                     << "): device_name=" << device_name
                     << ", unique_name=" << unique_name;
  }
}

rtc::scoped_refptr<JetsonV4L2Capturer> JetsonV4L2Capturer::Create(
    webrtc::VideoCaptureModule::DeviceInfo* device_info,
    const V4L2VideoCapturerConfig& config,
    size_t capture_device_index) {
  char device_name[256];
  char unique_name[256];

  if (device_info->GetDeviceName(static_cast<uint32_t>(capture_device_index),
                                 device_name, sizeof(device_name), unique_name,
                                 sizeof(unique_name)) != 0) {
    RTC_LOG(LS_WARNING) << "Failed to GetDeviceName";
    return nullptr;
  }

  rtc::scoped_refptr<JetsonV4L2Capturer> v4l2_capturer =
      rtc::make_ref_counted<JetsonV4L2Capturer>(config);
  if (v4l2_capturer->Init(unique_name, config.video_device) < 0) {
    RTC_LOG(LS_WARNING) << "Failed to create JetsonV4L2Capturer(" << unique_name
                        << ")";
    return nullptr;
  }
  if (v4l2_capturer->StartCapture(config) < 0) {
    RTC_LOG(LS_WARNING) << "Failed to start JetsonV4L2Capturer(w = "
                        << config.width << ", h = " << config.height
                        << ", fps = " << config.framerate << ")";
    return nullptr;
  }
  return v4l2_capturer;
}

JetsonV4L2Capturer::JetsonV4L2Capturer(const V4L2VideoCapturerConfig& config)
    : ScalableVideoTrackSource(config),
      _deviceFd(-1),
      _buffersAllocatedByDevice(-1),
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(webrtc::VideoType::kI420),
      _pool(NULL) {}
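
// Device matching note: FindDevice compares the unique name that WebRTC's
// DeviceInfo reported against VIDIOC_QUERYCAP's bus_info, since the Linux
// DeviceInfo derives a camera's unique name from its V4L2 bus_info string.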
bool JetsonV4L2Capturer::FindDevice(const char* deviceUniqueIdUTF8,
                                    const std::string& device) {
  int fd;
  if ((fd = open(device.c_str(), O_RDONLY)) != -1) {
    // Query device capabilities.
    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
      if (cap.bus_info[0] != 0) {
        if (strncmp((const char*)cap.bus_info, deviceUniqueIdUTF8,
                    strlen(deviceUniqueIdUTF8)) == 0) {
          // The bus info matches the device unique id.
          close(fd);
          return true;
        }
      }
    }
    // Close since this is not the matching device.
    close(fd);
  }
  return false;
}

int32_t JetsonV4L2Capturer::Init(const char* deviceUniqueIdUTF8,
                                 const std::string& specifiedVideoDevice) {
  bool found = false;

  if (!specifiedVideoDevice.empty()) {
    // If a video device was specified explicitly, check only that device.
    if (FindDevice(deviceUniqueIdUTF8, specifiedVideoDevice)) {
      found = true;
      _videoDevice = specifiedVideoDevice;
    }
  } else {
    // No device was specified, so scan the /dev/video[0-63] entries for a
    // matching one.
    char device[32];
    for (int n = 0; n < 64; n++) {
      snprintf(device, sizeof(device), "/dev/video%d", n);
      if (FindDevice(deviceUniqueIdUTF8, device)) {
        found = true;
        _videoDevice = device;  // Store the matching video device.
        break;
      }
    }
  }

  if (!found) {
    RTC_LOG(LS_INFO) << "no matching device found";
    return -1;
  }
  return 0;
}

JetsonV4L2Capturer::~JetsonV4L2Capturer() {
  StopCapture();
  if (_deviceFd != -1)
    close(_deviceFd);
}

int32_t JetsonV4L2Capturer::StartCapture(
    const V4L2VideoCapturerConfig& config) {
  if (_captureStarted) {
    if (config.width == _currentWidth && config.height == _currentHeight) {
      return 0;
    } else {
      StopCapture();
    }
  }

  webrtc::MutexLock lock(&capture_lock_);
  // First open the /dev/video device.
  if ((_deviceFd = open(_videoDevice.c_str(), O_RDWR | O_NONBLOCK, 0)) < 0) {
    RTC_LOG(LS_INFO) << "error in opening " << _videoDevice
                     << " errno = " << errno;
    return -1;
  }

  // Supported video formats in preferred order.
  // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
  // I420 otherwise.
  const int nFormats = 6;
  unsigned int fmts[nFormats] = {};
  if (config.use_native) {
    fmts[0] = V4L2_PIX_FMT_MJPEG;
    fmts[1] = V4L2_PIX_FMT_JPEG;
  } else if (!config.force_i420 &&
             (config.width > 640 || config.height > 480)) {
    fmts[0] = V4L2_PIX_FMT_MJPEG;
    fmts[1] = V4L2_PIX_FMT_YUV420;
    fmts[2] = V4L2_PIX_FMT_YVU420;
    fmts[3] = V4L2_PIX_FMT_YUYV;
    fmts[4] = V4L2_PIX_FMT_UYVY;
    fmts[5] = V4L2_PIX_FMT_JPEG;
  } else {
    fmts[0] = V4L2_PIX_FMT_YUV420;
    fmts[1] = V4L2_PIX_FMT_YVU420;
    fmts[2] = V4L2_PIX_FMT_YUYV;
    fmts[3] = V4L2_PIX_FMT_UYVY;
    fmts[4] = V4L2_PIX_FMT_MJPEG;
    fmts[5] = V4L2_PIX_FMT_JPEG;
  }
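  // Note: above VGA, uncompressed YUV at capture frame rates typically
  // exceeds what a USB 2.0 camera can deliver, hence the MJPEG preference
  // for larger resolutions.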

  // Enumerate image formats.
  struct v4l2_fmtdesc fmt;
  int fmtsIdx = nFormats;
  memset(&fmt, 0, sizeof(fmt));
  fmt.index = 0;
  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  RTC_LOG(LS_INFO) << "Video Capture enumerates supported image formats:";
  while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
    RTC_LOG(LS_INFO) << "  { pixelformat = "
                     << cricket::GetFourccName(fmt.pixelformat)
                     << ", description = '" << fmt.description << "' }";
    // Match the preferred order.
    for (int i = 0; i < nFormats; i++) {
      if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
        fmtsIdx = i;
    }
    // Keep enumerating.
    fmt.index++;
  }

  if (fmtsIdx == nFormats) {
    RTC_LOG(LS_INFO) << "no supported video formats found";
    return -1;
  } else {
    RTC_LOG(LS_INFO) << "We prefer format "
                     << cricket::GetFourccName(fmts[fmtsIdx]);
  }

  struct v4l2_format video_fmt;
  memset(&video_fmt, 0, sizeof(struct v4l2_format));
  video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  video_fmt.fmt.pix.sizeimage = 0;
  video_fmt.fmt.pix.width = config.width;
  video_fmt.fmt.pix.height = config.height;
  video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];

  // Set the format and frame size now.
  if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
    RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno;
    return -1;
  }

  // Initialize current width and height (the driver may have adjusted them).
  _currentWidth = video_fmt.fmt.pix.width;
  _currentHeight = video_fmt.fmt.pix.height;
  _currentPixelFormat = video_fmt.fmt.pix.pixelformat;

  if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
    _captureVideoType = webrtc::VideoType::kYUY2;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
    _captureVideoType = webrtc::VideoType::kI420;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420)
    _captureVideoType = webrtc::VideoType::kYV12;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
    _captureVideoType = webrtc::VideoType::kUYVY;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
           video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
    _captureVideoType = webrtc::VideoType::kMJPEG;

  // Try to set the frame rate, but first check the driver capability.
  bool driver_framerate_support = true;
  struct v4l2_streamparm streamparms;
  memset(&streamparms, 0, sizeof(streamparms));
  streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
    RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno;
    driver_framerate_support = false;
    // continue
  } else {
    // Check that the capability flag V4L2_CAP_TIMEPERFRAME is set.
    if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
      // The driver supports the feature. Set the requested frame rate.
      memset(&streamparms, 0, sizeof(streamparms));
      streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      streamparms.parm.capture.timeperframe.numerator = 1;
      streamparms.parm.capture.timeperframe.denominator = config.framerate;
      if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
        RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno;
        driver_framerate_support = false;
      } else {
        _currentFrameRate = config.framerate;
      }
    }
  }

  // If the driver doesn't support frame-rate control, hardcode a value
  // based on the frame size.
  if (!driver_framerate_support) {
    if (!config.use_native && _currentWidth >= 800 &&
        _captureVideoType != webrtc::VideoType::kMJPEG) {
      _currentFrameRate = 15;
    } else {
      _currentFrameRate = 30;
    }
  }

  if (!AllocateVideoBuffers()) {
    RTC_LOG(LS_INFO) << "failed to allocate video capture buffers";
    return -1;
  }

  // Start the capture thread.
  if (_captureThread.empty()) {
    quit_ = false;
    _captureThread = rtc::PlatformThread::SpawnJoinable(
        std::bind(JetsonV4L2Capturer::CaptureThread, this), "CaptureThread",
        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh));
  }

  // Needed to start the UVC camera - from the uvcview application.
  enum v4l2_buf_type type;
  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) {
    RTC_LOG(LS_INFO) << "Failed to turn on stream";
    return -1;
  }

  _captureStarted = true;
  return 0;
}

int32_t JetsonV4L2Capturer::StopCapture() {
  if (!_captureThread.empty()) {
    {
      webrtc::MutexLock lock(&capture_lock_);
      quit_ = true;
    }
    _captureThread.Finalize();
  }

  webrtc::MutexLock lock(&capture_lock_);
  if (_captureStarted) {
    _captureStarted = false;
    DeAllocateVideoBuffers();
    close(_deviceFd);
    _deviceFd = -1;
  }
  return 0;
}
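
// Buffer strategy: MJPEG is captured into driver-allocated MMAP buffers and
// handed to the hardware JPEG decoder, while raw formats are captured
// straight into NvBufSurface DMA buffers so frames travel downstream by
// file descriptor instead of being copied.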
// Critical section protected by the caller.
bool JetsonV4L2Capturer::AllocateVideoBuffers() {
  struct v4l2_requestbuffers rbuffer;
  memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
  rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  // The memory type passed to VIDIOC_REQBUFS must match the one later used
  // for VIDIOC_QBUF: MMAP for MJPEG, DMABUF for the NvBufSurface path.
  rbuffer.memory = _captureVideoType == webrtc::VideoType::kMJPEG
                       ? V4L2_MEMORY_MMAP
                       : V4L2_MEMORY_DMABUF;
  rbuffer.count = kNoOfV4L2Bufffers;

  if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) {
    RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno;
    return false;
  }

  if (rbuffer.count > kNoOfV4L2Bufffers)
    rbuffer.count = kNoOfV4L2Bufffers;
  _buffersAllocatedByDevice = rbuffer.count;

  std::unique_ptr<int[]> fds;
  if (_captureVideoType != webrtc::VideoType::kMJPEG) {
    fds.reset(new int[rbuffer.count]);
    NvBufSurf::NvCommonAllocateParams params = {0};
    params.memType = NVBUF_MEM_SURFACE_ARRAY;
    params.width = _currentWidth;
    params.height = _currentHeight;
    params.layout = NVBUF_LAYOUT_PITCH;
    if (_captureVideoType == webrtc::VideoType::kYUY2)
      params.colorFormat = NVBUF_COLOR_FORMAT_YUYV;
    else if (_captureVideoType == webrtc::VideoType::kI420)
      params.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
    else if (_captureVideoType == webrtc::VideoType::kYV12)
      params.colorFormat = NVBUF_COLOR_FORMAT_YVU420;
    else if (_captureVideoType == webrtc::VideoType::kUYVY)
      params.colorFormat = NVBUF_COLOR_FORMAT_UYVY;
    params.memtag = NvBufSurfaceTag_CAMERA;
    if (NvBufSurf::NvAllocate(&params, rbuffer.count, fds.get())) {
      return false;
    }
  }

  // Map the buffers.
  _pool = new Buffer[rbuffer.count];
  for (unsigned int i = 0; i < rbuffer.count; i++) {
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(v4l2_buffer));
    if (fds == nullptr) {
      buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buffer.memory = V4L2_MEMORY_MMAP;
      buffer.index = i;
      if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) {
        return false;
      }
      _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                            MAP_SHARED, _deviceFd, buffer.m.offset);
      if (MAP_FAILED == _pool[i].start) {
        for (unsigned int j = 0; j < i; j++)
          munmap(_pool[j].start, _pool[j].length);
        return false;
      }
      _pool[i].length = buffer.length;
    } else {
      buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buffer.memory = V4L2_MEMORY_DMABUF;
      buffer.index = i;
      if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) {
        return false;
      }
      _pool[i].fd = fds[i];
      _pool[i].length = buffer.length;
      // V4L2_MEMORY_DMABUF buffers must carry the dmabuf fd when queued.
      buffer.m.fd = fds[i];
    }
    if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) {
      return false;
    }
  }

  if (_captureVideoType == webrtc::VideoType::kMJPEG) {
    jpeg_decoder_pool_.reset(new JetsonJpegDecoderPool());
  }
  return true;
}

bool JetsonV4L2Capturer::DeAllocateVideoBuffers() {
  if (_captureVideoType == webrtc::VideoType::kMJPEG) {
    jpeg_decoder_pool_ = nullptr;
    // Unmap the MMAP buffers.
    for (int i = 0; i < _buffersAllocatedByDevice; i++) {
      munmap(_pool[i].start, _pool[i].length);
    }
    delete[] _pool;
  } else {
    // Destroy the NvBufSurface DMA buffers.
    for (int i = 0; i < _buffersAllocatedByDevice; i++) {
      NvBufSurf::NvDestroy(_pool[i].fd);
    }
    delete[] _pool;
  }

  // Turn off the stream.
  enum v4l2_buf_type type;
  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) {
    RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno;
  }
  return true;
}
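
// The capture thread loops in CaptureProcess until it returns false, which
// happens once quit_ is set by StopCapture or select() fails.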
void JetsonV4L2Capturer::CaptureThread(void* obj) {
  JetsonV4L2Capturer* capturer = static_cast<JetsonV4L2Capturer*>(obj);
  while (capturer->CaptureProcess()) {
  }
}

bool JetsonV4L2Capturer::CaptureProcess() {
  int retVal = 0;
  fd_set rSet;
  struct timeval timeout;

  FD_ZERO(&rSet);
  FD_SET(_deviceFd, &rSet);
  timeout.tv_sec = 1;
  timeout.tv_usec = 0;

  // _deviceFd is written only in StartCapture, when this thread isn't running.
  retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);

  {
    webrtc::MutexLock lock(&capture_lock_);
    if (quit_) {
      return false;
    } else if (retVal < 0 && errno != EINTR /* continue if interrupted */) {
      // select failed
      return false;
    } else if (retVal == 0) {
      // select timed out
      return true;
    } else if (!FD_ISSET(_deviceFd, &rSet)) {
      // No event on the camera handle.
      return true;
    }

    if (_captureStarted) {
      struct v4l2_buffer buf;
      memset(&buf, 0, sizeof(struct v4l2_buffer));
      buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      if (_captureVideoType == webrtc::VideoType::kMJPEG) {
        buf.memory = V4L2_MEMORY_MMAP;
      } else {
        buf.memory = V4L2_MEMORY_DMABUF;
      }
      // Dequeue a buffer - repeat until dequeued properly!
      while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) {
        if (errno != EINTR) {
          RTC_LOG(LS_INFO) << "could not sync on a buffer on device "
                           << strerror(errno);
          return true;
        }
      }
      OnCaptured(&buf);
      // Enqueue the buffer again.
      if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) {
        RTC_LOG(LS_INFO) << __FUNCTION__ << " Failed to enqueue capture buffer";
      }
    }
  }
  usleep(0);
  return true;
}
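
// OnCaptured turns a dequeued V4L2 buffer into a webrtc::VideoFrame. MJPEG
// input is validated, trimmed to its EOI marker, and decoded by the hardware
// JPEG decoder into a DMA fd; raw formats already sit in NvBufSurface DMA
// buffers and are only wrapped in a JetsonBuffer.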
void JetsonV4L2Capturer::OnCaptured(v4l2_buffer* buf) {
  const int64_t timestamp_us = rtc::TimeMicros();
  int adapted_width, adapted_height, crop_width, crop_height, crop_x, crop_y;
  if (!AdaptFrame(_currentWidth, _currentHeight, timestamp_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  if (_captureVideoType == webrtc::VideoType::kMJPEG) {
    uint8_t* data = (uint8_t*)_pool[buf->index].start;
    uint32_t bytesused = buf->bytesused;
    // Some cameras (e.g. the DELL WB7022) occasionally send invalid data.
    // Passing it to the hardware JPEG decoder would crash the whole process
    // (Momo). A JPEG must begin with the SOI marker 0xffd8, so check it here
    // to avoid the crash.
    if (bytesused < 2 || data[0] != 0xff || data[1] != 0xd8) {
      RTC_LOG(LS_WARNING) << __FUNCTION__
                          << " Invalid JPEG buffer frame skipped";
      return;
    }

    unsigned int eosSearchSize = MJPEG_EOS_SEARCH_SIZE;
    uint8_t* p;
    // v4l2_buf.bytesused may include padding bytes for alignment.
    // Search backwards for the EOI marker to get the exact size.
    if (eosSearchSize > bytesused)
      eosSearchSize = bytesused;
    for (unsigned int i = 0; i < eosSearchSize; i++) {
      p = data + bytesused;
      if ((*(p - 2) == 0xff) && (*(p - 1) == 0xd9)) {
        break;
      }
      bytesused--;
    }

    std::shared_ptr<JetsonJpegDecoder> decoder = jpeg_decoder_pool_->Pop();
    int fd = 0;
    uint32_t width, height, pixfmt;
    if (decoder->DecodeToFd(fd, data, bytesused, pixfmt, width, height) < 0) {
      RTC_LOG(LS_ERROR) << "DecodeToFd failed";
      return;
    }

    rtc::scoped_refptr<JetsonBuffer> jetson_buffer(
        JetsonBuffer::Create(_captureVideoType, width, height, adapted_width,
                             adapted_height, fd, pixfmt, std::move(decoder)));
    OnFrame(webrtc::VideoFrame::Builder()
                .set_video_frame_buffer(jetson_buffer)
                .set_timestamp_rtp(0)
                .set_timestamp_ms(rtc::TimeMillis())
                .set_timestamp_us(rtc::TimeMicros())
                .set_rotation(webrtc::kVideoRotation_0)
                .build());
  } else {
    rtc::scoped_refptr<JetsonBuffer> jetson_buffer(JetsonBuffer::Create(
        _captureVideoType, _currentWidth, _currentHeight, adapted_width,
        adapted_height, _pool[buf->index].fd, _currentPixelFormat, nullptr));
    OnFrame(webrtc::VideoFrame::Builder()
                .set_video_frame_buffer(jetson_buffer)
                .set_timestamp_rtp(0)
                .set_timestamp_ms(rtc::TimeMillis())
                .set_timestamp_us(rtc::TimeMicros())
                .set_rotation(webrtc::kVideoRotation_0)
                .build());
  }
}

}  // namespace sora