// jetson_buffer.cpp
  1. #include "jetson_buffer.h"
  2. // Linux
  3. #include <sys/ioctl.h>
  4. // WebRTC
  5. #include <api/video/i420_buffer.h>
  6. #include <rtc_base/logging.h>
  7. #include <third_party/libyuv/include/libyuv.h>
  8. // Jetson Linux Multimedia API
  9. #include <nvbufsurface.h>
  10. #include <nvbufsurftransform.h>
  11. namespace sora {
  12. static const int kBufferAlignment = 64;
  13. rtc::scoped_refptr<JetsonBuffer> JetsonBuffer::Create(
  14. webrtc::VideoType video_type,
  15. int raw_width,
  16. int raw_height,
  17. int scaled_width,
  18. int scaled_height,
  19. int fd,
  20. uint32_t pixfmt,
  21. std::shared_ptr<JetsonJpegDecoder> decoder) {
  22. return rtc::make_ref_counted<JetsonBuffer>(video_type, raw_width, raw_height,
  23. scaled_width, scaled_height, fd,
  24. pixfmt, decoder);
  25. }
  26. rtc::scoped_refptr<JetsonBuffer> JetsonBuffer::Create(
  27. webrtc::VideoType video_type,
  28. int raw_width,
  29. int raw_height,
  30. int scaled_width,
  31. int scaled_height) {
  32. return rtc::make_ref_counted<JetsonBuffer>(video_type, raw_width, raw_height,
  33. scaled_width, scaled_height);
  34. }
  35. webrtc::VideoFrameBuffer::Type JetsonBuffer::type() const {
  36. return Type::kNative;
  37. }
  38. webrtc::VideoType JetsonBuffer::VideoType() const {
  39. return video_type_;
  40. }
  41. int JetsonBuffer::width() const {
  42. return scaled_width_;
  43. }
  44. int JetsonBuffer::height() const {
  45. return scaled_height_;
  46. }
  47. rtc::scoped_refptr<webrtc::I420BufferInterface> JetsonBuffer::ToI420() {
  48. if (video_type_ == webrtc::VideoType::kMJPEG) {
  49. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer =
  50. webrtc::I420Buffer::Create(scaled_width_, scaled_height_);
  51. int32_t buffer_width = ((scaled_width_ + 15) / 16) * 16;
  52. int32_t buffer_height = ((scaled_height_ + 15) / 16) * 16;
  53. NvBufSurfaceAllocateParams input_params = {0};
  54. input_params.params.width = buffer_width;
  55. input_params.params.height = buffer_height;
  56. input_params.params.layout = NVBUF_LAYOUT_PITCH;
  57. input_params.params.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
  58. input_params.params.memType = NVBUF_MEM_SURFACE_ARRAY;
  59. input_params.memtag = NvBufSurfaceTag_NONE;
  60. NvBufSurface* dst_surf = 0;
  61. if (NvBufSurfaceAllocate(
  62. &dst_surf,
  63. 1, /* NvUtils では複数のバッファーを同時に初期化できるため、バッファーの数を指定する */
  64. &input_params) == -1) {
  65. RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufSurfaceAllocate";
  66. return scaled_buffer;
  67. }
  68. NvBufSurfaceParams params = dst_surf->surfaceList[0];
  69. NvBufSurfTransformRect src_rect, dest_rect;
  70. src_rect.top = 0;
  71. src_rect.left = 0;
  72. src_rect.width = params.width;
  73. src_rect.height = params.height;
  74. dest_rect.top = 0;
  75. dest_rect.left = 0;
  76. dest_rect.width = buffer_width;
  77. dest_rect.height = buffer_height;
  78. NvBufSurfTransformParams trans_params;
  79. memset(&trans_params, 0, sizeof(trans_params));
  80. trans_params.transform_flag = NVBUFSURF_TRANSFORM_FILTER;
  81. trans_params.transform_flip = NvBufSurfTransform_None;
  82. trans_params.transform_filter = NvBufSurfTransformInter_Algo3;
  83. trans_params.src_rect = &src_rect;
  84. trans_params.dst_rect = &dest_rect;
  85. NvBufSurface* src_surf = 0;
  86. if (NvBufSurfaceFromFd(fd_, (void**)(&src_surf)) == -1) {
  87. RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufSurfaceFromFd";
  88. return scaled_buffer;
  89. }
  90. if (NvBufSurfTransform(src_surf, dst_surf, &trans_params) !=
  91. NvBufSurfTransformError_Success) {
  92. RTC_LOG(LS_ERROR) << __FUNCTION__ << " Failed to NvBufSurfTransform";
  93. return scaled_buffer;
  94. }
  95. int ret;
  96. void* data_addr;
  97. uint8_t* dest_addr;
  98. int num_planes = dst_surf->surfaceList->planeParams.num_planes;
  99. int index = 0;
  100. for (int plane = 0; plane < num_planes; plane++) {
  101. ret = NvBufSurfaceMap(dst_surf, index, plane, NVBUF_MAP_READ);
  102. if (ret == 0) {
  103. NvBufSurfaceSyncForCpu(dst_surf, index, plane);
  104. data_addr = dst_surf->surfaceList->mappedAddr.addr[plane];
  105. int height, width;
  106. if (plane == 0) {
  107. dest_addr = scaled_buffer.get()->MutableDataY();
  108. width = scaled_width_;
  109. height = scaled_height_;
  110. } else if (plane == 1) {
  111. dest_addr = scaled_buffer.get()->MutableDataU();
  112. width = (scaled_width_ + 1) >> 1;
  113. height = (scaled_height_ + 1) >> 1;
  114. } else if (plane == 2) {
  115. dest_addr = scaled_buffer.get()->MutableDataV();
  116. width = (scaled_width_ + 1) >> 1;
  117. height = (scaled_height_ + 1) >> 1;
  118. }
  119. for (int i = 0; i < height; i++) {
  120. memcpy(dest_addr + width * i,
  121. (uint8_t*)data_addr +
  122. dst_surf->surfaceList->planeParams.pitch[plane] * i,
  123. width);
  124. }
  125. }
  126. NvBufSurfaceUnMap(dst_surf, index, plane);
  127. if (ret == -1) {
  128. RTC_LOG(LS_ERROR) << __FUNCTION__
  129. << " Failed to NvBufSurfaceMap plane=" << plane;
  130. return scaled_buffer;
  131. }
  132. }
  133. NvBufSurfaceDestroy(dst_surf);
  134. return scaled_buffer;
  135. } else {
  136. rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer =
  137. webrtc::I420Buffer::Create(raw_width_, raw_height_);
  138. const int conversionResult = libyuv::ConvertToI420(
  139. data_.get(), length_, i420_buffer.get()->MutableDataY(),
  140. i420_buffer.get()->StrideY(), i420_buffer.get()->MutableDataU(),
  141. i420_buffer.get()->StrideU(), i420_buffer.get()->MutableDataV(),
  142. i420_buffer.get()->StrideV(), 0, 0, raw_width_, raw_height_, raw_width_,
  143. raw_height_, libyuv::kRotate0, ConvertVideoType(video_type_));
  144. if (raw_width_ == scaled_width_ && raw_height_ == scaled_height_) {
  145. return i420_buffer;
  146. }
  147. rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer =
  148. webrtc::I420Buffer::Create(scaled_width_, scaled_height_);
  149. scaled_buffer->ScaleFrom(*i420_buffer->ToI420());
  150. return scaled_buffer;
  151. }
  152. }
  153. int JetsonBuffer::RawWidth() const {
  154. return raw_width_;
  155. }
  156. int JetsonBuffer::RawHeight() const {
  157. return raw_height_;
  158. }
  159. int JetsonBuffer::DecodedFd() const {
  160. return fd_;
  161. }
  162. uint32_t JetsonBuffer::V4L2PixelFormat() const {
  163. return pixfmt_;
  164. }
  165. std::shared_ptr<JetsonJpegDecoder> JetsonBuffer::JpegDecoder() const {
  166. return decoder_;
  167. }
  168. uint8_t* JetsonBuffer::Data() const {
  169. return data_.get();
  170. }
  171. void JetsonBuffer::SetLength(size_t length) {
  172. length_ = length;
  173. }
  174. size_t JetsonBuffer::Length() const {
  175. return length_;
  176. }
  177. JetsonBuffer::JetsonBuffer(webrtc::VideoType video_type,
  178. int raw_width,
  179. int raw_height,
  180. int scaled_width,
  181. int scaled_height,
  182. int fd,
  183. uint32_t pixfmt,
  184. std::shared_ptr<JetsonJpegDecoder> decoder)
  185. : video_type_(video_type),
  186. raw_width_(raw_width),
  187. raw_height_(raw_height),
  188. scaled_width_(scaled_width),
  189. scaled_height_(scaled_height),
  190. fd_(fd),
  191. pixfmt_(pixfmt),
  192. decoder_(decoder),
  193. data_(nullptr) {}
  194. JetsonBuffer::JetsonBuffer(webrtc::VideoType video_type,
  195. int raw_width,
  196. int raw_height,
  197. int scaled_width,
  198. int scaled_height)
  199. : video_type_(video_type),
  200. raw_width_(raw_width),
  201. raw_height_(raw_height),
  202. scaled_width_(scaled_width),
  203. scaled_height_(scaled_height),
  204. fd_(-1),
  205. pixfmt_(0),
  206. decoder_(nullptr),
  207. data_(static_cast<uint8_t*>(webrtc::AlignedMalloc(
  208. webrtc::CalcBufferSize(video_type, raw_width, raw_height),
  209. kBufferAlignment))) {}
  210. } // namespace sora