NvDrmRenderer.cpp 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180
  1. /*
  2. * Copyright (c) 2016-2023, NVIDIA CORPORATION. All rights reserved.
  3. * NVIDIA CORPORATION and its licensors retain all intellectual property
  4. * and proprietary rights in and to this software, related documentation
  5. * and any modifications thereto. Any use, reproduction, disclosure or
  6. * distribution of this software and related documentation without an express
  7. * license agreement from NVIDIA CORPORATION is strictly prohibited.
  8. */
  9. #include "NvDrmRenderer.h"
  10. #include "NvLogging.h"
  11. #include "nvbufsurface.h"
  12. #include <sys/time.h>
  13. #include <sys/poll.h>
  14. #include <unistd.h>
  15. #include <string.h>
  16. #include <xf86drm.h>
  17. #include <xf86drmMode.h>
  18. #include <drm_fourcc.h>
  19. #include <fcntl.h>
  20. #include "tegra_drm.h"
  21. #include <sys/mman.h>
  22. #ifndef DOWNSTREAM_TEGRA_DRM
  23. #include "tegra_drm_nvdc.h"
  24. #endif
  25. using namespace std;
  26. #define CAT_NAME "DrmRenderer"
  27. #define DRM_DEVICE_NAME "drm-nvdc"
  28. #define ZERO_FD 0x0
// Per-frame DRM framebuffer description derived from an NvBufSurface by
// NvBufGetDrmParams(): plane count, per-plane layout and the DRM fourcc.
struct NvBufDrmParams
{
  uint32_t num_planes;    // number of valid entries in pitch[]/offset[]
  uint32_t pitch[4];      // bytes per row for each plane
  uint32_t offset[4];     // byte offset of each plane within the buffer
  uint32_t pixel_format;  // DRM_FORMAT_* fourcc matching the surface format
};
// Static description of a DRM pixel format for buffer-object allocation:
// number of planes and, per plane, the subsampling divisors relative to the
// luma plane plus bits per pixel. Looked up via get_format_info().
struct NvBOFormat {
  uint32_t drm_format;  // DRM_FORMAT_* fourcc this entry describes
  int num_buffers;      // number of planes used in buffers[]
  struct {
    int w; // width divisor from overall fb_width (luma size)
    int h; // height divisor from overall fb_height (luma size)
    int bpp;
  } buffers[3];
};
// Table of every DRM format this renderer knows how to allocate.
// Searched linearly by get_format_info(); order is not significant.
const NvBOFormat NvBOFormats[] = {
  // drm fourcc type        #buffers  w1 h1 bpp1   w2 h2 bpp2  w3 h3 bpp3
  {DRM_FORMAT_RGB332,       1, {{1, 1, 8},  {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGR233,       1, {{1, 1, 8},  {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_XRGB4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ARGB4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_XBGR4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ABGR4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGBX4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGBA4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGRX4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGRA4444,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_XRGB1555,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ARGB1555,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_XBGR1555,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ABGR1555,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGBX5551,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGBA5551,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGRX5551,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGRA5551,     1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGB565,       1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGR565,       1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGB888,       1, {{1, 1, 24}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGR888,       1, {{1, 1, 24}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_XRGB8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ARGB8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_XBGR8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ABGR8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGBX8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_RGBA8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGRX8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_BGRA8888,     1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ARGB2101010,  1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_ABGR2101010,  1, {{1, 1, 32}, {0, 0, 0},  {0, 0, 0}}},
  // Packed YUV: one plane, 16 bits per pixel.
  {DRM_FORMAT_YUYV,         1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_YVYU,         1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_UYVY,         1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  {DRM_FORMAT_VYUY,         1, {{1, 1, 16}, {0, 0, 0},  {0, 0, 0}}},
  // Semi-planar YUV: full-size luma plane plus interleaved chroma plane.
  {DRM_FORMAT_NV12,         2, {{1, 1, 8},  {2, 2, 16}, {0, 0, 0}}},
  {DRM_FORMAT_NV21,         2, {{1, 1, 8},  {2, 2, 16}, {0, 0, 0}}},
  {DRM_FORMAT_NV16,         2, {{1, 1, 8},  {2, 1, 16}, {0, 0, 0}}},
  {DRM_FORMAT_NV61,         2, {{1, 1, 8},  {2, 1, 16}, {0, 0, 0}}},
  // Fully planar YUV: luma plane plus two subsampled chroma planes.
  {DRM_FORMAT_YUV410,       3, {{1, 1, 8},  {4, 4, 8},  {4, 4, 8}}},
  {DRM_FORMAT_YVU410,       3, {{1, 1, 8},  {4, 4, 8},  {4, 4, 8}}},
  {DRM_FORMAT_YUV411,       3, {{1, 1, 8},  {4, 1, 8},  {4, 1, 8}}},
  {DRM_FORMAT_YVU411,       3, {{1, 1, 8},  {4, 1, 8},  {4, 1, 8}}},
  {DRM_FORMAT_YUV420,       3, {{1, 1, 8},  {2, 2, 8},  {2, 2, 8}}},
  {DRM_FORMAT_YVU420,       3, {{1, 1, 8},  {2, 2, 8},  {2, 2, 8}}},
  {DRM_FORMAT_YUV422,       3, {{1, 1, 8},  {2, 1, 8},  {2, 1, 8}}},
  {DRM_FORMAT_YVU422,       3, {{1, 1, 8},  {2, 1, 8},  {2, 1, 8}}},
  {DRM_FORMAT_YUV444,       3, {{1, 1, 8},  {1, 1, 8},  {1, 1, 8}}},
  {DRM_FORMAT_YVU444,       3, {{1, 1, 8},  {1, 1, 8},  {1, 1, 8}}},
};
  98. static int NvBufGetDrmParams(NvBufSurface *nvbuf_surface, NvBufDrmParams *dParams)
  99. {
  100. unsigned int i;
  101. if (nvbuf_surface == NULL || dParams == NULL)
  102. goto error;
  103. memset(dParams, 0 , sizeof(NvBufDrmParams));
  104. dParams->num_planes = nvbuf_surface->surfaceList[0].planeParams.num_planes;
  105. for (i = 0; i < nvbuf_surface->surfaceList[0].planeParams.num_planes; i++) {
  106. dParams->pitch[i] = nvbuf_surface->surfaceList[0].planeParams.pitch[i];
  107. dParams->offset[i] = nvbuf_surface->surfaceList[0].planeParams.offset[i];
  108. }
  109. switch (nvbuf_surface->surfaceList[0].colorFormat) {
  110. case NVBUF_COLOR_FORMAT_YUV420:
  111. dParams->pixel_format = DRM_FORMAT_YUV420;
  112. break;
  113. case NVBUF_COLOR_FORMAT_YVU420:
  114. dParams->pixel_format = DRM_FORMAT_YVU420;
  115. break;
  116. case NVBUF_COLOR_FORMAT_NV12:
  117. dParams->pixel_format = DRM_FORMAT_NV12;
  118. break;
  119. case NVBUF_COLOR_FORMAT_NV21:
  120. dParams->pixel_format = DRM_FORMAT_NV21;
  121. break;
  122. case NVBUF_COLOR_FORMAT_UYVY:
  123. dParams->pixel_format = DRM_FORMAT_UYVY;
  124. break;
  125. case NVBUF_COLOR_FORMAT_NV12_10LE_2020:
  126. dParams->pixel_format = DRM_FORMAT_TEGRA_P010_2020;
  127. break;
  128. case NVBUF_COLOR_FORMAT_NV12_10LE_709:
  129. dParams->pixel_format = DRM_FORMAT_TEGRA_P010_709;
  130. break;
  131. case NVBUF_COLOR_FORMAT_NV12_10LE:
  132. dParams->pixel_format = DRM_FORMAT_P010;
  133. break;
  134. case NVBUF_COLOR_FORMAT_INVALID:
  135. default:
  136. goto error;
  137. }
  138. return 0;
  139. error:
  140. ERROR_MSG("Error in transforming buffer information ");
  141. return -1;
  142. }
  143. static int get_format_info(uint32_t drm_format, NvBOFormat *bo)
  144. {
  145. unsigned int i;
  146. for (i = 0; i < sizeof(NvBOFormats) / sizeof(NvBOFormats[0]); i++) {
  147. if (NvBOFormats[i].drm_format == drm_format) {
  148. *bo = NvBOFormats[i];
  149. return 1;
  150. }
  151. }
  152. return 0;
  153. }
/**
 * Construct a DRM/KMS renderer and start its render thread.
 *
 * Opens the DRM device (drm-nvdc first, falling back to /dev/dri/card0),
 * resolves a connected connector/encoder/crtc triple from the requested
 * indices, programs the CRTC with the connector's highest-resolution mode
 * (seeded with a dumb NV12 framebuffer), optionally installs SMPTE 2086 HDR
 * metadata, then spawns renderThreadOrin (nvidia-drm driver) or renderThread
 * (drm-nvdc / others).
 *
 * @param name      element name forwarded to NvElement
 * @param w, h      frame dimensions used later when creating framebuffers
 * @param w_x, w_y  CRTC x/y position for the initial framebuffer
 * @param aconn     connector index into the DRM resource list
 * @param acrtc     crtc index into the DRM resource list
 * @param metadata  SMPTE 2086 mastering-display metadata
 * @param streamHDR when true and the CRTC reports HDR support, apply metadata
 *
 * On failure, is_in_error is set and the drm fd (if open) is closed.
 */
NvDrmRenderer::NvDrmRenderer(const char *name, uint32_t w, uint32_t h,
    uint32_t w_x, uint32_t w_y, uint32_t aconn, uint32_t acrtc,
    struct drm_tegra_hdr_metadata_smpte_2086 metadata,
    bool streamHDR)
    :NvElement(name, valid_fields)
{
  drmModeRes* drm_res_info = NULL;
  drmModeConnector* drm_conn_info = NULL;
  drmModeEncoder* drm_enc_info = NULL;
  drmModeCrtc* drm_crtc_info = NULL;
  uint32_t crtc_mask;
  int i;

  conn = aconn;
  crtc = acrtc;
  width = w;
  height = h;
  stop_thread = false;
  flipPending = false;
  renderingStarted = false;
  is_nvidia_drm = false;
  activeFd = flippedFd = -1;  // no buffer on screen or in flight yet
  last_fb = 0;
  int ret =0;
  log_level = LOG_LEVEL_ERROR;
  last_render_time.tv_sec = 0;  // 0 => first frame, pacing not yet started
  drmVersion *version;

  // Prefer the Tegra nvdc backend; fall back to the generic card node.
  drm_fd = drmOpen(DRM_DEVICE_NAME, NULL);
  if (drm_fd < 0)
    drm_fd = open("/dev/dri/card0", O_RDWR, 0);
  if (drm_fd == -1) {
    COMP_ERROR_MSG("Couldn't open device");
    goto error;
  }

  // nvidia-drm (e.g. Orin) takes a different render-thread/flip path below.
  version = drmGetVersion(drm_fd);
  if (version == NULL) {
    COMP_ERROR_MSG("Failed to get drm version\n");
    goto error;
  }
  if (!strcmp(version->name, "nvidia-drm")) {
    is_nvidia_drm = true;
  }
  drmFreeVersion(version);

  // Obtain DRM-KMS resources
  drm_res_info = drmModeGetResources(drm_fd);
  if (!drm_res_info) {
    COMP_ERROR_MSG("Couldn't obtain DRM-KMS resources ");
    goto error;
  }
  COMP_DEBUG_MSG("Obtained device information ");

  // If a specific crtc was requested, make sure it exists
  if (crtc >= drm_res_info->count_crtcs) {
    COMP_ERROR_MSG("Requested crtc index " << crtc << " exceeds count " << drm_res_info->count_crtcs);
    goto error;
  }
  // NOTE(review): crtc and conn are uint32_t, so the (crtc >= 0) and
  // (conn >= 0) tests below are always true and the "scan all" fallback
  // branches appear unreachable — confirm whether signed indices were
  // intended here.
  crtc_mask = (crtc >= 0) ? (1<<crtc) : ((1<<drm_res_info->count_crtcs)-1);

  if (conn >= 0) {
    // Query info for requested connector
    if (conn >= drm_res_info->count_connectors) {
      COMP_ERROR_MSG("Requested connector index " << conn << " exceeds count " << drm_res_info->count_connectors);
      goto error;
    }
    drm_conn_id = drm_res_info->connectors[conn];
    drm_conn_info = drmModeGetConnector(drm_fd, drm_conn_id);
    if (!drm_conn_info) {
      COMP_ERROR_MSG("Unable to obtain info for connector " << drm_conn_id);
      goto error;
    } else if (drm_conn_info->connection != DRM_MODE_CONNECTED) {
      COMP_ERROR_MSG("Requested connnector is not connected ");
      goto error;
    } else if (drm_conn_info->count_modes <= 0) {
      COMP_ERROR_MSG("Requested connnector has no available modes ");
      goto error;
    }
  } else {
    // Scan for the first connected connector that has at least one mode.
    for (i=0; i<drm_res_info->count_connectors; ++i) {
      // Query info for requested connector
      drm_conn_id = drm_res_info->connectors[i];
      drm_conn_info = drmModeGetConnector(drm_fd, drm_conn_id);
      if (!drm_conn_info) {
        COMP_ERROR_MSG("Unable to obtain info for connector " << drm_conn_id);
        goto error;
      } else if (drm_conn_info->connection != DRM_MODE_CONNECTED) {
        drmModeFreeConnector(drm_conn_info);
        continue;
      } else if (drm_conn_info->count_modes <= 0) {
        COMP_ERROR_MSG("Requested connnector has no available modes ");
        goto error;
      } else if (drm_conn_info->connection == DRM_MODE_CONNECTED) {
        break;
      }
    }
    // NOTE(review): when the loop exhausts, i == count_connectors, so this
    // (i > count) check can never fire — presumably >= was intended, which
    // means the "nothing connected" case falls through with a freed/NULL
    // drm_conn_info. Confirm.
    if (i > drm_res_info->count_connectors) {
      COMP_ERROR_MSG("Requested connector index " << i << " exceeds count " << drm_res_info->count_connectors);
      goto error;
    }
  }
  COMP_DEBUG_MSG("Obtained connector information\n");

  // If there is already an encoder attached to the connector, choose
  // it unless not compatible with crtc/plane
  drm_enc_id = drm_conn_info->encoder_id;
  drm_enc_info = drmModeGetEncoder(drm_fd, drm_enc_id);
  if (drm_enc_info) {
    if (!(drm_enc_info->possible_crtcs & crtc_mask)) {
      drmModeFreeEncoder(drm_enc_info);
      drm_enc_info = NULL;
    }
  }

  // If we didn't have a suitable encoder, find one
  if (!drm_enc_info) {
    for (i=0; i<drm_conn_info->count_encoders; ++i) {
      drm_enc_id = drm_conn_info->encoders[i];
      drm_enc_info = drmModeGetEncoder(drm_fd, drm_enc_id);
      if (drm_enc_info) {
        if (crtc_mask & drm_enc_info->possible_crtcs) {
          // Narrow the candidate crtc set to what this encoder can drive.
          crtc_mask &= drm_enc_info->possible_crtcs;
          break;
        }
        drmModeFreeEncoder(drm_enc_info);
        drm_enc_info = NULL;
      }
    }
    if (i == drm_conn_info->count_encoders) {
      COMP_ERROR_MSG("Unable to find suitable encoder ");
      goto error;
    }
  }
  COMP_DEBUG_MSG("Obtained encoder information ");

  // Select a suitable crtc. Give preference to one that's already
  // attached to the encoder.
  for (i=0; i<drm_res_info->count_crtcs; ++i) {
    if (crtc_mask & (1 << i)) {
      drm_crtc_id = drm_res_info->crtcs[i];
      if (drm_enc_info && drm_res_info->crtcs[i] == drm_enc_info->crtc_id) {
        break;
      }
    }
  }

  // Apply HDR metadata only when the stream carries it and the CRTC
  // exposes the HDR_SUPPORTED property.
  if (streamHDR && hdrSupported()) {
    ret = setHDRMetadataSmpte2086(metadata);
    if(ret!=0)
      COMP_DEBUG_MSG("Error while getting HDR mastering display data\n");
  }
  else {
    COMP_DEBUG_MSG("APP_INFO : HDR not supported \n");
  }

  // Query info for crtc
  drm_crtc_info = drmModeGetCrtc(drm_fd, drm_crtc_id);
  if (!drm_crtc_info) {
    COMP_ERROR_MSG("Unable to obtain info for crtc " << drm_crtc_id);
    goto error;
  }
  COMP_DEBUG_MSG("Obtained crtc information\n");

#if 0
  if ((drm_conn_info->encoder_id != drm_enc_id) ||
      (drm_enc_info->crtc_id != drm_crtc_id) ||
      !drm_crtc_info->mode_valid) {
    drmModeSetCrtc(drm_fd, drm_crtc_id, -1, 0, 0, &drm_conn_id, 1, NULL);
  }
#endif

  drmModeModeInfoPtr mode;
  mode = NULL;
  int area, current_area;
  area = 0;
  current_area = 0;
  /* Find the mode with highest resolution */
  for (int i = 0; i < drm_conn_info->count_modes; i++) {
    drmModeModeInfoPtr current_mode = &(drm_conn_info)->modes[i];
    current_area = current_mode->hdisplay * current_mode->vdisplay;
    if (current_area > area) {
      mode = current_mode;
      area = current_area;
    }
  }

  // Seed the CRTC with a blank dumb NV12 framebuffer at the chosen mode.
  // NOTE(review): mode stays NULL if every mode reports zero area;
  // createDumbFB/drmModeSetCrtc below would then dereference NULL — confirm
  // whether a guard is needed.
  NvDrmFB fb;
  createDumbFB(mode->hdisplay, mode->vdisplay, DRM_FORMAT_NV12, &fb);
  drmModeSetCrtc(drm_fd, drm_crtc_id, fb.fb_id, w_x, w_y, &drm_conn_id, 1, mode);

  pthread_mutex_init(&enqueue_lock, NULL);
  pthread_cond_init(&enqueue_cond, NULL);
  pthread_mutex_init(&dequeue_lock, NULL);
  pthread_mutex_init(&render_lock, NULL);
  pthread_cond_init(&render_cond, NULL);
  pthread_cond_init(&dequeue_cond, NULL);
  setFPS(30);

  if (is_nvidia_drm)
    pthread_create(&render_thread, NULL, renderThreadOrin, this);
  else
    pthread_create(&render_thread, NULL, renderThread, this);
  pthread_setname_np(render_thread, "DrmRenderer");

  // Shared cleanup: the success path falls through all of these labels,
  // freeing every drmModeGet* result; the error path jumps to the label
  // matching the deepest resource acquired.
error_crtc:
  drmModeFreeCrtc(drm_crtc_info);
error_enc:
  drmModeFreeEncoder(drm_enc_info);
error_conn:
  drmModeFreeConnector(drm_conn_info);
error_res:
  drmModeFreeResources(drm_res_info);
  return;

error:
  is_in_error = 1;
  if (drm_fd != -1)
    drmClose(drm_fd);
  if (drm_crtc_info)
    goto error_crtc;
  if (drm_enc_info)
    goto error_enc;
  if (drm_conn_info)
    goto error_conn;
  if (drm_res_info)
    goto error_res;
  return;
}
  365. int
  366. NvDrmRenderer::drmUtilCloseGemBo (int fd, uint32_t bo_handle)
  367. {
  368. struct drm_gem_close gemCloseArgs;
  369. memset (&gemCloseArgs, 0, sizeof (gemCloseArgs));
  370. gemCloseArgs.handle = bo_handle;
  371. drmIoctl (fd, DRM_IOCTL_GEM_CLOSE, &gemCloseArgs);
  372. return 1;
  373. }
  374. void NvDrmRenderer::page_flip_handler(int drm_fd, unsigned int frame,
  375. unsigned int sec, unsigned int usec, void *data)
  376. {
  377. NvDrmRenderer *renderer = (NvDrmRenderer *) data;
  378. int fd;
  379. int ret;
  380. pthread_mutex_lock(&renderer->dequeue_lock);
  381. if (renderer->activeFd != -1) {
  382. renderer->freeBuffers.push(renderer->activeFd);
  383. pthread_cond_signal(&renderer->dequeue_cond);
  384. }
  385. renderer->activeFd = renderer->flippedFd;
  386. pthread_mutex_unlock(&renderer->dequeue_lock);
  387. pthread_mutex_lock(&renderer->enqueue_lock);
  388. if (renderer->pendingBuffers.empty()) {
  389. renderer->flipPending = false;
  390. pthread_mutex_unlock(&renderer->enqueue_lock);
  391. return;
  392. } else {
  393. fd = (int)renderer->pendingBuffers.front();
  394. renderer->pendingBuffers.pop();
  395. if (fd == -1) {
  396. // drmModeSetCrtc with a ZERO FD will walk through the path that
  397. // disable the windows.
  398. // Note: drmModePageFlip doesn't support this trick.
  399. ret = drmModeSetCrtc(drm_fd, renderer->drm_crtc_id,
  400. ZERO_FD, 0, 0, &renderer->drm_conn_id, 1, NULL);
  401. if (ret) {
  402. std::cout << "Failed to disable windows before exiting" << std::endl;
  403. pthread_mutex_unlock(&renderer->enqueue_lock);
  404. return;
  405. }
  406. // EOS buffer. Release last buffer held.
  407. renderer->stop_thread = true;
  408. pthread_mutex_lock(&renderer->dequeue_lock);
  409. renderer->freeBuffers.push(renderer->activeFd);
  410. pthread_cond_signal(&renderer->dequeue_cond);
  411. pthread_mutex_unlock(&renderer->dequeue_lock);
  412. renderer->flipPending = false;
  413. pthread_mutex_unlock(&renderer->enqueue_lock);
  414. return;
  415. }
  416. pthread_mutex_unlock(&renderer->enqueue_lock);
  417. renderer->renderInternal(fd);
  418. }
  419. }
  420. void *
  421. NvDrmRenderer::renderThread(void *arg)
  422. {
  423. NvDrmRenderer *renderer = (NvDrmRenderer *) arg;
  424. drmEventContext evctx;
  425. struct pollfd fds;
  426. int ret;
  427. int timeout = 500; // 500ms
  428. memset(&fds, 0, sizeof(fds));
  429. fds.fd = renderer->drm_fd;
  430. fds.events = POLLIN;
  431. pthread_mutex_lock(&renderer->enqueue_lock);
  432. while (renderer->pendingBuffers.empty()) {
  433. if (renderer->stop_thread) {
  434. pthread_mutex_unlock(&renderer->enqueue_lock);
  435. return NULL;
  436. }
  437. pthread_cond_wait(&renderer->enqueue_cond, &renderer->enqueue_lock);
  438. }
  439. int fd = (int)renderer->pendingBuffers.front();
  440. renderer->pendingBuffers.pop();
  441. pthread_mutex_unlock(&renderer->enqueue_lock);
  442. ret = renderer->renderInternal(fd);
  443. if (ret < 0) {
  444. renderer->is_in_error = 1;
  445. return NULL;
  446. }
  447. renderer->renderingStarted = true;
  448. while (!renderer->isInError() || !renderer->stop_thread) {
  449. ret = poll(&fds, 1, timeout);
  450. if (ret > 0) {
  451. if (fds.revents & POLLIN) {
  452. memset(&evctx, 0, sizeof evctx);
  453. evctx.version = DRM_EVENT_CONTEXT_VERSION;
  454. evctx.page_flip_handler = page_flip_handler;
  455. drmHandleEvent(renderer->drm_fd, &evctx);
  456. }
  457. } else if (ret < 0) {
  458. renderer->is_in_error = 1;
  459. return NULL;
  460. } else {
  461. // Timeout
  462. return NULL;
  463. }
  464. }
  465. return NULL;
  466. }
  467. void *
  468. NvDrmRenderer::renderThreadOrin(void *arg)
  469. {
  470. NvDrmRenderer *renderer = (NvDrmRenderer *) arg;
  471. int ret;
  472. pthread_mutex_lock(&renderer->enqueue_lock);
  473. while (renderer->pendingBuffers.empty()) {
  474. if (renderer->stop_thread) {
  475. pthread_mutex_unlock(&renderer->enqueue_lock);
  476. return NULL;
  477. }
  478. pthread_cond_wait(&renderer->enqueue_cond, &renderer->enqueue_lock);
  479. }
  480. int fd = (int)renderer->pendingBuffers.front();
  481. renderer->pendingBuffers.pop();
  482. pthread_mutex_unlock(&renderer->enqueue_lock);
  483. ret = renderer->renderInternal(fd);
  484. if (ret < 0) {
  485. renderer->is_in_error = 1;
  486. return NULL;
  487. }
  488. renderer->renderingStarted = true;
  489. while (!renderer->stop_thread) {
  490. page_flip_handler (renderer->drm_fd, 0, 0, 0, renderer);
  491. }
  492. return NULL;
  493. }
  494. bool NvDrmRenderer::hdrSupported()
  495. {
  496. uint32_t i;
  497. bool hdr_supported = 0;
  498. drmModeObjectProperties *props;
  499. drmModePropertyRes **props_info;
  500. props = drmModeObjectGetProperties(drm_fd, drm_crtc_id, DRM_MODE_OBJECT_CRTC);
  501. props_info = (drmModePropertyRes **) calloc(props->count_props, sizeof(props_info));
  502. for (i = 0; i < props->count_props; i++) {
  503. props_info[i] = drmModeGetProperty(drm_fd, props->props[i]);
  504. }
  505. for (i = 0; i < props->count_props; i++) {
  506. if (strcmp(props_info[i]->name, "HDR_SUPPORTED") == 0) {
  507. hdr_supported = props_info[i]->values[0];
  508. break;
  509. }
  510. }
  511. drmModeFreeObjectProperties(props);
  512. drmModeFreeProperty(*props_info);
  513. return hdr_supported;
  514. }
  515. int NvDrmRenderer::setHDRMetadataSmpte2086(struct drm_tegra_hdr_metadata_smpte_2086 metadata)
  516. {
  517. int prop_id = -1;
  518. uint32_t i;
  519. drmModeObjectProperties *props;
  520. drmModePropertyRes **props_info;
  521. if (hdrBlobCreated) {
  522. drmModeDestroyPropertyBlob(drm_fd, hdrBlobId);
  523. hdrBlobCreated = 0;
  524. }
  525. if (drmModeCreatePropertyBlob(drm_fd, &metadata, sizeof(metadata), &hdrBlobId) != 0) {
  526. return -1;
  527. }
  528. hdrBlobCreated = 1;
  529. props = drmModeObjectGetProperties(drm_fd, drm_crtc_id, DRM_MODE_OBJECT_CRTC);
  530. props_info = (drmModePropertyRes **) calloc(props->count_props, sizeof(props_info));
  531. for (i = 0; i < props->count_props; i++) {
  532. props_info[i] = drmModeGetProperty(drm_fd, props->props[i]);
  533. }
  534. for (i = 0; i < props->count_props; i++) {
  535. if (strcmp(props_info[i]->name, "HDR_METADATA_SMPTE_2086_ID") == 0) {
  536. prop_id = props_info[i]->prop_id;
  537. break;
  538. }
  539. }
  540. if (prop_id < 0) {
  541. return -1;
  542. }
  543. return drmModeObjectSetProperty(drm_fd, drm_crtc_id, DRM_MODE_OBJECT_CRTC, prop_id, hdrBlobId);
  544. }
/**
 * Tear down the renderer: stop and join the render thread, wake any blocked
 * dequeBuffer() callers, destroy sync primitives, remove all DRM
 * framebuffers still tracked, destroy the HDR blob, and close the drm fd.
 */
NvDrmRenderer::~NvDrmRenderer()
{
  uint32_t fb;

  // Ask the render thread to exit and wake it if it is waiting for work,
  // then join before destroying the primitives it uses.
  stop_thread = true;

  pthread_mutex_lock(&enqueue_lock);
  pthread_cond_broadcast(&enqueue_cond);
  pthread_mutex_unlock(&enqueue_lock);

  pthread_join(render_thread, NULL);

  pthread_mutex_destroy(&enqueue_lock);
  pthread_cond_destroy(&enqueue_cond);

  // Release any application thread blocked in dequeBuffer() (it re-checks
  // stop_thread after waking).
  pthread_mutex_lock(&dequeue_lock);
  pthread_cond_broadcast(&dequeue_cond);
  pthread_mutex_unlock(&dequeue_lock);

  pthread_mutex_destroy(&dequeue_lock);
  pthread_cond_destroy(&dequeue_cond);
  pthread_mutex_destroy(&render_lock);
  pthread_cond_destroy(&render_cond);

  // Remove every framebuffer in the fd->fb map plus the last one displayed.
  for (auto map_entry = map_list.begin();
       map_entry != map_list.end(); ++map_entry) {
    fb = (uint32_t) map_entry->second;
    drmModeRmFB(drm_fd, fb);
  }

  if(last_fb)
    drmModeRmFB(drm_fd, last_fb);

  if (hdrBlobCreated) {
    drmModeDestroyPropertyBlob(drm_fd, hdrBlobId);
    hdrBlobCreated = 0;
  }

  if (drm_fd != -1)
    drmClose(drm_fd);
}
  576. int
  577. NvDrmRenderer::dequeBuffer()
  578. {
  579. int fd = -1;
  580. // if (stop_thread)
  581. // return fd;
  582. // usleep(15000);
  583. pthread_mutex_lock(&dequeue_lock);
  584. while (freeBuffers.empty()) {
  585. if (stop_thread) {
  586. pthread_mutex_unlock(&dequeue_lock);
  587. return fd;
  588. }
  589. pthread_cond_wait (&dequeue_cond, &dequeue_lock);
  590. }
  591. fd = (int) freeBuffers.front();
  592. freeBuffers.pop();
  593. pthread_mutex_unlock(&dequeue_lock);
  594. return fd;
  595. }
/**
 * Queue a buffer fd for display. An fd of -1 is the EOS marker: the CRTC is
 * disabled, the last held buffer is released and the renderer stops.
 *
 * When rendering has started and no page flip is in flight, the head of the
 * queue is rendered synchronously on this thread; otherwise the render
 * thread / flip handler is signalled to pick the buffer up.
 *
 * @param fd buffer to display, or -1 for EOS
 * @return 0 on success (or accepted EOS); -1 if already in error; otherwise
 *         the renderInternal()/drmModeSetCrtc() result.
 */
int
NvDrmRenderer::enqueBuffer(int fd)
{
  int ret = -1;
  int tmpFd;

  if (is_in_error)
    return ret;

  pthread_mutex_lock(&enqueue_lock);
  pendingBuffers.push(fd);
  // With no flip outstanding the flip handler will not fire, so drain the
  // queue head directly from this thread.
  if (renderingStarted && !flipPending) {
    tmpFd = (int) pendingBuffers.front();
    pendingBuffers.pop();
    if (tmpFd == -1) {
      // drmModeSetCrtc with a ZERO FD will walk through the path that
      // disable the windows.
      // Note: drmModePageFlip doesn't support this trick.
      ret = drmModeSetCrtc(drm_fd, drm_crtc_id,
          ZERO_FD, 0, 0, &drm_conn_id, 1, NULL);
      if (ret) {
        COMP_ERROR_MSG("Failed to disable windows before exiting ");
        pthread_mutex_unlock(&enqueue_lock);
        return ret;
      }
      // This is EOS and it is assumed to be last buffer.
      // No buffer will be processed after this.
      // Release last buffer held.
      stop_thread = true;
      // Lock order: enqueue_lock is held while taking dequeue_lock, same
      // as in page_flip_handler's EOS path.
      pthread_mutex_lock(&dequeue_lock);
      if (activeFd != -1)
        freeBuffers.push(activeFd);
      pthread_cond_signal(&dequeue_cond);
      pthread_mutex_unlock(&dequeue_lock);
      pthread_mutex_unlock(&enqueue_lock);
      return 0;
    }
    // Release the lock before rendering: renderInternal() may block in
    // pthread_cond_timedwait for frame pacing.
    pthread_mutex_unlock(&enqueue_lock);
    ret = renderInternal(tmpFd);
  } else {
    // Hand the buffer to the render thread / flip handler.
    ret = 0;
    pthread_cond_signal(&enqueue_cond);
    pthread_mutex_unlock(&enqueue_lock);
  }
  return ret;
}
/**
 * Display one buffer: import it as a DRM framebuffer, attach it to plane 0,
 * pace to the configured FPS, and schedule (or on nvidia-drm, implicitly
 * perform) the page flip.
 *
 * @param fd dmabuf/NvBufSurface fd to display
 * @return 0 on success, -1 on any failure (error is logged).
 */
int
NvDrmRenderer::renderInternal(int fd)
{
  int ret;
  uint32_t i;
  uint32_t handle;
  uint32_t fb;
  uint32_t bo_handles[4] = {0};
  uint32_t flags = 0;
  bool frame_is_late = false;
  NvBufDrmParams dParams;
  struct drm_tegra_gem_set_tiling args;

  // NOTE(review): if an fd is found in map_list, dParams stays
  // uninitialized yet is read by the GEM-close loop below. The map insert
  // further down is commented out, so this hit path is currently
  // unreachable — revisit before re-enabling the map.
  auto map_entry = map_list.find (fd);
  if (map_entry != map_list.end()) {
    fb = (uint32_t) map_entry->second;
  } else {
    // Create a new FB.
    NvBufSurface *nvbuf_surf = 0;
    NvBufSurfaceFromFd(fd, (void**)(&nvbuf_surf));
    if (nvbuf_surf == NULL) {
      COMP_ERROR_MSG("NvBufSurfaceFromFd Failed ");
      goto error;
    }
    // Extract plane layout and the DRM fourcc from the surface.
    ret = NvBufGetDrmParams(nvbuf_surf, &dParams);
    if (ret < 0) {
      COMP_ERROR_MSG("Failed to convert to DRM params ");
      goto error;
    }
    // All planes live in the same dmabuf: import the same fd per plane
    // (per-plane offsets separate them).
    for (i = 0; i < dParams.num_planes; i++) {
      ret = drmPrimeFDToHandle(drm_fd, fd, &handle);
      if (ret)
      {
        COMP_ERROR_MSG("Failed to import buffer object. ");
        goto error;
      }
      if (!is_nvidia_drm) {
        // drm-nvdc path: force pitch-linear tiling on the imported BO.
        memset(&args, 0, sizeof(args));
        args.handle = handle;
        args.mode = DRM_TEGRA_GEM_TILING_MODE_PITCH;
        args.value = 1;
        ret = drmIoctl(drm_fd, DRM_IOCTL_TEGRA_GEM_SET_TILING, &args);
        if (ret < 0)
        {
          COMP_ERROR_MSG("Failed to set tiling parameters ");
          goto error;
        }
      }
      bo_handles[i] = handle;
    }
    if (is_nvidia_drm) {
      // nvidia-drm requires explicit format modifiers (linear here).
      static uint64_t modifiers[NVBUF_MAX_PLANES] = { 0 };
      uint64_t hm = 0;
      for (hm = 0; hm < dParams.num_planes; hm++) {
        modifiers[hm] = DRM_FORMAT_MOD_LINEAR;
        //modifiers[hm] = DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 2, 0x06, 0x01);
      }
      if (drmModeAddFB2WithModifiers (drm_fd, width, height,
              dParams.pixel_format, bo_handles, dParams.pitch, dParams.offset,
              modifiers, &fb,
              DRM_MODE_FB_MODIFIERS)) {
        COMP_ERROR_MSG ("Failed to create frame buffer\n");
        goto error;
      }
    } else {
      ret = drmModeAddFB2(drm_fd, width, height, dParams.pixel_format, bo_handles,
          dParams.pitch, dParams.offset, &fb, flags);
      if (ret)
      {
        COMP_ERROR_MSG("Failed to create fb ");
        goto error;
      }
    }
    // Full-screen plane 0; source rect is 16.16 fixed point, hence << 16.
    ret = setPlane(0, fb, 0, 0, width, height, 0, 0, width << 16, height << 16);
    if(ret) {
      COMP_ERROR_MSG("FAILED TO SET PLANE ");
      goto error;
    }
    /* TODO:
     * We get new FDs from camera consumer. Don't do mapping until
     * we can resolve that.
     */
    // map_list.insert(std::make_pair(fd, fb));
  }

  // Frame pacing: advance the deadline by one frame period (set via
  // setFPS) and sleep until it. tv_sec == 0 marks the very first frame,
  // which just records the current time.
  if (last_render_time.tv_sec != 0)
  {
    pthread_mutex_lock(&render_lock);
    last_render_time.tv_sec += render_time_sec;
    last_render_time.tv_nsec += render_time_nsec;
    last_render_time.tv_sec += last_render_time.tv_nsec / 1000000000UL;
    last_render_time.tv_nsec %= 1000000000UL;

    if (isProfilingEnabled())
    {
      // Mark the frame late if the deadline already passed.
      struct timeval cur_time;
      gettimeofday(&cur_time, NULL);
      if ((cur_time.tv_sec * 1000000.0 + cur_time.tv_usec) >
          (last_render_time.tv_sec * 1000000.0 +
           last_render_time.tv_nsec / 1000.0))
      {
        frame_is_late = true;
      }
    }

    pthread_cond_timedwait(&render_cond, &render_lock,
        &last_render_time);
    pthread_mutex_unlock(&render_lock);
  }
  else
  {
    struct timeval now;
    gettimeofday(&now, NULL);
    last_render_time.tv_sec = now.tv_sec;
    last_render_time.tv_nsec = now.tv_usec * 1000L;
  }

  // Record the buffer now heading to the screen; page_flip_handler picks
  // these up when the flip completes.
  flippedFd = fd;
  flipPending = true;
  if (!is_nvidia_drm) {
    ret = drmModePageFlip(drm_fd, drm_crtc_id, fb,
        DRM_MODE_PAGE_FLIP_EVENT,
        this);
    if (ret)
    {
      COMP_ERROR_MSG("Failed to flip");
      flipPending = false;
      goto error;
    }
  }

  /* TODO:
   * Don't create/remove fb for each frame but maintain mapping.
   * We will do that once new FD for each frame from consumer is resolved.
   */
  // Drop the per-frame GEM handles and the previous frame's fb.
  for (i = 0; i < dParams.num_planes; i++)
  {
    drmUtilCloseGemBo (drm_fd,bo_handles[i]);
  }
  if(last_fb)
    drmModeRmFB(drm_fd, last_fb);
  last_fb = fb;

  profiler.finishProcessing(0, frame_is_late);
  return 0;

error:
  COMP_ERROR_MSG("Error in rendering frame ");
  return -1;
}
  782. int
  783. NvDrmRenderer::createDumbBO(int width, int height, int bpp, NvDrmBO *bo)
  784. {
  785. struct drm_mode_create_dumb creq;
  786. struct drm_mode_destroy_dumb dreq;
  787. struct drm_mode_map_dumb mreq;
  788. int ret;
  789. uint8_t* map = NULL;
  790. /* create dumb buffer */
  791. memset(&creq, 0, sizeof(creq));
  792. creq.width = width;
  793. creq.height = height;
  794. creq.bpp = bpp;
  795. ret = drmIoctl(drm_fd, DRM_IOCTL_MODE_CREATE_DUMB, &creq);
  796. if (ret < 0) {
  797. COMP_ERROR_MSG("cannot create dumb buffer\n");
  798. return 0;
  799. }
  800. /* prepare buffer for memory mapping */
  801. memset(&mreq, 0, sizeof(mreq));
  802. mreq.handle = creq.handle;
  803. ret = drmIoctl(drm_fd, DRM_IOCTL_MODE_MAP_DUMB, &mreq);
  804. if (ret) {
  805. COMP_ERROR_MSG("cannot map dumb buffer\n");
  806. ret = -errno;
  807. goto err_destroy;
  808. }
  809. if (is_nvidia_drm) {
  810. map = (uint8_t*)mmap(0, creq.size, PROT_READ | PROT_WRITE, MAP_SHARED, drm_fd,
  811. mreq.offset);
  812. if (map == MAP_FAILED) {
  813. COMP_ERROR_MSG("cannot mmap dumb buffer\n");
  814. return 0;
  815. }
  816. } else {
  817. map = (uint8_t *) (mreq.offset);
  818. }
  819. /* clear the buffer object */
  820. memset(map, 0x00, creq.size);
  821. bo->bo_handle = creq.handle;
  822. bo->width = width;
  823. bo->height = height;
  824. bo->pitch = creq.pitch;
  825. bo->data = map;
  826. return 1;
  827. err_destroy:
  828. memset(&dreq, 0, sizeof(dreq));
  829. dreq.handle = creq.handle;
  830. drmIoctl(drm_fd, DRM_IOCTL_MODE_DESTROY_DUMB, &dreq);
  831. return 0;
  832. }
  833. int
  834. NvDrmRenderer::setFPS(float fps)
  835. {
  836. uint64_t render_time_usec;
  837. if (fps == 0)
  838. {
  839. COMP_WARN_MSG("Fps 0 is not allowed. Not changing fps");
  840. return -1;
  841. }
  842. pthread_mutex_lock(&render_lock);
  843. this->fps = fps;
  844. render_time_usec = 1000000L / fps;
  845. render_time_sec = render_time_usec / 1000000;
  846. render_time_nsec = (render_time_usec % 1000000) * 1000L;
  847. pthread_mutex_unlock(&render_lock);
  848. return 0;
  849. }
  850. bool NvDrmRenderer::enableUniversalPlanes (int enable)
  851. {
  852. return !drmSetClientCap(drm_fd, DRM_CLIENT_CAP_UNIVERSAL_PLANES, enable);
  853. }
  854. uint32_t
  855. NvDrmRenderer::createDumbFB(uint32_t width, uint32_t height,
  856. uint32_t drm_format, NvDrmFB *fb)
  857. {
  858. int buf_count;
  859. int i = 0;
  860. struct drm_mode_destroy_dumb dreq;
  861. int ret;
  862. struct NvBOFormat boFormat = {0};
  863. if (!get_format_info(drm_format, &boFormat)) {
  864. COMP_ERROR_MSG("Can't make a FB of type " << drm_format);
  865. return 0;
  866. }
  867. buf_count = boFormat.num_buffers;
  868. uint32_t buf_id;
  869. uint32_t bo_handles[4] = {0};
  870. uint32_t pitches[4] = {0};
  871. uint32_t offsets[4] = {0};
  872. /* create dumb buffers */
  873. for (i = 0; i < buf_count; i++) {
  874. NvDrmBO *bo = &(fb->bo[i]);
  875. ret = createDumbBO(width / boFormat.buffers[i].w,
  876. height / boFormat.buffers[i].h,
  877. boFormat.buffers[i].bpp, bo);
  878. if (ret < 0) {
  879. COMP_ERROR_MSG("cannot create dumb buffer ");
  880. return 0;
  881. }
  882. bo_handles[i] = fb->bo[i].bo_handle;
  883. pitches[i] = fb->bo[i].pitch;
  884. offsets[i] = 0;
  885. }
  886. /* create framebuffer object for the dumb-buffer */
  887. ret = drmModeAddFB2(drm_fd, width, height, drm_format, bo_handles,
  888. pitches, offsets, &buf_id, 0);
  889. if (ret) {
  890. COMP_ERROR_MSG("cannot create framebuffer ");
  891. goto err_destroy;
  892. }
  893. fb->fb_id = buf_id;
  894. fb->width = width;
  895. fb->height = height;
  896. fb->format = drm_format;
  897. return 1;
  898. err_destroy:
  899. for (i = 0; i < buf_count; i++) {
  900. memset(&dreq, 0, sizeof(dreq));
  901. dreq.handle = fb->bo[i].bo_handle;
  902. drmIoctl(drm_fd, DRM_IOCTL_MODE_DESTROY_DUMB, &dreq);
  903. }
  904. return 0;
  905. }
  906. int NvDrmRenderer::removeFB(uint32_t fb_id)
  907. {
  908. return drmModeRmFB(drm_fd, fb_id);
  909. }
  910. int NvDrmRenderer::setPlane(uint32_t pl_index,
  911. uint32_t fb_id,
  912. uint32_t crtc_x,
  913. uint32_t crtc_y,
  914. uint32_t crtc_w,
  915. uint32_t crtc_h,
  916. uint32_t src_x,
  917. uint32_t src_y,
  918. uint32_t src_w,
  919. uint32_t src_h)
  920. {
  921. int ret = -1;
  922. drmModePlaneResPtr pl = NULL;
  923. drmModePlanePtr plane = NULL;
  924. pl = drmModeGetPlaneResources(drm_fd);
  925. if (pl) {
  926. if (pl_index < pl->count_planes) {
  927. plane = drmModeGetPlane(drm_fd, pl->planes[pl_index]);
  928. if (plane) {
  929. ret = drmModeSetPlane(drm_fd, plane->plane_id, drm_crtc_id,
  930. fb_id, 0, crtc_x, crtc_y, crtc_w,
  931. crtc_h, src_x, src_y,
  932. src_w, src_h);
  933. drmModeFreePlane(plane);
  934. }
  935. } else {
  936. ret = -EINVAL;
  937. }
  938. drmModeFreePlaneResources(pl);
  939. return ret;
  940. }
  941. COMP_ERROR_MSG("No plane resource available ");
  942. return ret;
  943. }
  944. int NvDrmRenderer::getPlaneCount()
  945. {
  946. drmModePlaneResPtr pl = NULL;
  947. int count = 0;
  948. pl = drmModeGetPlaneResources(drm_fd);
  949. if (pl) {
  950. count = pl->count_planes;
  951. drmModeFreePlaneResources(pl);
  952. }
  953. return count;
  954. }
  955. int32_t NvDrmRenderer::getPlaneIndex(uint32_t crtc_index,
  956. int32_t* plane_index)
  957. {
  958. drmModePlaneResPtr pl = NULL;
  959. uint32_t count = 0;
  960. if (!plane_index)
  961. return 0;
  962. pl = drmModeGetPlaneResources(drm_fd);
  963. if (pl) {
  964. for (uint32_t i = 0; i < pl->count_planes; i++) {
  965. drmModePlanePtr plane;
  966. plane = drmModeGetPlane(drm_fd, pl->planes[i]);
  967. plane_index[i] = -1;
  968. if (plane) {
  969. //Find the plane is with the given crtc
  970. if (plane->possible_crtcs & (1 << crtc_index)) {
  971. plane_index[count] = i;
  972. count++;
  973. }
  974. drmModeFreePlane(plane);
  975. }
  976. }
  977. drmModeFreePlaneResources(pl);
  978. }
  979. return count;
  980. }
  981. int NvDrmRenderer::getCrtcCount()
  982. {
  983. drmModeResPtr resPtr = NULL;
  984. int count = 0;
  985. resPtr = drmModeGetResources(drm_fd);
  986. if (resPtr) {
  987. count = resPtr->count_crtcs;
  988. drmModeFreeResources(resPtr);
  989. }
  990. return count;
  991. }
  992. int NvDrmRenderer::getEncoderCount()
  993. {
  994. drmModeResPtr resPtr = NULL;
  995. int count = 0;
  996. resPtr = drmModeGetResources(drm_fd);
  997. if (resPtr) {
  998. count = resPtr->count_encoders;
  999. drmModeFreeResources(resPtr);
  1000. }
  1001. return count;
  1002. }
  1003. NvDrmRenderer *
  1004. NvDrmRenderer::createDrmRenderer(const char *name, uint32_t width,
  1005. uint32_t height, uint32_t w_x, uint32_t w_y,
  1006. uint32_t connector, uint32_t crtc,
  1007. struct drm_tegra_hdr_metadata_smpte_2086 metadata,
  1008. bool streamHDR)
  1009. {
  1010. if (!width || ! height) {
  1011. width = 640;
  1012. height = 480;
  1013. }
  1014. NvDrmRenderer* renderer = new NvDrmRenderer(name, width, height, w_x, w_y, connector, crtc, metadata, streamHDR);
  1015. if (renderer && renderer->isInError())
  1016. {
  1017. delete renderer;
  1018. return NULL;
  1019. }
  1020. return renderer;
  1021. }