1 /*
2 * Copyright (c) 2017-2019, The Linux Foundation. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are
6 * met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above
10 * copyright notice, this list of conditions and the following
11 * disclaimer in the documentation and/or other materials provided
12 * with the distribution.
13 * * Neither the name of The Linux Foundation nor the names of its
14 * contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
18 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
21 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
25 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
27 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30 #define __STDC_FORMAT_MACROS
31
32 #include <ctype.h>
33 #include <drm/drm_fourcc.h>
34 #include <drm_lib_loader.h>
35 #include <drm_master.h>
36 #include <drm_res_mgr.h>
37 #include <fcntl.h>
38 #include <inttypes.h>
39 #include <linux/fb.h>
40 #include <math.h>
41 #include <stdio.h>
42 #include <string.h>
43 #include <sys/ioctl.h>
44 #include <sys/stat.h>
45 #include <sys/types.h>
46 #include <unistd.h>
47 #include <utils/constants.h>
48 #include <utils/debug.h>
49 #include <utils/formats.h>
50 #include <utils/sys.h>
51 #include <drm/sde_drm.h>
52 #include <private/color_params.h>
53 #include <utils/rect.h>
54 #include <utils/utils.h>
55
56 #include <sstream>
57 #include <ctime>
58 #include <algorithm>
59 #include <string>
60 #include <unordered_map>
61 #include <utility>
62 #include <vector>
63 #include <limits>
64
65 #include "hw_device_drm.h"
66 #include "hw_info_interface.h"
67
68 #define __CLASS__ "HWDeviceDRM"
69
70 #ifndef DRM_FORMAT_MOD_QCOM_COMPRESSED
71 #define DRM_FORMAT_MOD_QCOM_COMPRESSED fourcc_mod_code(QCOM, 1)
72 #endif
73 #ifndef DRM_FORMAT_MOD_QCOM_DX
74 #define DRM_FORMAT_MOD_QCOM_DX fourcc_mod_code(QCOM, 0x2)
75 #endif
76 #ifndef DRM_FORMAT_MOD_QCOM_TIGHT
77 #define DRM_FORMAT_MOD_QCOM_TIGHT fourcc_mod_code(QCOM, 0x4)
78 #endif
79
80 using std::string;
81 using std::to_string;
82 using std::fstream;
83 using std::unordered_map;
84 using std::stringstream;
85 using std::ifstream;
86 using std::ofstream;
87 using drm_utils::DRMMaster;
88 using drm_utils::DRMResMgr;
89 using drm_utils::DRMLibLoader;
90 using drm_utils::DRMBuffer;
91 using sde_drm::GetDRMManager;
92 using sde_drm::DestroyDRMManager;
93 using sde_drm::DRMDisplayType;
94 using sde_drm::DRMDisplayToken;
95 using sde_drm::DRMConnectorInfo;
96 using sde_drm::DRMPPFeatureInfo;
97 using sde_drm::DRMRect;
98 using sde_drm::DRMRotation;
99 using sde_drm::DRMBlendType;
100 using sde_drm::DRMSrcConfig;
101 using sde_drm::DRMOps;
102 using sde_drm::DRMTopology;
103 using sde_drm::DRMPowerMode;
104 using sde_drm::DRMSecureMode;
105 using sde_drm::DRMSecurityLevel;
106 using sde_drm::DRMCscType;
107 using sde_drm::DRMMultiRectMode;
108
109 namespace sdm {
110
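// Maps a tone-map LUT type to the SSPP post-processing block that hosts it:
// the DGM block for DMA-pipe LUTs, the VIG block for VIG-pipe LUTs.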
111 static PPBlock GetPPBlock(const HWToneMapLut &lut_type) {
112 PPBlock pp_block = kPPBlockMax;
113 switch (lut_type) {
114 case kDma1dIgc:
115 case kDma1dGc:
116 pp_block = kDGM;
117 break;
118 case kVig1dIgc:
119 case kVig3dGamut:
120 pp_block = kVIG;
121 break;
122 default:
123 DLOGE("Unknown PP Block");
124 break;
125 }
126 return pp_block;
127 }
128
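// Maps an SDM LayerBufferFormat to a DRM fourcc code plus an optional QCOM format
// modifier for the UBWC/tile/10-bit variants. DRM fourcc names use little-endian
// component order, which is why e.g. kFormatRGBA8888 maps to DRM_FORMAT_ABGR8888.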
129 static void GetDRMFormat(LayerBufferFormat format, uint32_t *drm_format,
130 uint64_t *drm_format_modifier) {
131 switch (format) {
132 case kFormatRGBA8888:
133 *drm_format = DRM_FORMAT_ABGR8888;
134 break;
135 case kFormatRGBA8888Ubwc:
136 *drm_format = DRM_FORMAT_ABGR8888;
137 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
138 break;
139 case kFormatRGBA5551:
140 *drm_format = DRM_FORMAT_ABGR1555;
141 break;
142 case kFormatRGBA4444:
143 *drm_format = DRM_FORMAT_ABGR4444;
144 break;
145 case kFormatBGRA8888:
146 *drm_format = DRM_FORMAT_ARGB8888;
147 break;
148 case kFormatRGBX8888:
149 *drm_format = DRM_FORMAT_XBGR8888;
150 break;
151 case kFormatRGBX8888Ubwc:
152 *drm_format = DRM_FORMAT_XBGR8888;
153 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
154 break;
155 case kFormatBGRX8888:
156 *drm_format = DRM_FORMAT_XRGB8888;
157 break;
158 case kFormatRGB888:
159 *drm_format = DRM_FORMAT_BGR888;
160 break;
161 case kFormatRGB565:
162 *drm_format = DRM_FORMAT_BGR565;
163 break;
164 case kFormatBGR565:
165 *drm_format = DRM_FORMAT_RGB565;
166 break;
167 case kFormatBGR565Ubwc:
168 *drm_format = DRM_FORMAT_BGR565;
169 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
170 break;
171 case kFormatRGBA1010102:
172 *drm_format = DRM_FORMAT_ABGR2101010;
173 break;
174 case kFormatRGBA1010102Ubwc:
175 *drm_format = DRM_FORMAT_ABGR2101010;
176 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
177 break;
178 case kFormatARGB2101010:
179 *drm_format = DRM_FORMAT_BGRA1010102;
180 break;
181 case kFormatRGBX1010102:
182 *drm_format = DRM_FORMAT_XBGR2101010;
183 break;
184 case kFormatRGBX1010102Ubwc:
185 *drm_format = DRM_FORMAT_XBGR2101010;
186 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
187 break;
188 case kFormatXRGB2101010:
189 *drm_format = DRM_FORMAT_BGRX1010102;
190 break;
191 case kFormatBGRA1010102:
192 *drm_format = DRM_FORMAT_ARGB2101010;
193 break;
194 case kFormatABGR2101010:
195 *drm_format = DRM_FORMAT_RGBA1010102;
196 break;
197 case kFormatBGRX1010102:
198 *drm_format = DRM_FORMAT_XRGB2101010;
199 break;
200 case kFormatXBGR2101010:
201 *drm_format = DRM_FORMAT_RGBX1010102;
202 break;
203 case kFormatYCbCr420SemiPlanar:
204 *drm_format = DRM_FORMAT_NV12;
205 break;
206 case kFormatYCbCr420SemiPlanarVenus:
207 *drm_format = DRM_FORMAT_NV12;
208 break;
209 case kFormatYCbCr420SPVenusUbwc:
210 *drm_format = DRM_FORMAT_NV12;
211 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
212 break;
213 case kFormatYCbCr420SPVenusTile:
214 *drm_format = DRM_FORMAT_NV12;
215 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_TILE;
216 break;
217 case kFormatYCrCb420SemiPlanar:
218 *drm_format = DRM_FORMAT_NV21;
219 break;
220 case kFormatYCrCb420SemiPlanarVenus:
221 *drm_format = DRM_FORMAT_NV21;
222 break;
223 case kFormatYCbCr420P010:
224 case kFormatYCbCr420P010Venus:
225 *drm_format = DRM_FORMAT_NV12;
226 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_DX;
227 break;
228 case kFormatYCbCr420P010Ubwc:
229 *drm_format = DRM_FORMAT_NV12;
230 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED |
231 DRM_FORMAT_MOD_QCOM_DX;
232 break;
233 case kFormatYCbCr420P010Tile:
234 *drm_format = DRM_FORMAT_NV12;
235 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_TILE |
236 DRM_FORMAT_MOD_QCOM_DX;
237 break;
238 case kFormatYCbCr420TP10Ubwc:
239 *drm_format = DRM_FORMAT_NV12;
240 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED |
241 DRM_FORMAT_MOD_QCOM_DX | DRM_FORMAT_MOD_QCOM_TIGHT;
242 break;
243 case kFormatYCbCr420TP10Tile:
244 *drm_format = DRM_FORMAT_NV12;
245 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_TILE |
246 DRM_FORMAT_MOD_QCOM_DX | DRM_FORMAT_MOD_QCOM_TIGHT;
247 break;
248 case kFormatYCbCr422H2V1SemiPlanar:
249 *drm_format = DRM_FORMAT_NV16;
250 break;
251 case kFormatYCrCb422H2V1SemiPlanar:
252 *drm_format = DRM_FORMAT_NV61;
253 break;
254 case kFormatYCrCb420PlanarStride16:
255 *drm_format = DRM_FORMAT_YVU420;
256 break;
257 default:
258 DLOGW("Unsupported format %s", GetFormatString(format));
259 }
260 }
261
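// Scoped wrapper around a DRM framebuffer id: when a cached entry is evicted or the
// registry is cleared, the destructor returns the fb_id to DRMMaster.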
262 class FrameBufferObject : public LayerBufferObject {
263 public:
264 explicit FrameBufferObject(uint32_t fb_id) : fb_id_(fb_id) {
265 }
266
267 ~FrameBufferObject() {
268 DRMMaster *master;
269 DRMMaster::GetInstance(&master);
270 int ret = master->RemoveFbId(fb_id_);
271 if (ret < 0) {
272 DLOGE("Removing fb_id %d failed with error %d", fb_id_, errno);
273 }
274 }
275 uint32_t GetFbId() { return fb_id_; }
276
277 private:
278 uint32_t fb_id_;
279 };
280
281 HWDeviceDRM::Registry::Registry(BufferAllocator *buffer_allocator) :
282 buffer_allocator_(buffer_allocator) {
283 int value = 0;
284 if (Debug::GetProperty(DISABLE_FBID_CACHE, &value) == kErrorNone) {
285 disable_fbid_cache_ = (value == 1);
286 }
287 }
288
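// Creates and caches framebuffer ids for every layer's input buffer, and for the
// rotator output buffer when offline or inline rotation is in use, ahead of
// validate/commit. The cache limit depends on the buffer type (video vs. UI).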
289 void HWDeviceDRM::Registry::Register(HWLayers *hw_layers) {
290 HWLayersInfo &hw_layer_info = hw_layers->info;
291 uint32_t hw_layer_count = UINT32(hw_layer_info.hw_layers.size());
292
293 DTRACE_SCOPED();
294 for (uint32_t i = 0; i < hw_layer_count; i++) {
295 Layer &layer = hw_layer_info.hw_layers.at(i);
296 LayerBuffer *input_buffer = &layer.input_buffer;
297 HWRotatorSession *hw_rotator_session = &hw_layers->config[i].hw_rotator_session;
298 HWRotateInfo *hw_rotate_info = &hw_rotator_session->hw_rotate_info[0];
299 fbid_cache_limit_ = input_buffer->flags.video ? VIDEO_FBID_LIMIT : UI_FBID_LIMIT;
300
301 if (hw_rotator_session->mode == kRotatorOffline && hw_rotate_info->valid) {
302 input_buffer = &hw_rotator_session->output_buffer;
303 fbid_cache_limit_ = OFFLINE_ROTATOR_FBID_LIMIT;
304 }
305
306 // Mapping the layer input buffer to an fb_id also applies to inline rotation
307 if (hw_rotator_session->mode == kRotatorInline && hw_rotate_info->valid &&
308 hw_rotator_session->output_buffer.planes[0].fd >= 0) {
309 fbid_cache_limit_ += 1; // account for inline rot scratch buffer
310 MapBufferToFbId(&layer, &hw_rotator_session->output_buffer);
311 }
312
313 MapBufferToFbId(&layer, input_buffer);
314 }
315 }
316
317 int HWDeviceDRM::Registry::CreateFbId(LayerBuffer *buffer, uint32_t *fb_id) {
318 DRMMaster *master = nullptr;
319 DRMMaster::GetInstance(&master);
320 int ret = -1;
321
322 if (!master) {
323 DLOGE("Failed to acquire DRM Master instance");
324 return ret;
325 }
326
327 DRMBuffer layout{};
328 AllocatedBufferInfo buf_info{};
329 buf_info.fd = layout.fd = buffer->planes[0].fd;
330 buf_info.aligned_width = layout.width = buffer->width;
331 buf_info.aligned_height = layout.height = buffer->height;
332 buf_info.format = buffer->format;
333 GetDRMFormat(buf_info.format, &layout.drm_format, &layout.drm_format_modifier);
334 buffer_allocator_->GetBufferLayout(buf_info, layout.stride, layout.offset, &layout.num_planes);
335 ret = master->CreateFbId(layout, fb_id);
336 if (ret < 0) {
337 DLOGE("CreateFbId failed. width %d, height %d, format: %s, stride %u, error %d",
338 layout.width, layout.height, GetFormatString(buf_info.format), layout.stride[0], errno);
339 }
340
341 return ret;
342 }
343
344 void HWDeviceDRM::Registry::MapBufferToFbId(Layer* layer, LayerBuffer* buffer) {
345 if (buffer->planes[0].fd < 0) {
346 return;
347 }
348
349 uint64_t handle_id = buffer->handle_id;
350 if (!handle_id || disable_fbid_cache_) {
351 // In legacy path, clear fb_id map in each frame.
352 layer->buffer_map->buffer_map.clear();
353 } else {
354 if (layer->buffer_map->buffer_map.find(handle_id) != layer->buffer_map->buffer_map.end()) {
355 // Found fb_id for given handle_id key
356 return;
357 }
358
359 if (layer->buffer_map->buffer_map.size() >= fbid_cache_limit_) {
360 // Clear fb_id map, if the size reaches cache limit.
361 layer->buffer_map->buffer_map.clear();
362 }
363 }
364
365 uint32_t fb_id = 0;
366 if (CreateFbId(buffer, &fb_id) >= 0) {
367 // Create and cache the fb_id in map
368 layer->buffer_map->buffer_map[handle_id] = std::make_shared<FrameBufferObject>(fb_id);
369 }
370 }
371
372 void HWDeviceDRM::Registry::MapOutputBufferToFbId(LayerBuffer *output_buffer) {
373 if (output_buffer->planes[0].fd < 0) {
374 return;
375 }
376
377 uint64_t handle_id = output_buffer->handle_id;
378 if (!handle_id || disable_fbid_cache_) {
379 // In legacy path, clear output buffer map in each frame.
380 output_buffer_map_.clear();
381 } else {
382 if (output_buffer_map_.find(handle_id) != output_buffer_map_.end()) {
383 return;
384 }
385
386 if (output_buffer_map_.size() >= UI_FBID_LIMIT) {
387 // Clear output buffer map, if the size reaches cache limit.
388 output_buffer_map_.clear();
389 }
390 }
391
392 uint32_t fb_id = 0;
393 if (CreateFbId(output_buffer, &fb_id) >= 0) {
394 output_buffer_map_[handle_id] = std::make_shared<FrameBufferObject>(fb_id);
395 }
396 }
397
398 void HWDeviceDRM::Registry::Clear() {
399 output_buffer_map_.clear();
400 }
401
402 uint32_t HWDeviceDRM::Registry::GetFbId(Layer *layer, uint64_t handle_id) {
403 auto it = layer->buffer_map->buffer_map.find(handle_id);
404 if (it != layer->buffer_map->buffer_map.end()) {
405 FrameBufferObject *fb_obj = static_cast<FrameBufferObject*>(it->second.get());
406 return fb_obj->GetFbId();
407 }
408
409 return 0;
410 }
411
412 uint32_t HWDeviceDRM::Registry::GetOutputFbId(uint64_t handle_id) {
413 auto it = output_buffer_map_.find(handle_id);
414 if (it != output_buffer_map_.end()) {
415 FrameBufferObject *fb_obj = static_cast<FrameBufferObject*>(it->second.get());
416 return fb_obj->GetFbId();
417 }
418
419 return 0;
420 }
421
422 HWDeviceDRM::HWDeviceDRM(BufferSyncHandler *buffer_sync_handler, BufferAllocator *buffer_allocator,
423 HWInfoInterface *hw_info_intf)
424 : hw_info_intf_(hw_info_intf), buffer_sync_handler_(buffer_sync_handler),
425 registry_(buffer_allocator) {
426 hw_info_intf_ = hw_info_intf;
427 }
428
429 DisplayError HWDeviceDRM::Init() {
430 int ret = 0;
431 DRMMaster *drm_master = {};
432 DRMMaster::GetInstance(&drm_master);
433 drm_master->GetHandle(&dev_fd_);
434 DRMLibLoader::GetInstance()->FuncGetDRMManager()(dev_fd_, &drm_mgr_intf_);
435
436 if (-1 == display_id_) {
437 if (drm_mgr_intf_->RegisterDisplay(disp_type_, &token_)) {
438 DLOGE("RegisterDisplay (by type) failed for %s", device_name_);
439 return kErrorResources;
440 }
441 } else if (drm_mgr_intf_->RegisterDisplay(display_id_, &token_)) {
442 DLOGE("RegisterDisplay (by id) failed for %s - %d", device_name_, display_id_);
443 return kErrorResources;
444 }
445
446 if (token_.conn_id > INT32_MAX) {
447 DLOGE("Connector id %u beyond supported range", token_.conn_id);
448 drm_mgr_intf_->UnregisterDisplay(&token_);
449 return kErrorNotSupported;
450 }
451
452 display_id_ = static_cast<int32_t>(token_.conn_id);
453
454 ret = drm_mgr_intf_->CreateAtomicReq(token_, &drm_atomic_intf_);
455 if (ret) {
456 DLOGE("Failed creating atomic request for connector id %u. Error: %d.", token_.conn_id, ret);
457 drm_mgr_intf_->UnregisterDisplay(&token_);
458 return kErrorResources;
459 }
460
461 ret = drm_mgr_intf_->GetConnectorInfo(token_.conn_id, &connector_info_);
462 if (ret) {
463 DLOGE("Failed getting info for connector id %u. Error: %d.", token_.conn_id, ret);
464 drm_mgr_intf_->DestroyAtomicReq(drm_atomic_intf_);
465 drm_atomic_intf_ = {};
466 drm_mgr_intf_->UnregisterDisplay(&token_);
467 return kErrorHardware;
468 }
469
470 if (!connector_info_.is_connected || connector_info_.modes.empty()) {
471 DLOGW("Device removal detected on connector id %u. Connector status %s and %d modes.",
472 token_.conn_id, connector_info_.is_connected ? "connected":"disconnected",
473 connector_info_.modes.size());
474 drm_mgr_intf_->DestroyAtomicReq(drm_atomic_intf_);
475 drm_atomic_intf_ = {};
476 drm_mgr_intf_->UnregisterDisplay(&token_);
477 return kErrorDeviceRemoved;
478 }
479
480 hw_info_intf_->GetHWResourceInfo(&hw_resource_);
481
482 InitializeConfigs();
483 PopulateHWPanelInfo();
484 UpdateMixerAttributes();
485
486 // TODO(user): In future, remove has_qseed3 member, add version and pass version to constructor
487 if (hw_resource_.has_qseed3) {
488 hw_scale_ = new HWScaleDRM(HWScaleDRM::Version::V2);
489 }
490
491 std::unique_ptr<HWColorManagerDrm> hw_color_mgr(new HWColorManagerDrm());
492 hw_color_mgr_ = std::move(hw_color_mgr);
493
494 return kErrorNone;
495 }
496
497 DisplayError HWDeviceDRM::Deinit() {
498 DisplayError err = kErrorNone;
499 if (!first_cycle_) {
500 // A null-commit is needed only if the first commit had gone through. e.g., If a pluggable
501 // display is plugged in and plugged out immediately, HWDeviceDRM::Deinit() may be called
502 // before any commit happened on the device. The driver may have removed any not-in-use
503 // connector (i.e., any connector which did not have a display commit on it and a crtc path
504 // setup), so token_.conn_id may have been removed if there was no commit, resulting in
505 // drmModeAtomicCommit() failure with ENOENT, 'No such file or directory'.
506 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, 0);
507 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::OFF);
508 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, nullptr);
509 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 0);
510 int ret = NullCommit(true /* synchronous */, false /* retain_planes */);
511 if (ret) {
512 DLOGE("Commit failed with error: %d", ret);
513 err = kErrorHardware;
514 }
515 }
516 delete hw_scale_;
517 registry_.Clear();
518 display_attributes_ = {};
519 drm_mgr_intf_->DestroyAtomicReq(drm_atomic_intf_);
520 drm_atomic_intf_ = {};
521 drm_mgr_intf_->UnregisterDisplay(&token_);
522 return err;
523 }
524
525 DisplayError HWDeviceDRM::GetDisplayId(int32_t *display_id) {
526 *display_id = display_id_;
527 return kErrorNone;
528 }
529
530 void HWDeviceDRM::InitializeConfigs() {
531 current_mode_index_ = 0;
532 // Update current mode with preferred mode
533 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
534 if (connector_info_.modes[mode_index].mode.type & DRM_MODE_TYPE_PREFERRED) {
535 DLOGI("Updating current display mode %d to preferred mode %d.", current_mode_index_,
536 mode_index);
537 current_mode_index_ = mode_index;
538 break;
539 }
540 }
541
542 display_attributes_.resize(connector_info_.modes.size());
543
544 uint32_t width = connector_info_.modes[current_mode_index_].mode.hdisplay;
545 uint32_t height = connector_info_.modes[current_mode_index_].mode.vdisplay;
546 for (uint32_t i = 0; i < connector_info_.modes.size(); i++) {
547 auto &mode = connector_info_.modes[i].mode;
548 if (mode.hdisplay != width || mode.vdisplay != height) {
549 resolution_switch_enabled_ = true;
550 }
551 PopulateDisplayAttributes(i);
552 }
553 }
554
555 DisplayError HWDeviceDRM::PopulateDisplayAttributes(uint32_t index) {
556 drmModeModeInfo mode = {};
557 uint32_t mm_width = 0;
558 uint32_t mm_height = 0;
559 DRMTopology topology = DRMTopology::SINGLE_LM;
560
561 if (default_mode_) {
562 DRMResMgr *res_mgr = nullptr;
563 int ret = DRMResMgr::GetInstance(&res_mgr);
564 if (ret < 0) {
565 DLOGE("Failed to acquire DRMResMgr instance");
566 return kErrorResources;
567 }
568
569 res_mgr->GetMode(&mode);
570 res_mgr->GetDisplayDimInMM(&mm_width, &mm_height);
571 } else {
572 mode = connector_info_.modes[index].mode;
573 mm_width = connector_info_.mmWidth;
574 mm_height = connector_info_.mmHeight;
575 topology = connector_info_.modes[index].topology;
576 }
577
578 display_attributes_[index].x_pixels = mode.hdisplay;
579 display_attributes_[index].y_pixels = mode.vdisplay;
580 display_attributes_[index].fps = mode.vrefresh;
581 display_attributes_[index].vsync_period_ns =
582 UINT32(1000000000L / display_attributes_[index].fps);
583
584 /*
585 Active Front Sync Back
586 Region Porch Porch
587 <-----------------------><----------------><-------------><-------------->
588 <----- [hv]display ----->
589 <------------- [hv]sync_start ------------>
590 <--------------------- [hv]sync_end --------------------->
591 <-------------------------------- [hv]total ----------------------------->
592 */
593
594 display_attributes_[index].v_front_porch = mode.vsync_start - mode.vdisplay;
595 display_attributes_[index].v_pulse_width = mode.vsync_end - mode.vsync_start;
596 display_attributes_[index].v_back_porch = mode.vtotal - mode.vsync_end;
597 display_attributes_[index].v_total = mode.vtotal;
598 display_attributes_[index].h_total = mode.htotal;
599 display_attributes_[index].is_device_split =
600 (topology == DRMTopology::DUAL_LM || topology == DRMTopology::DUAL_LM_MERGE ||
601 topology == DRMTopology::DUAL_LM_MERGE_DSC || topology == DRMTopology::DUAL_LM_DSC ||
602 topology == DRMTopology::DUAL_LM_DSCMERGE);
603 display_attributes_[index].clock_khz = mode.clock;
604
605 // If driver doesn't return panel width/height information, default to 320 dpi
606 if (INT(mm_width) <= 0 || INT(mm_height) <= 0) {
607 mm_width = UINT32(((FLOAT(mode.hdisplay) * 25.4f) / 320.0f) + 0.5f);
608 mm_height = UINT32(((FLOAT(mode.vdisplay) * 25.4f) / 320.0f) + 0.5f);
609 DLOGW("Driver doesn't report panel physical width and height - defaulting to 320dpi");
610 }
611
612 display_attributes_[index].x_dpi = (FLOAT(mode.hdisplay) * 25.4f) / FLOAT(mm_width);
613 display_attributes_[index].y_dpi = (FLOAT(mode.vdisplay) * 25.4f) / FLOAT(mm_height);
614 SetTopology(topology, &display_attributes_[index].topology);
615
616 DLOGI("Display attributes[%d]: WxH: %dx%d, DPI: %fx%f, FPS: %d, LM_SPLIT: %d, V_BACK_PORCH: %d," \
617 " V_FRONT_PORCH: %d, V_PULSE_WIDTH: %d, V_TOTAL: %d, H_TOTAL: %d, CLK: %dKHZ, TOPOLOGY: %d",
618 index, display_attributes_[index].x_pixels, display_attributes_[index].y_pixels,
619 display_attributes_[index].x_dpi, display_attributes_[index].y_dpi,
620 display_attributes_[index].fps, display_attributes_[index].is_device_split,
621 display_attributes_[index].v_back_porch, display_attributes_[index].v_front_porch,
622 display_attributes_[index].v_pulse_width, display_attributes_[index].v_total,
623 display_attributes_[index].h_total, display_attributes_[index].clock_khz,
624 display_attributes_[index].topology);
625
626 return kErrorNone;
627 }
628
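// Derives panel capabilities for the currently selected mode from the connector info
// reported by the driver: split configuration, partial-update constraints, dynamic
// fps range, HDR primaries/luminance and panel orientation.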
629 void HWDeviceDRM::PopulateHWPanelInfo() {
630 hw_panel_info_ = {};
631
632 snprintf(hw_panel_info_.panel_name, sizeof(hw_panel_info_.panel_name), "%s",
633 connector_info_.panel_name.c_str());
634
635 uint32_t index = current_mode_index_;
636 hw_panel_info_.split_info.left_split = display_attributes_[index].x_pixels;
637 if (display_attributes_[index].is_device_split) {
638 hw_panel_info_.split_info.left_split = hw_panel_info_.split_info.right_split =
639 display_attributes_[index].x_pixels / 2;
640 }
641
642 hw_panel_info_.partial_update = connector_info_.modes[index].num_roi;
643 hw_panel_info_.left_roi_count = UINT32(connector_info_.modes[index].num_roi);
644 hw_panel_info_.right_roi_count = UINT32(connector_info_.modes[index].num_roi);
645 hw_panel_info_.left_align = connector_info_.modes[index].xstart;
646 hw_panel_info_.top_align = connector_info_.modes[index].ystart;
647 hw_panel_info_.width_align = connector_info_.modes[index].walign;
648 hw_panel_info_.height_align = connector_info_.modes[index].halign;
649 hw_panel_info_.min_roi_width = connector_info_.modes[index].wmin;
650 hw_panel_info_.min_roi_height = connector_info_.modes[index].hmin;
651 hw_panel_info_.needs_roi_merge = connector_info_.modes[index].roi_merge;
652 hw_panel_info_.transfer_time_us = connector_info_.modes[index].transfer_time_us;
653 hw_panel_info_.dynamic_fps = connector_info_.dynamic_fps;
654 hw_panel_info_.qsync_support = connector_info_.qsync_support;
655 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
656 if (hw_panel_info_.dynamic_fps) {
657 uint32_t min_fps = current_mode.vrefresh;
658 uint32_t max_fps = current_mode.vrefresh;
659 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
660 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
661 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay)) {
662 if (min_fps > connector_info_.modes[mode_index].mode.vrefresh) {
663 min_fps = connector_info_.modes[mode_index].mode.vrefresh;
664 }
665 if (max_fps < connector_info_.modes[mode_index].mode.vrefresh) {
666 max_fps = connector_info_.modes[mode_index].mode.vrefresh;
667 }
668 }
669 }
670 hw_panel_info_.min_fps = min_fps;
671 hw_panel_info_.max_fps = max_fps;
672 } else {
673 hw_panel_info_.min_fps = current_mode.vrefresh;
674 hw_panel_info_.max_fps = current_mode.vrefresh;
675 }
676
677 hw_panel_info_.is_primary_panel = connector_info_.is_primary;
678 hw_panel_info_.is_pluggable = 0;
679 hw_panel_info_.hdr_enabled = connector_info_.panel_hdr_prop.hdr_enabled;
680 // Convert the luminance values to cd/m^2 units.
681 hw_panel_info_.peak_luminance = FLOAT(connector_info_.panel_hdr_prop.peak_brightness) / 10000.0f;
682 hw_panel_info_.blackness_level = FLOAT(connector_info_.panel_hdr_prop.blackness_level) / 10000.0f;
683 hw_panel_info_.primaries.white_point[0] = connector_info_.panel_hdr_prop.display_primaries[0];
684 hw_panel_info_.primaries.white_point[1] = connector_info_.panel_hdr_prop.display_primaries[1];
685 hw_panel_info_.primaries.red[0] = connector_info_.panel_hdr_prop.display_primaries[2];
686 hw_panel_info_.primaries.red[1] = connector_info_.panel_hdr_prop.display_primaries[3];
687 hw_panel_info_.primaries.green[0] = connector_info_.panel_hdr_prop.display_primaries[4];
688 hw_panel_info_.primaries.green[1] = connector_info_.panel_hdr_prop.display_primaries[5];
689 hw_panel_info_.primaries.blue[0] = connector_info_.panel_hdr_prop.display_primaries[6];
690 hw_panel_info_.primaries.blue[1] = connector_info_.panel_hdr_prop.display_primaries[7];
691 hw_panel_info_.dyn_bitclk_support = connector_info_.dyn_bitclk_support;
692
693 // No support for 90 degree rotation; only flips or 180 degree rotation are supported
694 hw_panel_info_.panel_orientation.rotation = 0;
695 hw_panel_info_.panel_orientation.flip_horizontal =
696 (connector_info_.panel_orientation == DRMRotation::FLIP_H) ||
697 (connector_info_.panel_orientation == DRMRotation::ROT_180);
698 hw_panel_info_.panel_orientation.flip_vertical =
699 (connector_info_.panel_orientation == DRMRotation::FLIP_V) ||
700 (connector_info_.panel_orientation == DRMRotation::ROT_180);
701
702 GetHWDisplayPortAndMode();
703 GetHWPanelMaxBrightness();
704 }
705
706 DisplayError HWDeviceDRM::GetDisplayIdentificationData(uint8_t *out_port, uint32_t *out_data_size,
707 uint8_t *out_data) {
708 *out_port = token_.hw_port;
709 std::vector<uint8_t> &edid = connector_info_.edid;
710
711 if (out_data == nullptr) {
712 *out_data_size = (uint32_t)(edid.size());
713 if (*out_data_size == 0) {
714 DLOGE("EDID blob is empty, no data to return");
715 return kErrorDriverData;
716 }
717 } else {
718 *out_data_size = std::min(*out_data_size, (uint32_t)(edid.size()));
719 memcpy(out_data, edid.data(), *out_data_size);
720 }
721
722 return kErrorNone;
723 }
724
725 void HWDeviceDRM::GetHWDisplayPortAndMode() {
726 hw_panel_info_.port = kPortDefault;
727 hw_panel_info_.mode =
728 (connector_info_.panel_mode == sde_drm::DRMPanelMode::VIDEO) ? kModeVideo : kModeCommand;
729
730 if (default_mode_) {
731 return;
732 }
733
734 switch (connector_info_.type) {
735 case DRM_MODE_CONNECTOR_DSI:
736 hw_panel_info_.port = kPortDSI;
737 interface_str_ = "DSI";
738 break;
739 case DRM_MODE_CONNECTOR_LVDS:
740 hw_panel_info_.port = kPortLVDS;
741 interface_str_ = "LVDS";
742 break;
743 case DRM_MODE_CONNECTOR_eDP:
744 hw_panel_info_.port = kPortEDP;
745 interface_str_ = "EDP";
746 break;
747 case DRM_MODE_CONNECTOR_TV:
748 case DRM_MODE_CONNECTOR_HDMIA:
749 case DRM_MODE_CONNECTOR_HDMIB:
750 hw_panel_info_.port = kPortDTV;
751 interface_str_ = "HDMI";
752 break;
753 case DRM_MODE_CONNECTOR_VIRTUAL:
754 hw_panel_info_.port = kPortWriteBack;
755 interface_str_ = "Virtual";
756 break;
757 case DRM_MODE_CONNECTOR_DisplayPort:
758 // TODO(user): Add when available
759 interface_str_ = "DisplayPort";
760 break;
761 }
762
763 return;
764 }
765
766 void HWDeviceDRM::GetHWPanelMaxBrightness() {
767 char brightness[kMaxStringLength] = {0};
768 string kMaxBrightnessNode = "/sys/class/backlight/panel0-backlight/max_brightness";
769
770 hw_panel_info_.panel_max_brightness = 255;
771 int fd = Sys::open_(kMaxBrightnessNode.c_str(), O_RDONLY);
772 if (fd < 0) {
773 DLOGW("Failed to open max brightness node = %s, error = %s", kMaxBrightnessNode.c_str(),
774 strerror(errno));
775 return;
776 }
777
778 if (Sys::pread_(fd, brightness, sizeof(brightness), 0) > 0) {
779 hw_panel_info_.panel_max_brightness = atoi(brightness);
780 DLOGI_IF(kTagDisplay, "Max brightness level = %d", hw_panel_info_.panel_max_brightness);
781 } else {
782 DLOGW("Failed to read max brightness level. error = %s", strerror(errno));
783 }
784
785 Sys::close_(fd);
786 }
787
788 DisplayError HWDeviceDRM::GetActiveConfig(uint32_t *active_config) {
789 *active_config = current_mode_index_;
790 return kErrorNone;
791 }
792
793 DisplayError HWDeviceDRM::GetNumDisplayAttributes(uint32_t *count) {
794 *count = UINT32(display_attributes_.size());
795 return kErrorNone;
796 }
797
798 DisplayError HWDeviceDRM::GetDisplayAttributes(uint32_t index,
799 HWDisplayAttributes *display_attributes) {
800 if (index >= display_attributes_.size()) {
801 return kErrorParameters;
802 }
803 *display_attributes = display_attributes_[index];
804 return kErrorNone;
805 }
806
807 DisplayError HWDeviceDRM::GetHWPanelInfo(HWPanelInfo *panel_info) {
808 *panel_info = hw_panel_info_;
809 return kErrorNone;
810 }
811
812 DisplayError HWDeviceDRM::SetDisplayAttributes(uint32_t index) {
813 if (index >= display_attributes_.size()) {
814 DLOGE("Invalid mode index %d mode size %d", index, UINT32(display_attributes_.size()));
815 return kErrorParameters;
816 }
817
818 drmModeModeInfo to_set = connector_info_.modes[index].mode;
819 uint64_t current_bit_clk = connector_info_.modes[current_mode_index_].bit_clk_rate;
820 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
821 if ((to_set.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
822 (to_set.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
823 (to_set.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
824 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate)) {
825 index = mode_index;
826 break;
827 }
828 }
829
830 current_mode_index_ = index;
831 PopulateHWPanelInfo();
832 UpdateMixerAttributes();
833
834 return kErrorNone;
835 }
836
837 DisplayError HWDeviceDRM::SetDisplayAttributes(const HWDisplayAttributes &display_attributes) {
838 return kErrorNotSupported;
839 }
840
841 DisplayError HWDeviceDRM::GetConfigIndex(char *mode, uint32_t *index) {
842 return kErrorNone;
843 }
844
845 DisplayError HWDeviceDRM::PowerOn(const HWQosData &qos_data, int *release_fence) {
846 SetQOSData(qos_data);
847
848 int64_t release_fence_t = -1;
849 update_mode_ = true;
850 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
851 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::ON);
852 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_t);
853 int ret = NullCommit(true /* synchronous */, true /* retain_planes */);
854 if (ret) {
855 DLOGE("Failed with error: %d", ret);
856 return kErrorHardware;
857 }
858
859 *release_fence = static_cast<int>(release_fence_t);
860 DLOGD_IF(kTagDriverConfig, "RELEASE fence created: fd:%d", *release_fence);
861 pending_doze_ = false;
862
863 return kErrorNone;
864 }
865
866 DisplayError HWDeviceDRM::PowerOff(bool teardown) {
867 DTRACE_SCOPED();
868 if (!drm_atomic_intf_) {
869 DLOGE("DRM Atomic Interface is null!");
870 return kErrorUndefined;
871 }
872
873 if (first_cycle_) {
874 return kErrorNone;
875 }
876
877 SetFullROI();
878 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
879 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
880 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::OFF);
881 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 0);
882 int ret = NullCommit(true /* synchronous */, false /* retain_planes */);
883 if (ret) {
884 DLOGE("Failed with error: %d", ret);
885 return kErrorHardware;
886 }
887 pending_doze_ = false;
888
889 return kErrorNone;
890 }
891
892 DisplayError HWDeviceDRM::Doze(const HWQosData &qos_data, int *release_fence) {
893 DTRACE_SCOPED();
894
895 if (!first_cycle_) {
896 pending_doze_ = true;
897 return kErrorNone;
898 }
899
900 SetQOSData(qos_data);
901
902 int64_t release_fence_t = -1;
903
904 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, token_.crtc_id);
905 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
906 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
907
908 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
909 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::DOZE);
910 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_t);
911 int ret = NullCommit(true /* synchronous */, true /* retain_planes */);
912 if (ret) {
913 DLOGE("Failed with error: %d", ret);
914 return kErrorHardware;
915 }
916
917 *release_fence = static_cast<int>(release_fence_t);
918 DLOGD_IF(kTagDriverConfig, "RELEASE fence created: fd:%d", *release_fence);
919 return kErrorNone;
920 }
921
922 DisplayError HWDeviceDRM::DozeSuspend(const HWQosData &qos_data, int *release_fence) {
923 DTRACE_SCOPED();
924
925 SetQOSData(qos_data);
926
927 int64_t release_fence_t = -1;
928
929 if (first_cycle_) {
930 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, token_.crtc_id);
931 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
932 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
933 }
934 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
935 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id,
936 DRMPowerMode::DOZE_SUSPEND);
937 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_t);
938 int ret = NullCommit(true /* synchronous */, true /* retain_planes */);
939 if (ret) {
940 DLOGE("Failed with error: %d", ret);
941 return kErrorHardware;
942 }
943
944 *release_fence = static_cast<int>(release_fence_t);
945 DLOGD_IF(kTagDriverConfig, "RELEASE fence created: fd:%d", *release_fence);
946 pending_doze_ = false;
947
948 return kErrorNone;
949 }
950
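// Programs the per-frame bandwidth and clock votes (core/LLCC/DRAM AB and IB values,
// rotator prefill bandwidth and clocks) as CRTC properties in the atomic request.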
951 void HWDeviceDRM::SetQOSData(const HWQosData &qos_data) {
952 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CORE_CLK, token_.crtc_id, qos_data.clock_hz);
953 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CORE_AB, token_.crtc_id, qos_data.core_ab_bps);
954 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CORE_IB, token_.crtc_id, qos_data.core_ib_bps);
955 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_LLCC_AB, token_.crtc_id, qos_data.llcc_ab_bps);
956 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_LLCC_IB, token_.crtc_id, qos_data.llcc_ib_bps);
957 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DRAM_AB, token_.crtc_id, qos_data.dram_ab_bps);
958 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DRAM_IB, token_.crtc_id, qos_data.dram_ib_bps);
959 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROT_PREFILL_BW, token_.crtc_id,
960 qos_data.rot_prefill_bw_bps);
961 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROT_CLK, token_.crtc_id, qos_data.rot_clock_hz);
962 }
963
964 DisplayError HWDeviceDRM::Standby() {
965 return kErrorNone;
966 }
967
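// Translates the composed HWLayers into atomic plane/CRTC/connector property updates.
// When validate is true the request is only prepared for a test commit; otherwise
// input fences are attached and release/retire fences are requested as well.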
968 void HWDeviceDRM::SetupAtomic(HWLayers *hw_layers, bool validate) {
969 if (default_mode_) {
970 return;
971 }
972
973 DTRACE_SCOPED();
974 HWLayersInfo &hw_layer_info = hw_layers->info;
975 uint32_t hw_layer_count = UINT32(hw_layer_info.hw_layers.size());
976 HWQosData &qos_data = hw_layers->qos_data;
977 DRMSecurityLevel crtc_security_level = DRMSecurityLevel::SECURE_NON_SECURE;
978 uint32_t index = current_mode_index_;
979 drmModeModeInfo current_mode = connector_info_.modes[index].mode;
980 uint64_t current_bit_clk = connector_info_.modes[index].bit_clk_rate;
981
982 solid_fills_.clear();
983 bool resource_update = hw_layers->updates_mask.test(kUpdateResources);
984 bool update_config = resource_update || hw_layer_info.stack->flags.geometry_changed;
985
986 // TODO(user): Once destination scaler is enabled we can always send ROIs if driver allows
987 if (hw_panel_info_.partial_update && update_config) {
988 const int kNumMaxROIs = 4;
989 DRMRect crtc_rects[kNumMaxROIs] = {{0, 0, mixer_attributes_.width, mixer_attributes_.height}};
990 DRMRect conn_rects[kNumMaxROIs] = {{0, 0, display_attributes_[index].x_pixels,
991 display_attributes_[index].y_pixels}};
992
993 for (uint32_t i = 0; i < hw_layer_info.left_frame_roi.size(); i++) {
994 auto &roi = hw_layer_info.left_frame_roi.at(i);
995 // TODO(user): In multi PU, stitch ROIs vertically adjacent and update plane destination
996 crtc_rects[i].left = UINT32(roi.left);
997 crtc_rects[i].right = UINT32(roi.right);
998 crtc_rects[i].top = UINT32(roi.top);
999 crtc_rects[i].bottom = UINT32(roi.bottom);
1000 // TODO(user): In Dest scaler + PU, populate from HWDestScaleInfo->panel_roi
1001 // TODO(user): panel_roi need to be made as a vector in HWLayersInfo and
1002 // needs to be removed from HWDestScaleInfo.
1003 conn_rects[i].left = UINT32(roi.left);
1004 conn_rects[i].right = UINT32(roi.right);
1005 conn_rects[i].top = UINT32(roi.top);
1006 conn_rects[i].bottom = UINT32(roi.bottom);
1007 }
1008
1009 uint32_t num_rects = std::max(1u, static_cast<uint32_t>(hw_layer_info.left_frame_roi.size()));
1010 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROI, token_.crtc_id,
1011 num_rects, crtc_rects);
1012 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_ROI, token_.conn_id,
1013 num_rects, conn_rects);
1014 }
1015
1016 for (uint32_t i = 0; i < hw_layer_count; i++) {
1017 Layer &layer = hw_layer_info.hw_layers.at(i);
1018 LayerBuffer *input_buffer = &layer.input_buffer;
1019 HWPipeInfo *left_pipe = &hw_layers->config[i].left_pipe;
1020 HWPipeInfo *right_pipe = &hw_layers->config[i].right_pipe;
1021 HWRotatorSession *hw_rotator_session = &hw_layers->config[i].hw_rotator_session;
1022
1023 if (hw_layers->config[i].use_solidfill_stage) {
1024 hw_layers->config[i].hw_solidfill_stage.solid_fill_info = layer.solid_fill_info;
1025 AddSolidfillStage(hw_layers->config[i].hw_solidfill_stage, layer.plane_alpha);
1026 continue;
1027 }
1028
1029 for (uint32_t count = 0; count < 2; count++) {
1030 HWPipeInfo *pipe_info = (count == 0) ? left_pipe : right_pipe;
1031 HWRotateInfo *hw_rotate_info = &hw_rotator_session->hw_rotate_info[count];
1032
1033 if (hw_rotator_session->mode == kRotatorOffline && hw_rotate_info->valid) {
1034 input_buffer = &hw_rotator_session->output_buffer;
1035 }
1036
1037 uint32_t fb_id = registry_.GetFbId(&layer, input_buffer->handle_id);
1038
1039 if (pipe_info->valid && fb_id) {
1040 uint32_t pipe_id = pipe_info->pipe_id;
1041
1042 if (update_config) {
1043 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ALPHA, pipe_id, layer.plane_alpha);
1044
1045 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ZORDER, pipe_id, pipe_info->z_order);
1046
1047 DRMBlendType blending = {};
1048 SetBlending(layer.blending, &blending);
1049 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_BLEND_TYPE, pipe_id, blending);
1050
1051 DRMRect src = {};
1052 SetRect(pipe_info->src_roi, &src);
1053 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SRC_RECT, pipe_id, src);
1054
1055 DRMRect rot_dst = {0, 0, 0, 0};
1056 if (hw_rotator_session->mode == kRotatorInline && hw_rotate_info->valid) {
1057 SetRect(hw_rotate_info->dst_roi, &rot_dst);
1058 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ROTATION_DST_RECT, pipe_id, rot_dst);
1059 if (hw_rotator_session->output_buffer.planes[0].fd >= 0) {
1060 uint32_t rot_fb_id = registry_.GetFbId(&layer,
1061 hw_rotator_session->output_buffer.handle_id);
1062 if (rot_fb_id) {
1063 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ROT_FB_ID, pipe_id, rot_fb_id);
1064 }
1065 }
1066 }
1067
1068 DRMRect dst = {};
1069 SetRect(pipe_info->dst_roi, &dst);
1070 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_DST_RECT, pipe_id, dst);
1071
1072 DRMRect excl = {};
1073 SetRect(pipe_info->excl_rect, &excl);
1074 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_EXCL_RECT, pipe_id, excl);
1075
1076 uint32_t rot_bit_mask = 0;
1077 SetRotation(layer.transform, hw_rotator_session->mode, &rot_bit_mask);
1078 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ROTATION, pipe_id, rot_bit_mask);
1079
1080 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_H_DECIMATION, pipe_id,
1081 pipe_info->horizontal_decimation);
1082 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_V_DECIMATION, pipe_id,
1083 pipe_info->vertical_decimation);
1084
1085 DRMSecureMode fb_secure_mode;
1086 DRMSecurityLevel security_level;
1087 SetSecureConfig(layer.input_buffer, &fb_secure_mode, &security_level);
1088 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_FB_SECURE_MODE, pipe_id, fb_secure_mode);
1089 if (security_level > crtc_security_level) {
1090 crtc_security_level = security_level;
1091 }
1092
1093 uint32_t config = 0;
1094 SetSrcConfig(layer.input_buffer, hw_rotator_session->mode, &config);
1095 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SRC_CONFIG, pipe_id, config);
1096
1097 if (hw_scale_) {
1098 SDEScaler scaler_output = {};
1099 hw_scale_->SetScaler(pipe_info->scale_data, &scaler_output);
1100 // TODO(user): Remove qseed3 and add version check, then send appropriate scaler object
1101 if (hw_resource_.has_qseed3) {
1102 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SCALER_CONFIG, pipe_id,
1103 reinterpret_cast<uint64_t>(&scaler_output.scaler_v2));
1104 }
1105 }
1106
1107 DRMCscType csc_type = DRMCscType::kCscTypeMax;
1108 SelectCscType(layer.input_buffer, &csc_type);
1109 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_CSC_CONFIG, pipe_id, &csc_type);
1110
1111 DRMMultiRectMode multirect_mode;
1112 SetMultiRectMode(pipe_info->flags, &multirect_mode);
1113 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_MULTIRECT_MODE, pipe_id, multirect_mode);
1114
1115 SetSsppTonemapFeatures(pipe_info);
1116 }
1117
1118 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_FB_ID, pipe_id, fb_id);
1119 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_CRTC, pipe_id, token_.crtc_id);
1120
1121 if (!validate && input_buffer->acquire_fence_fd >= 0) {
1122 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_INPUT_FENCE, pipe_id,
1123 input_buffer->acquire_fence_fd);
1124 }
1125 }
1126 }
1127 }
1128
1129 if (update_config) {
1130 SetSolidfillStages();
1131 SetQOSData(qos_data);
1132 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_SECURITY_LEVEL, token_.crtc_id, crtc_security_level);
1133 sde_drm::DRMQsyncMode mode = hw_layers->hw_avr_info.enable ? sde_drm::DRMQsyncMode::CONTINUOUS :
1134 sde_drm::DRMQsyncMode::NONE;
1135 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_QSYNC_MODE, token_.conn_id, mode);
1136 }
1137
1138 drm_atomic_intf_->Perform(DRMOps::DPPS_COMMIT_FEATURE, 0 /* argument is not used */);
1139
1140 if (!validate) {
1141 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_);
1142 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE, token_.conn_id, &retire_fence_);
1143 }
1144
1145 DLOGI_IF(kTagDriverConfig, "%s::%s System Clock=%d Hz, Core: AB=%llu Bps, IB=%llu Bps, " \
1146 "LLCC: AB=%llu Bps, IB=%llu Bps, DRAM AB=%llu Bps, IB=%llu Bps, "\
1147 "Rot: Bw=%llu Bps, Clock=%d Hz", validate ? "Validate" : "Commit", device_name_,
1148 qos_data.clock_hz, qos_data.core_ab_bps, qos_data.core_ib_bps, qos_data.llcc_ab_bps,
1149 qos_data.llcc_ib_bps, qos_data.dram_ab_bps, qos_data.dram_ib_bps,
1150 qos_data.rot_prefill_bw_bps, qos_data.rot_clock_hz);
1151
1152 // Set refresh rate
1153 if (vrefresh_) {
1154 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1155 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1156 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1157 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
1158 (vrefresh_ == connector_info_.modes[mode_index].mode.vrefresh)) {
1159 current_mode = connector_info_.modes[mode_index].mode;
1160 break;
1161 }
1162 }
1163 }
1164
1165 if (bit_clk_rate_) {
1166 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1167 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1168 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1169 (current_mode.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
1170 (bit_clk_rate_ == connector_info_.modes[mode_index].bit_clk_rate)) {
1171 current_mode = connector_info_.modes[mode_index].mode;
1172 break;
1173 }
1174 }
1175 }
1176
1177 if (first_cycle_) {
1178 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_TOPOLOGY_CONTROL, token_.conn_id,
1179 topology_control_);
1180 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
1181 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, token_.crtc_id);
1182 DRMPowerMode power_mode = pending_doze_ ? DRMPowerMode::DOZE : DRMPowerMode::ON;
1183 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, power_mode);
1184 } else if (pending_doze_ && !validate) {
1185 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
1186 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::DOZE);
1187 pending_doze_ = false;
1188 }
1189
1190 // Set CRTC mode, only if display config changes
1191 if (vrefresh_ || first_cycle_ || update_mode_) {
1192 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
1193 }
1194
1195 if (!validate && (hw_layer_info.set_idle_time_ms >= 0)) {
1196 DLOGI_IF(kTagDriverConfig, "Setting idle timeout to = %d ms",
1197 hw_layer_info.set_idle_time_ms);
1198 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_IDLE_TIMEOUT, token_.crtc_id,
1199 hw_layer_info.set_idle_time_ms);
1200 }
1201
1202 if (hw_panel_info_.mode == kModeCommand) {
1203 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_AUTOREFRESH, token_.conn_id, autorefresh_);
1204 }
1205 }
1206
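// Converts an SDM solid-fill stage into its DRM representation. Colors are unpacked
// from the packed 32-bit ARGB word when no bit depth is given, otherwise taken from
// the per-channel solid_fill_info values.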
1207 void HWDeviceDRM::AddSolidfillStage(const HWSolidfillStage &sf, uint32_t plane_alpha) {
1208 sde_drm::DRMSolidfillStage solidfill;
1209 solidfill.bounding_rect.left = UINT32(sf.roi.left);
1210 solidfill.bounding_rect.top = UINT32(sf.roi.top);
1211 solidfill.bounding_rect.right = UINT32(sf.roi.right);
1212 solidfill.bounding_rect.bottom = UINT32(sf.roi.bottom);
1213 solidfill.is_exclusion_rect = sf.is_exclusion_rect;
1214 solidfill.plane_alpha = plane_alpha;
1215 solidfill.z_order = sf.z_order;
1216 if (!sf.solid_fill_info.bit_depth) {
1217 solidfill.color_bit_depth = 8;
1218 solidfill.alpha = (0xff000000 & sf.color) >> 24;
1219 solidfill.red = (0xff0000 & sf.color) >> 16;
1220 solidfill.green = (0xff00 & sf.color) >> 8;
1221 solidfill.blue = 0xff & sf.color;
1222 } else {
1223 solidfill.color_bit_depth = sf.solid_fill_info.bit_depth;
1224 solidfill.alpha = sf.solid_fill_info.alpha;
1225 solidfill.red = sf.solid_fill_info.red;
1226 solidfill.green = sf.solid_fill_info.green;
1227 solidfill.blue = sf.solid_fill_info.blue;
1228 }
1229 solid_fills_.push_back(solidfill);
1230 DLOGI_IF(kTagDriverConfig, "Add a solidfill stage at z_order:%d argb_color:%x plane_alpha:%x",
1231 solidfill.z_order, solidfill.color, solidfill.plane_alpha);
1232 }
1233
1234 void HWDeviceDRM::SetSolidfillStages() {
1235 if (hw_resource_.num_solidfill_stages) {
1236 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_SOLIDFILL_STAGES, token_.crtc_id,
1237 reinterpret_cast<uint64_t> (&solid_fills_));
1238 }
1239 }
1240
1241 void HWDeviceDRM::ClearSolidfillStages() {
1242 solid_fills_.clear();
1243 SetSolidfillStages();
1244 }
1245
1246 DisplayError HWDeviceDRM::Validate(HWLayers *hw_layers) {
1247 DTRACE_SCOPED();
1248
1249 DisplayError err = kErrorNone;
1250 registry_.Register(hw_layers);
1251 SetupAtomic(hw_layers, true /* validate */);
1252
1253 int ret = drm_atomic_intf_->Validate();
1254 if (ret) {
1255 DLOGE("failed with error %d for %s", ret, device_name_);
1256 vrefresh_ = 0;
1257 err = kErrorHardware;
1258 }
1259
1260 return err;
1261 }
1262
1263 DisplayError HWDeviceDRM::Commit(HWLayers *hw_layers) {
1264 DTRACE_SCOPED();
1265
1266 DisplayError err = kErrorNone;
1267 registry_.Register(hw_layers);
1268
1269 if (default_mode_) {
1270 err = DefaultCommit(hw_layers);
1271 } else {
1272 err = AtomicCommit(hw_layers);
1273 }
1274
1275 return err;
1276 }
1277
1278 DisplayError HWDeviceDRM::DefaultCommit(HWLayers *hw_layers) {
1279 DTRACE_SCOPED();
1280
1281 HWLayersInfo &hw_layer_info = hw_layers->info;
1282 LayerStack *stack = hw_layer_info.stack;
1283
1284 stack->retire_fence_fd = -1;
1285 for (Layer &layer : hw_layer_info.hw_layers) {
1286 layer.input_buffer.release_fence_fd = -1;
1287 }
1288
1289 DRMMaster *master = nullptr;
1290 int ret = DRMMaster::GetInstance(&master);
1291 if (ret < 0) {
1292 DLOGE("Failed to acquire DRMMaster instance");
1293 return kErrorResources;
1294 }
1295
1296 DRMResMgr *res_mgr = nullptr;
1297 ret = DRMResMgr::GetInstance(&res_mgr);
1298 if (ret < 0) {
1299 DLOGE("Failed to acquire DRMResMgr instance");
1300 return kErrorResources;
1301 }
1302
1303 int dev_fd = -1;
1304 master->GetHandle(&dev_fd);
1305
1306 uint32_t connector_id = 0;
1307 res_mgr->GetConnectorId(&connector_id);
1308
1309 uint32_t crtc_id = 0;
1310 res_mgr->GetCrtcId(&crtc_id);
1311
1312 drmModeModeInfo mode;
1313 res_mgr->GetMode(&mode);
1314
1315 uint64_t handle_id = hw_layer_info.hw_layers.at(0).input_buffer.handle_id;
1316 uint32_t fb_id = registry_.GetFbId(&hw_layer_info.hw_layers.at(0), handle_id);
1317 ret = drmModeSetCrtc(dev_fd, crtc_id, fb_id, 0 /* x */, 0 /* y */, &connector_id,
1318 1 /* num_connectors */, &mode);
1319 if (ret < 0) {
1320 DLOGE("drmModeSetCrtc failed dev fd %d, fb_id %d, crtc id %d, connector id %d, %s", dev_fd,
1321 fb_id, crtc_id, connector_id, strerror(errno));
1322 return kErrorHardware;
1323 }
1324
1325 return kErrorNone;
1326 }
1327
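// Issues the real atomic commit: flushes staged properties, hands the release/retire
// fences back to the layer stack, and refreshes the cached mode index when a refresh
// rate or DSI bit clock change was requested.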
1328 DisplayError HWDeviceDRM::AtomicCommit(HWLayers *hw_layers) {
1329 DTRACE_SCOPED();
1330 SetupAtomic(hw_layers, false /* validate */);
1331
1332 int ret = drm_atomic_intf_->Commit(synchronous_commit_, false /* retain_planes*/);
1333 int release_fence = INT(release_fence_);
1334 int retire_fence = INT(retire_fence_);
1335 if (ret) {
1336 DLOGE("%s failed with error %d crtc %d", __FUNCTION__, ret, token_.crtc_id);
1337 vrefresh_ = 0;
1338 CloseFd(&release_fence);
1339 CloseFd(&retire_fence);
1340 release_fence_ = -1;
1341 retire_fence_ = -1;
1342 return kErrorHardware;
1343 }
1344
1345 DLOGD_IF(kTagDriverConfig, "RELEASE fence created: fd:%d", release_fence);
1346 DLOGD_IF(kTagDriverConfig, "RETIRE fence created: fd:%d", retire_fence);
1347
1348 HWLayersInfo &hw_layer_info = hw_layers->info;
1349 LayerStack *stack = hw_layer_info.stack;
1350 stack->retire_fence_fd = retire_fence;
1351
1352 for (uint32_t i = 0; i < hw_layer_info.hw_layers.size(); i++) {
1353 Layer &layer = hw_layer_info.hw_layers.at(i);
1354 HWRotatorSession *hw_rotator_session = &hw_layers->config[i].hw_rotator_session;
1355 if (hw_rotator_session->mode == kRotatorOffline) {
1356 hw_rotator_session->output_buffer.release_fence_fd = Sys::dup_(release_fence);
1357 } else {
1358 layer.input_buffer.release_fence_fd = Sys::dup_(release_fence);
1359 }
1360 }
1361
1362 hw_layer_info.sync_handle = release_fence;
1363
1364 if (vrefresh_) {
1365 // Update current mode index if refresh rate is changed
1366 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1367 uint64_t current_bit_clk = connector_info_.modes[current_mode_index_].bit_clk_rate;
1368 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1369 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1370 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1371 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
1372 (vrefresh_ == connector_info_.modes[mode_index].mode.vrefresh)) {
1373 current_mode_index_ = mode_index;
1374 break;
1375 }
1376 }
1377 vrefresh_ = 0;
1378 }
1379
1380 if (bit_clk_rate_) {
1381 // Update current mode index if bit clk rate is changed.
1382 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1383 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1384 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1385 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1386 (current_mode.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
1387 (bit_clk_rate_ == connector_info_.modes[mode_index].bit_clk_rate)) {
1388 current_mode_index_ = mode_index;
1389 break;
1390 }
1391 }
1392 bit_clk_rate_ = 0;
1393 }
1394
1395 first_cycle_ = false;
1396 update_mode_ = false;
1397 hw_layers->updates_mask = 0;
1398 pending_doze_ = false;
1399
1400 return kErrorNone;
1401 }
1402
1403 DisplayError HWDeviceDRM::Flush(HWLayers *hw_layers) {
1404 ClearSolidfillStages();
1405 int ret = NullCommit(secure_display_active_ /* synchronous */, false /* retain_planes*/);
1406 if (ret) {
1407 DLOGE("failed with error %d", ret);
1408 return kErrorHardware;
1409 }
1410
1411 return kErrorNone;
1412 }
1413
1414 void HWDeviceDRM::SetBlending(const LayerBlending &source, DRMBlendType *target) {
1415 switch (source) {
1416 case kBlendingPremultiplied:
1417 *target = DRMBlendType::PREMULTIPLIED;
1418 break;
1419 case kBlendingOpaque:
1420 *target = DRMBlendType::OPAQUE;
1421 break;
1422 case kBlendingCoverage:
1423 *target = DRMBlendType::COVERAGE;
1424 break;
1425 default:
1426 *target = DRMBlendType::UNDEFINED;
1427 }
1428 }
1429
1430 void HWDeviceDRM::SetSrcConfig(const LayerBuffer &input_buffer, const HWRotatorMode &mode,
1431 uint32_t *config) {
1432 // In offline rotation case, rotator will handle deinterlacing.
1433 if (mode != kRotatorOffline) {
1434 if (input_buffer.flags.interlace) {
1435 *config |= (0x01 << UINT32(DRMSrcConfig::DEINTERLACE));
1436 }
1437 }
1438 }
1439
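// Chooses the YUV-to-RGB CSC matrix from the buffer's color primaries and range.
// RGB formats are left at kCscTypeMax, i.e. no CSC is applied.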
1440 void HWDeviceDRM::SelectCscType(const LayerBuffer &input_buffer, DRMCscType *type) {
1441 if (type == NULL) {
1442 return;
1443 }
1444
1445 *type = DRMCscType::kCscTypeMax;
1446 if (input_buffer.format < kFormatYCbCr420Planar) {
1447 return;
1448 }
1449
1450 switch (input_buffer.color_metadata.colorPrimaries) {
1451 case ColorPrimaries_BT601_6_525:
1452 case ColorPrimaries_BT601_6_625:
1453 *type = ((input_buffer.color_metadata.range == Range_Full) ?
1454 DRMCscType::kCscYuv2Rgb601FR : DRMCscType::kCscYuv2Rgb601L);
1455 break;
1456 case ColorPrimaries_BT709_5:
1457 *type = DRMCscType::kCscYuv2Rgb709L;
1458 break;
1459 case ColorPrimaries_BT2020:
1460 *type = ((input_buffer.color_metadata.range == Range_Full) ?
1461 DRMCscType::kCscYuv2Rgb2020FR : DRMCscType::kCscYuv2Rgb2020L);
1462 break;
1463 default:
1464 break;
1465 }
1466 }
1467
SetRect(const LayerRect & source,DRMRect * target)1468 void HWDeviceDRM::SetRect(const LayerRect &source, DRMRect *target) {
1469 target->left = UINT32(source.left);
1470 target->top = UINT32(source.top);
1471 target->right = UINT32(source.right);
1472 target->bottom = UINT32(source.bottom);
1473 }
1474
SetRotation(LayerTransform transform,const HWRotatorMode & mode,uint32_t * rot_bit_mask)1475 void HWDeviceDRM::SetRotation(LayerTransform transform, const HWRotatorMode &mode,
1476 uint32_t* rot_bit_mask) {
1477 // In offline rotation case, rotator will handle flips set via offline rotator interface.
1478 if (mode == kRotatorOffline) {
1479 *rot_bit_mask = 0;
1480 return;
1481 }
1482
1483 // In no rotation case or inline rotation case, plane will handle flips
1484 // In DRM framework rotation is applied in counter-clockwise direction.
1485 if (mode == kRotatorInline && transform.rotation == 90) {
1486 // a) rotate 90 clockwise = rotate 270 counter-clockwise in DRM
1487 // rotate 270 is translated as hflip + vflip + rotate90
1488 // b) rotate 270 clockwise = rotate 90 counter-clockwise in DRM
1489 // c) hflip + rotate 90 clockwise = vflip + rotate 90 counter-clockwise in DRM
1490 // d) vflip + rotate 90 clockwise = hflip + rotate 90 counter-clockwise in DRM
1491 *rot_bit_mask = UINT32(DRMRotation::ROT_90);
1492 transform.flip_horizontal = !transform.flip_horizontal;
1493 transform.flip_vertical = !transform.flip_vertical;
1494 }
1495
1496 if (transform.flip_horizontal) {
1497 *rot_bit_mask |= UINT32(DRMRotation::FLIP_H);
1498 }
1499
1500 if (transform.flip_vertical) {
1501 *rot_bit_mask |= UINT32(DRMRotation::FLIP_V);
1502 }
1503 }
1504
EnableHotPlugDetection(int enable)1505 bool HWDeviceDRM::EnableHotPlugDetection(int enable) {
1506 return true;
1507 }
1508
SetCursorPosition(HWLayers * hw_layers,int x,int y)1509 DisplayError HWDeviceDRM::SetCursorPosition(HWLayers *hw_layers, int x, int y) {
1510 DTRACE_SCOPED();
1511 return kErrorNone;
1512 }
1513
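// Query the kernel for the version of every DSPP post-processing feature exposed on this
// CRTC and record it in |vers|.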
DisplayError HWDeviceDRM::GetPPFeaturesVersion(PPFeatureVersion *vers) {
  struct DRMPPFeatureInfo info = {};

  if (!hw_color_mgr_)
    return kErrorNotSupported;

  for (uint32_t i = 0; i < kMaxNumPPFeatures; i++) {
    std::vector<DRMPPFeatureID> drm_id = {};
    memset(&info, 0, sizeof(struct DRMPPFeatureInfo));
    hw_color_mgr_->ToDrmFeatureId(kDSPP, i, &drm_id);
    if (drm_id.empty())
      continue;

    info.id = drm_id.at(0);

    drm_mgr_intf_->GetCrtcPPInfo(token_.crtc_id, &info);
    vers->version[i] = hw_color_mgr_->GetFeatureVersion(info);
  }
  return kErrorNone;
}

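// Apply pending color/post-processing features: each feature retrieved from |feature_list|
// is mapped to its DRM feature ID(s) and programmed either on the CRTC or, when the CRTC
// does not expose the feature (version reported as UINT32_MAX), on the connector.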
DisplayError HWDeviceDRM::SetPPFeatures(PPFeaturesConfig *feature_list) {
  if (pending_doze_) {
    DLOGI("Doze state pending! Skip for now");
    return kErrorNone;
  }

  int ret = 0;
  PPFeatureInfo *feature = NULL;

  if (!hw_color_mgr_)
    return kErrorNotSupported;

  while (true) {
    std::vector<DRMPPFeatureID> drm_id = {};
    DRMPPFeatureInfo kernel_params = {};
    bool crtc_feature = true;

    ret = feature_list->RetrieveNextFeature(&feature);
    if (ret)
      break;

    hw_color_mgr_->ToDrmFeatureId(kDSPP, feature->feature_id_, &drm_id);
    if (drm_id.empty())
      continue;

    kernel_params.id = drm_id.at(0);
    drm_mgr_intf_->GetCrtcPPInfo(token_.crtc_id, &kernel_params);
    if (kernel_params.version == std::numeric_limits<uint32_t>::max())
      crtc_feature = false;
    if (feature) {
      DLOGV_IF(kTagDriverConfig, "feature_id = %d", feature->feature_id_);
      for (DRMPPFeatureID id : drm_id) {
        if (id >= kPPFeaturesMax) {
          DLOGE("Invalid feature id %d", id);
          continue;
        }
        kernel_params.id = id;
        ret = hw_color_mgr_->GetDrmFeature(feature, &kernel_params);
        if (!ret && crtc_feature)
          drm_atomic_intf_->Perform(DRMOps::CRTC_SET_POST_PROC,
                                    token_.crtc_id, &kernel_params);
        else if (!ret && !crtc_feature)
          drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POST_PROC,
                                    token_.conn_id, &kernel_params);

        hw_color_mgr_->FreeDrmFeatureData(&kernel_params);
      }
    }
  }

  // Once all features are consumed, destroy all feature instances from feature_list.
  feature_list->Reset();

  return kErrorNone;
}

DisplayError HWDeviceDRM::SetVSyncState(bool enable) {
  return kErrorNotSupported;
}

void HWDeviceDRM::SetIdleTimeoutMs(uint32_t timeout_ms) {
  // TODO(user): This function can be removed after fb is deprecated.
}

DisplayError HWDeviceDRM::SetDisplayMode(const HWDisplayMode hw_display_mode) {
  return kErrorNotSupported;
}

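// Validate a refresh-rate request against the connector mode list. The rate is accepted only
// if a mode with the same resolution and bit clock exists; the actual switch is deferred to
// the next commit via vrefresh_. Requests are rejected while a bit-clock update is pending.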
DisplayError HWDeviceDRM::SetRefreshRate(uint32_t refresh_rate) {
  if (bit_clk_rate_) {
    // Bit rate update pending.
    // Defer any refresh rate setting.
    return kErrorNotSupported;
  }

  // Check if requested refresh rate is valid
  drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
  uint64_t current_bit_clk = connector_info_.modes[current_mode_index_].bit_clk_rate;
  for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
    if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
        (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
        (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
        (refresh_rate == connector_info_.modes[mode_index].mode.vrefresh)) {
      vrefresh_ = refresh_rate;
      DLOGV_IF(kTagDriverConfig, "Set refresh rate to %d", refresh_rate);
      return kErrorNone;
    }
  }
  return kErrorNotSupported;
}

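// Backlight control goes through the sysfs node kBrightnessNode: format the level as text and
// write it with pwrite().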
DisplayError HWDeviceDRM::SetPanelBrightness(int level) {
  DisplayError err = kErrorNone;
  char buffer[kMaxSysfsCommandLength] = {0};

  DLOGV_IF(kTagDriverConfig, "Set brightness level to %d", level);
  int fd = Sys::open_(kBrightnessNode, O_RDWR);
  if (fd < 0) {
    DLOGV_IF(kTagDriverConfig, "Failed to open node = %s, error = %s", kBrightnessNode,
             strerror(errno));
    return kErrorFileDescriptor;
  }

  int32_t bytes = snprintf(buffer, kMaxSysfsCommandLength, "%d\n", level);
  ssize_t ret = Sys::pwrite_(fd, buffer, static_cast<size_t>(bytes), 0);
  if (ret <= 0) {
    DLOGV_IF(kTagDriverConfig, "Failed to write to node = %s, error = %s", kBrightnessNode,
             strerror(errno));
    err = kErrorHardware;
  }

  Sys::close_(fd);

  return err;
}

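// Read the current backlight level back from the same sysfs node.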
DisplayError HWDeviceDRM::GetPanelBrightness(int *level) {
  DisplayError err = kErrorNone;
  char brightness[kMaxStringLength] = {0};

  if (!level) {
    DLOGV_IF(kTagDriverConfig, "Invalid input, null pointer.");
    return kErrorParameters;
  }

  int fd = Sys::open_(kBrightnessNode, O_RDWR);
  if (fd < 0) {
    DLOGV_IF(kTagDriverConfig, "Failed to open brightness node = %s, error = %s", kBrightnessNode,
             strerror(errno));
    return kErrorFileDescriptor;
  }

  if (Sys::pread_(fd, brightness, sizeof(brightness), 0) > 0) {
    *level = atoi(brightness);
    DLOGV_IF(kTagDriverConfig, "Brightness level = %d", *level);
  } else {
    DLOGV_IF(kTagDriverConfig, "Failed to read panel brightness");
    err = kErrorHardware;
  }

  Sys::close_(fd);

  return err;
}

DisplayError HWDeviceDRM::GetHWScanInfo(HWScanInfo *scan_info) {
  return kErrorNotSupported;
}

DisplayError HWDeviceDRM::GetVideoFormat(uint32_t config_index, uint32_t *video_format) {
  return kErrorNotSupported;
}

DisplayError HWDeviceDRM::GetMaxCEAFormat(uint32_t *max_cea_format) {
  return kErrorNotSupported;
}

DisplayError HWDeviceDRM::OnMinHdcpEncryptionLevelChange(uint32_t min_enc_level) {
  return kErrorNotSupported;
}

DisplayError HWDeviceDRM::SetS3DMode(HWS3DMode s3d_mode) {
  return kErrorNotSupported;
}

DisplayError HWDeviceDRM::SetScaleLutConfig(HWScaleLutInfo *lut_info) {
  sde_drm::DRMScalerLUTInfo drm_lut_info = {};
  drm_lut_info.cir_lut = lut_info->cir_lut;
  drm_lut_info.dir_lut = lut_info->dir_lut;
  drm_lut_info.sep_lut = lut_info->sep_lut;
  drm_lut_info.cir_lut_size = lut_info->cir_lut_size;
  drm_lut_info.dir_lut_size = lut_info->dir_lut_size;
  drm_lut_info.sep_lut_size = lut_info->sep_lut_size;
  drm_mgr_intf_->SetScalerLUT(drm_lut_info);

  return kErrorNone;
}

DisplayError HWDeviceDRM::UnsetScaleLutConfig() {
  drm_mgr_intf_->UnsetScalerLUT();

  return kErrorNone;
}

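// Validate a requested layer-mixer size against the destination scaler constraints: it must
// not exceed the panel resolution or the scaler's max input width (doubled for split panels),
// must be at least min_roi_width, must match the panel aspect ratio, and must stay within the
// maximum upscale ratio. On success the split point is rescaled to the new width for split
// panels.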
DisplayError HWDeviceDRM::SetMixerAttributes(const HWMixerAttributes &mixer_attributes) {
  if (!hw_resource_.hw_dest_scalar_info.count) {
    return kErrorNotSupported;
  }

  uint32_t index = current_mode_index_;

  if (mixer_attributes.width > display_attributes_[index].x_pixels ||
      mixer_attributes.height > display_attributes_[index].y_pixels) {
    DLOGW("Input resolution exceeds display resolution! input: res %dx%d display: res %dx%d",
          mixer_attributes.width, mixer_attributes.height, display_attributes_[index].x_pixels,
          display_attributes_[index].y_pixels);
    return kErrorNotSupported;
  }

  uint32_t max_input_width = hw_resource_.hw_dest_scalar_info.max_input_width;
  if (display_attributes_[index].is_device_split) {
    max_input_width *= 2;
  }

  if (mixer_attributes.width > max_input_width) {
    DLOGW("Input width exceeds width limit! input_width %d width_limit %d", mixer_attributes.width,
          max_input_width);
    return kErrorNotSupported;
  }

  if (static_cast<int>(mixer_attributes.width) < hw_panel_info_.min_roi_width) {
    DLOGW("Input width less than panel min_roi_width! input_width %d min_roi_width %d",
          mixer_attributes.width, hw_panel_info_.min_roi_width);
    return kErrorNotSupported;
  }

  float mixer_aspect_ratio = FLOAT(mixer_attributes.width) / FLOAT(mixer_attributes.height);
  float display_aspect_ratio =
      FLOAT(display_attributes_[index].x_pixels) / FLOAT(display_attributes_[index].y_pixels);

  if (display_aspect_ratio != mixer_aspect_ratio) {
    DLOGW("Aspect ratio mismatch! input: res %dx%d display: res %dx%d", mixer_attributes.width,
          mixer_attributes.height, display_attributes_[index].x_pixels,
          display_attributes_[index].y_pixels);
    return kErrorNotSupported;
  }

  float scale_x = FLOAT(display_attributes_[index].x_pixels) / FLOAT(mixer_attributes.width);
  float scale_y = FLOAT(display_attributes_[index].y_pixels) / FLOAT(mixer_attributes.height);
  float max_scale_up = hw_resource_.hw_dest_scalar_info.max_scale_up;
  if (scale_x > max_scale_up || scale_y > max_scale_up) {
    DLOGW("Upscaling ratio exceeds destination scalar upscale limit! scale_x %f scale_y %f "
          "max_scale_up %f", scale_x, scale_y, max_scale_up);
    return kErrorNotSupported;
  }

  float mixer_split_ratio = FLOAT(mixer_attributes_.split_left) / FLOAT(mixer_attributes_.width);

  mixer_attributes_ = mixer_attributes;
  mixer_attributes_.split_left = mixer_attributes_.width;
  if (display_attributes_[index].is_device_split) {
    mixer_attributes_.split_left = UINT32(FLOAT(mixer_attributes.width) * mixer_split_ratio);
  }

  return kErrorNone;
}

DisplayError HWDeviceDRM::GetMixerAttributes(HWMixerAttributes *mixer_attributes) {
  if (!mixer_attributes) {
    return kErrorParameters;
  }

  *mixer_attributes = mixer_attributes_;

  return kErrorNone;
}

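// Capture a hardware-recovery snapshot: copy the driver's event log, register dump and
// debug-bus / VBIF debug-bus dumps from debugfs into a numbered file under
// /data/vendor/display/hw_recovery/.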
DisplayError HWDeviceDRM::DumpDebugData() {
  string dir_path = "/data/vendor/display/hw_recovery/";
  string device_str = device_name_;

  // Attempt to make hw_recovery dir, it may exist
  if (mkdir(dir_path.c_str(), 0777) != 0 && errno != EEXIST) {
    DLOGW("Failed to create %s directory errno = %d, desc = %s", dir_path.c_str(), errno,
          strerror(errno));
    return kErrorPermission;
  }
  // If it does exist, ensure permissions are fine
  if (errno == EEXIST && chmod(dir_path.c_str(), 0777) != 0) {
    DLOGW("Failed to change permissions on %s directory", dir_path.c_str());
    return kErrorPermission;
  }

  string filename = dir_path + device_str + "_HWR_" + to_string(debug_dump_count_);
  ofstream dst(filename);
  debug_dump_count_++;

  {
    ifstream src;
    src.open("/sys/kernel/debug/dri/0/debug/dump");
    dst << "---- Event Logs ----" << std::endl;
    dst << src.rdbuf() << std::endl;
    src.close();
  }

  {
    ifstream src;
    src.open("/sys/kernel/debug/dri/0/debug/recovery_reg");
    dst << "---- All Registers ----" << std::endl;
    dst << src.rdbuf() << std::endl;
    src.close();
  }

  {
    ifstream src;
    src.open("/sys/kernel/debug/dri/0/debug/recovery_dbgbus");
    dst << "---- Debug Bus ----" << std::endl;
    dst << src.rdbuf() << std::endl;
    src.close();
  }

  {
    ifstream src;
    src.open("/sys/kernel/debug/dri/0/debug/recovery_vbif_dbgbus");
    dst << "---- VBIF Debug Bus ----" << std::endl;
    dst << src.rdbuf() << std::endl;
    src.close();
  }

  dst.close();
  DLOGI("Wrote hw_recovery file %s", filename.c_str());

  return kErrorNone;
}

void HWDeviceDRM::GetDRMDisplayToken(sde_drm::DRMDisplayToken *token) const {
  *token = token_;
}

void HWDeviceDRM::UpdateMixerAttributes() {
  uint32_t index = current_mode_index_;

  mixer_attributes_.width = display_attributes_[index].x_pixels;
  mixer_attributes_.height = display_attributes_[index].y_pixels;
  mixer_attributes_.split_left = display_attributes_[index].is_device_split
                                     ? hw_panel_info_.split_info.left_split
                                     : mixer_attributes_.width;
  update_mode_ = true;
}

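// Translate the buffer's secure flags into the DRM framebuffer secure mode and CRTC security
// level: secure camera and secure display use direct (stage II only) translation, secure
// display additionally restricts the CRTC to secure planes, and other secure buffers use the
// regular two-stage secure mode.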
void HWDeviceDRM::SetSecureConfig(const LayerBuffer &input_buffer, DRMSecureMode *fb_secure_mode,
                                  DRMSecurityLevel *security_level) {
  *fb_secure_mode = DRMSecureMode::NON_SECURE;
  *security_level = DRMSecurityLevel::SECURE_NON_SECURE;

  if (input_buffer.flags.secure) {
    if (input_buffer.flags.secure_camera) {
      // IOMMU configuration for this framebuffer mode is secure domain & requires
      // only stage II translation, when this buffer is accessed by Display H/W.
      // Secure and non-secure planes can be attached to this CRTC.
      *fb_secure_mode = DRMSecureMode::SECURE_DIR_TRANSLATION;
    } else if (input_buffer.flags.secure_display) {
      // IOMMU configuration for this framebuffer mode is secure domain & requires
      // only stage II translation, when this buffer is accessed by Display H/W.
      // Only secure planes can be attached to this CRTC.
      *fb_secure_mode = DRMSecureMode::SECURE_DIR_TRANSLATION;
      *security_level = DRMSecurityLevel::SECURE_ONLY;
    } else {
      // IOMMU configuration for this framebuffer mode is secure domain & requires both
      // stage I and stage II translations, when this buffer is accessed by Display H/W.
      // Secure and non-secure planes can be attached to this CRTC.
      *fb_secure_mode = DRMSecureMode::SECURE;
    }
  }
}

void HWDeviceDRM::SetTopology(sde_drm::DRMTopology drm_topology, HWTopology *hw_topology) {
  switch (drm_topology) {
    case DRMTopology::SINGLE_LM: *hw_topology = kSingleLM; break;
    case DRMTopology::SINGLE_LM_DSC: *hw_topology = kSingleLMDSC; break;
    case DRMTopology::DUAL_LM: *hw_topology = kDualLM; break;
    case DRMTopology::DUAL_LM_DSC: *hw_topology = kDualLMDSC; break;
    case DRMTopology::DUAL_LM_MERGE: *hw_topology = kDualLMMerge; break;
    case DRMTopology::DUAL_LM_MERGE_DSC: *hw_topology = kDualLMMergeDSC; break;
    case DRMTopology::DUAL_LM_DSCMERGE: *hw_topology = kDualLMDSCMerge; break;
    case DRMTopology::PPSPLIT: *hw_topology = kPPSplit; break;
    default: *hw_topology = kUnknown; break;
  }
}

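// Map layer flags to the plane multirect mode: serial by default when multirect is requested,
// parallel when the parallel-mode flag is also set.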
void HWDeviceDRM::SetMultiRectMode(const uint32_t flags, DRMMultiRectMode *target) {
  *target = DRMMultiRectMode::NONE;
  if (flags & kMultiRect) {
    *target = DRMMultiRectMode::SERIAL;
    if (flags & kMultiRectParallelMode) {
      *target = DRMMultiRectMode::PARALLEL;
    }
  }
}

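// Program per-pipe (SSPP) tonemapping blocks: the DGM CSC matrix and the inverse-PMA setting
// when their ops are pending, followed by any tonemap LUTs.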
void HWDeviceDRM::SetSsppTonemapFeatures(HWPipeInfo *pipe_info) {
  if (pipe_info->dgm_csc_info.op != kNoOp) {
    SDECsc csc = {};
    SetDGMCsc(pipe_info->dgm_csc_info, &csc);
    DLOGV_IF(kTagDriverConfig, "Call Perform DGM CSC Op = %s",
             (pipe_info->dgm_csc_info.op == kSet) ? "Set" : "Reset");
    drm_atomic_intf_->Perform(DRMOps::PLANE_SET_DGM_CSC_CONFIG, pipe_info->pipe_id,
                              reinterpret_cast<uint64_t>(&csc.csc_v1));
  }
  if (pipe_info->inverse_pma_info.op != kNoOp) {
    DLOGV_IF(kTagDriverConfig, "Call Perform Inverse PMA Op = %s",
             (pipe_info->inverse_pma_info.op == kSet) ? "Set" : "Reset");
    drm_atomic_intf_->Perform(DRMOps::PLANE_SET_INVERSE_PMA, pipe_info->pipe_id,
                              (pipe_info->inverse_pma_info.inverse_pma) ? 1 : 0);
  }
  SetSsppLutFeatures(pipe_info);
}

void HWDeviceDRM::SetDGMCsc(const HWPipeCscInfo &dgm_csc_info, SDECsc *csc) {
  SetDGMCscV1(dgm_csc_info.csc, &csc->csc_v1);
}

void HWDeviceDRM::SetDGMCscV1(const HWCsc &dgm_csc, sde_drm_csc_v1 *csc_v1) {
  uint32_t i = 0;
  for (i = 0; i < MAX_CSC_MATRIX_COEFF_SIZE; i++) {
    csc_v1->ctm_coeff[i] = dgm_csc.ctm_coeff[i];
    DLOGV_IF(kTagDriverConfig, " DGM csc_v1[%d] = %d", i, csc_v1->ctm_coeff[i]);
  }
  for (i = 0; i < MAX_CSC_BIAS_SIZE; i++) {
    csc_v1->pre_bias[i] = dgm_csc.pre_bias[i];
    csc_v1->post_bias[i] = dgm_csc.post_bias[i];
  }
  for (i = 0; i < MAX_CSC_CLAMP_SIZE; i++) {
    csc_v1->pre_clamp[i] = dgm_csc.pre_clamp[i];
    csc_v1->post_clamp[i] = dgm_csc.post_clamp[i];
  }
}

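// Program the per-pipe tonemap LUTs: each pending LUT payload is converted to its DRM feature
// representation (disabled when the op is kReset) and applied with PLANE_SET_POST_PROC.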
void HWDeviceDRM::SetSsppLutFeatures(HWPipeInfo *pipe_info) {
  for (HWPipeTonemapLutInfo &lut_info : pipe_info->lut_info) {
    if (lut_info.op != kNoOp) {
      std::shared_ptr<PPFeatureInfo> feature = lut_info.pay_load;
      if (feature == nullptr) {
        DLOGE("Null Pointer for Op = %d lut type = %d", lut_info.op, lut_info.type);
        continue;
      }
      DRMPPFeatureInfo kernel_params = {};
      std::vector<DRMPPFeatureID> drm_id = {};
      PPBlock pp_block = GetPPBlock(lut_info.type);
      hw_color_mgr_->ToDrmFeatureId(pp_block, feature->feature_id_, &drm_id);
      for (DRMPPFeatureID id : drm_id) {
        if (id >= kPPFeaturesMax) {
          DLOGE("Invalid feature id %d", id);
          continue;
        }
        kernel_params.id = id;
        bool disable = (lut_info.op == kReset);
        DLOGV_IF(kTagDriverConfig, "Lut Type = %d PPBlock = %d Op = %s Disable = %d Feature = %p",
                 lut_info.type, pp_block, (lut_info.op == kSet) ? "Set" : "Reset", disable,
                 feature.get());
        int ret = hw_color_mgr_->GetDrmFeature(feature.get(), &kernel_params, disable);
        if (!ret) {
          drm_atomic_intf_->Perform(DRMOps::PLANE_SET_POST_PROC, pipe_info->pipe_id,
                                    &kernel_params);
          hw_color_mgr_->FreeDrmFeatureData(&kernel_params);
        } else {
          DLOGE("GetDrmFeature failed for Lut type = %d", lut_info.type);
        }
      }
      drm_id.clear();
    }
  }
}

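// While a secure display session is active, stage a full-screen opaque solid fill (dim layer)
// at the reserved secure blend stage; otherwise reset the CRTC security level to
// SECURE_NON_SECURE.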
void HWDeviceDRM::AddDimLayerIfNeeded() {
  if (secure_display_active_ && hw_resource_.secure_disp_blend_stage >= 0) {
    HWSolidfillStage sf = {};
    sf.z_order = UINT32(hw_resource_.secure_disp_blend_stage);
    sf.roi = { 0.0, 0.0, FLOAT(mixer_attributes_.width), FLOAT(mixer_attributes_.height) };
    solid_fills_.clear();
    AddSolidfillStage(sf, 0xFF);
    SetSolidfillStages();
  }

  if (!secure_display_active_) {
    DRMSecurityLevel crtc_security_level = DRMSecurityLevel::SECURE_NON_SECURE;
    drm_atomic_intf_->Perform(DRMOps::CRTC_SET_SECURITY_LEVEL, token_.crtc_id, crtc_security_level);
  }
}

DisplayError HWDeviceDRM::NullCommit(bool synchronous, bool retain_planes) {
  DTRACE_SCOPED();
  AddDimLayerIfNeeded();
  int ret = drm_atomic_intf_->Commit(synchronous, retain_planes);
  if (ret) {
    DLOGE("failed with error %d", ret);
    return kErrorHardware;
  }

  return kErrorNone;
}

void HWDeviceDRM::DumpConnectorModeInfo() {
  for (uint32_t i = 0; i < (uint32_t)connector_info_.modes.size(); i++) {
    DLOGI("Mode[%d] Name:%s vref:%d hdisp:%d hsync_s:%d hsync_e:%d htotal:%d "
          "vdisp:%d vsync_s:%d vsync_e:%d vtotal:%d\n", i, connector_info_.modes[i].mode.name,
          connector_info_.modes[i].mode.vrefresh, connector_info_.modes[i].mode.hdisplay,
          connector_info_.modes[i].mode.hsync_start, connector_info_.modes[i].mode.hsync_end,
          connector_info_.modes[i].mode.htotal, connector_info_.modes[i].mode.vdisplay,
          connector_info_.modes[i].mode.vsync_start, connector_info_.modes[i].mode.vsync_end,
          connector_info_.modes[i].mode.vtotal);
  }
}

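// For panels that support partial update, reset the CRTC and connector ROIs to the full
// mixer/display resolution.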
void HWDeviceDRM::SetFullROI() {
  // Reset the CRTC ROI and connector ROI only for the panel that supports partial update
  if (!hw_panel_info_.partial_update) {
    return;
  }
  uint32_t index = current_mode_index_;
  DRMRect crtc_rects = {0, 0, mixer_attributes_.width, mixer_attributes_.height};
  DRMRect conn_rects = {0, 0, display_attributes_[index].x_pixels,
                        display_attributes_[index].y_pixels};
  drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROI, token_.crtc_id, 1, &crtc_rects);
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_ROI, token_.conn_id, 1, &conn_rects);
}

DisplayError HWDeviceDRM::SetDynamicDSIClock(uint64_t bit_clk_rate) {
  return kErrorNotSupported;
}

DisplayError HWDeviceDRM::GetDynamicDSIClock(uint64_t *bit_clk_rate) {
  return kErrorNotSupported;
}

}  // namespace sdm