/*
Copyright (c) 2017-2019, The Linux Foundation. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above
      copyright notice, this list of conditions and the following
      disclaimer in the documentation and/or other materials provided
      with the distribution.
    * Neither the name of The Linux Foundation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include <utils/debug.h>
#include <vector>
#include <cstring>
#include <cinttypes>

#include "hw_peripheral_drm.h"

#define __CLASS__ "HWPeripheralDRM"

using sde_drm::DRMDisplayType;
using sde_drm::DRMOps;
using sde_drm::DRMPowerMode;
using sde_drm::DppsFeaturePayload;
using sde_drm::DRMDppsFeatureInfo;
using sde_drm::DRMSecureMode;
using sde_drm::DRMCWbCaptureMode;

namespace sdm {

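// HWPeripheralDRM drives the built-in (peripheral/DSI) display through the DRM atomic
// interface, extending HWDeviceDRM with dest-scaler, DPPS, concurrent-writeback and
// idle power-collapse handling.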
HWPeripheralDRM::HWPeripheralDRM(int32_t display_id, BufferSyncHandler *buffer_sync_handler,
                                 BufferAllocator *buffer_allocator, HWInfoInterface *hw_info_intf)
  : HWDeviceDRM(buffer_sync_handler, buffer_allocator, hw_info_intf) {
  disp_type_ = DRMDisplayType::PERIPHERAL;
  device_name_ = "Peripheral";
  display_id_ = display_id;
}

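// Initialize the base DRM device, size the destination-scaler state to the number of
// hardware dest scalers, collect the supported DSI bit clock rates, and request the
// DSPP (and, on the primary panel, DEST_SCALER) topology blocks.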
DisplayError HWPeripheralDRM::Init() {
  DisplayError ret = HWDeviceDRM::Init();
  if (ret != kErrorNone) {
    DLOGE("Init failed for %s", device_name_);
    return ret;
  }

  scalar_data_.resize(hw_resource_.hw_dest_scalar_info.count);
  dest_scalar_cache_.resize(hw_resource_.hw_dest_scalar_info.count);
  PopulateBitClkRates();

  topology_control_ = UINT32(sde_drm::DRMTopologyControl::DSPP);
  if (hw_panel_info_.is_primary_panel) {
    topology_control_ |= UINT32(sde_drm::DRMTopologyControl::DEST_SCALER);
  }

  return kErrorNone;
}

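// Collect the bit clock rates of every mode that matches the current mode's resolution,
// so dynamic DSI clock switching can pick among them without a resolution change.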
void HWPeripheralDRM::PopulateBitClkRates() {
  if (!hw_panel_info_.dyn_bitclk_support) {
    return;
  }

  // Group all bit_clk_rates corresponding to the DRM_PREFERRED mode.
  uint32_t width = connector_info_.modes[current_mode_index_].mode.hdisplay;
  uint32_t height = connector_info_.modes[current_mode_index_].mode.vdisplay;

  for (auto &mode_info : connector_info_.modes) {
    auto &mode = mode_info.mode;
    if (mode.hdisplay == width && mode.vdisplay == height) {
      bitclk_rates_.push_back(mode_info.bit_clk_rate);
      DLOGI("Possible bit_clk_rate %" PRIu64, mode_info.bit_clk_rate);
    }
  }

  hw_panel_info_.bitclk_rates = bitclk_rates_;
  DLOGI("bit_clk_rates size %zu", bitclk_rates_.size());
}

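// Record the requested bit clock rate; the actual switch is expected to be applied on the
// next commit via the update_mode_ flag handled in the base class.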
DisplayError HWPeripheralDRM::SetDynamicDSIClock(uint64_t bit_clk_rate) {
  bit_clk_rate_ = bit_clk_rate;
  update_mode_ = true;

  return kErrorNone;
}

DisplayError HWPeripheralDRM::GetDynamicDSIClock(uint64_t *bit_clk_rate) {
  // Return the bit clock rate corresponding to the current mode.
  *bit_clk_rate = connector_info_.modes[current_mode_index_].bit_clk_rate;

  return kErrorNone;
}

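// Program dest-scaler, concurrent-writeback and idle-PC state ahead of the base validation.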
DisplayError HWPeripheralDRM::Validate(HWLayers *hw_layers) {
  HWLayersInfo &hw_layer_info = hw_layers->info;
  SetDestScalarData(hw_layer_info, true);
  SetupConcurrentWriteback(hw_layer_info, true);
  SetIdlePCState();

  return HWDeviceDRM::Validate(hw_layers);
}

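// Mirror Validate(): program dest-scaler, CWB and idle-PC state, commit through the base
// class, then run the CWB post-commit handling.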
DisplayError HWPeripheralDRM::Commit(HWLayers *hw_layers) {
  HWLayersInfo &hw_layer_info = hw_layers->info;
  SetDestScalarData(hw_layer_info, false);
  SetupConcurrentWriteback(hw_layer_info, false);
  SetIdlePCState();

  DisplayError error = HWDeviceDRM::Commit(hw_layers);
  if (error != kErrorNone) {
    return error;
  }

  if (cwb_config_.enabled) {
    PostCommitConcurrentWriteback(hw_layer_info.stack->output_buffer);
  }

  // Reset to default after a successful commit.
  synchronous_commit_ = false;

  return error;
}

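// Clear the cached destination-scaler configuration.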
void HWPeripheralDRM::ResetDisplayParams() {
  sde_dest_scalar_data_ = {};
  for (uint32_t j = 0; j < scalar_data_.size(); j++) {
    scalar_data_[j] = {};
    dest_scalar_cache_[j] = {};
  }
}

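// Translate per-mixer HWDestScaleInfo into sde_drm_dest_scaler_cfg entries, flag changes
// against the cache, and send the config to the CRTC when an update is needed. During
// commit (validate == false) the new state is cached and the dirty flag cleared.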
void HWPeripheralDRM::SetDestScalarData(HWLayersInfo hw_layer_info, bool validate) {
  if (!hw_scale_ || !hw_resource_.hw_dest_scalar_info.count) {
    return;
  }

  for (uint32_t i = 0; i < hw_resource_.hw_dest_scalar_info.count && validate; i++) {
    DestScaleInfoMap::iterator it = hw_layer_info.dest_scale_info_map.find(i);

    if (it == hw_layer_info.dest_scale_info_map.end()) {
      continue;
    }

    HWDestScaleInfo *dest_scale_info = it->second;
    SDEScaler *scale = &scalar_data_[i];
    hw_scale_->SetScaler(dest_scale_info->scale_data, scale);

    sde_drm_dest_scaler_cfg *dest_scalar_data = &sde_dest_scalar_data_.ds_cfg[i];
    dest_scalar_data->flags = 0;
    if (scale->scaler_v2.enable) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_ENABLE;
    }
    if (scale->scaler_v2.de.enable) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
    }
    if (dest_scale_info->scale_update) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_SCALE_UPDATE;
    }
    if (hw_panel_info_.partial_update) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_PU_ENABLE;
    }
    dest_scalar_data->index = i;
    dest_scalar_data->lm_width = dest_scale_info->mixer_width;
    dest_scalar_data->lm_height = dest_scale_info->mixer_height;
    dest_scalar_data->scaler_cfg = reinterpret_cast<uint64_t>(&scale->scaler_v2);

    // Mark the dest-scaler config dirty if it differs from the committed cache.
    if (std::memcmp(&dest_scalar_cache_[i].scalar_data, scale, sizeof(SDEScaler)) ||
        dest_scalar_cache_[i].flags != dest_scalar_data->flags) {
      needs_ds_update_ = true;
    }
  }

  if (needs_ds_update_) {
    if (!validate) {
      // Cache the destination scalar data during commit.
      for (uint32_t i = 0; i < hw_resource_.hw_dest_scalar_info.count; i++) {
        DestScaleInfoMap::iterator it = hw_layer_info.dest_scale_info_map.find(i);
        if (it == hw_layer_info.dest_scale_info_map.end()) {
          continue;
        }
        dest_scalar_cache_[i].flags = sde_dest_scalar_data_.ds_cfg[i].flags;
        dest_scalar_cache_[i].scalar_data = scalar_data_[i];
      }
      needs_ds_update_ = false;
    }
    sde_dest_scalar_data_.num_dest_scaler = UINT32(hw_layer_info.dest_scale_info_map.size());
    drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DEST_SCALER_CONFIG, token_.crtc_id,
                              reinterpret_cast<uint64_t>(&sde_dest_scalar_data_));
  }
}

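// Flush through the base class and drop any cached dest-scaler state.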
DisplayError HWPeripheralDRM::Flush(HWLayers *hw_layers) {
  DisplayError err = HWDeviceDRM::Flush(hw_layers);
  if (err != kErrorNone) {
    return err;
  }

  ResetDisplayParams();
  return kErrorNone;
}

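// Cache a DPPS feature value against the CRTC or connector. For the AD4 ROI feature the
// payload value carries a DisplayDppsAd4RoiCfg pointer, which is unpacked into ad4_roi_cfg_.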
DisplayError HWPeripheralDRM::SetDppsFeature(void *payload, size_t size) {
  uint32_t obj_id = 0, object_type = 0, feature_id = 0;
  uint64_t value = 0;

  if (size != sizeof(DppsFeaturePayload)) {
    DLOGE("invalid payload size %zu, expected %zu", size, sizeof(DppsFeaturePayload));
    return kErrorParameters;
  }

  DppsFeaturePayload *feature_payload = reinterpret_cast<DppsFeaturePayload *>(payload);
  object_type = feature_payload->object_type;
  feature_id = feature_payload->feature_id;
  value = feature_payload->value;

  if (feature_id == sde_drm::kFeatureAd4Roi) {
    if (feature_payload->value) {
      DisplayDppsAd4RoiCfg *params =
          reinterpret_cast<DisplayDppsAd4RoiCfg *>(feature_payload->value);
      if (!params) {
        DLOGE("invalid payload value %" PRIu64, feature_payload->value);
        return kErrorNotSupported;
      }

      ad4_roi_cfg_.h_x = params->h_start;
      ad4_roi_cfg_.h_y = params->h_end;
      ad4_roi_cfg_.v_x = params->v_start;
      ad4_roi_cfg_.v_y = params->v_end;
      ad4_roi_cfg_.factor_in = params->factor_in;
      ad4_roi_cfg_.factor_out = params->factor_out;

      value = reinterpret_cast<uint64_t>(&ad4_roi_cfg_);
    }
  }

  if (object_type == DRM_MODE_OBJECT_CRTC) {
    obj_id = token_.crtc_id;
  } else if (object_type == DRM_MODE_OBJECT_CONNECTOR) {
    obj_id = token_.conn_id;
  } else {
    DLOGE("invalid object type 0x%x", object_type);
    return kErrorUndefined;
  }

  drm_atomic_intf_->Perform(DRMOps::DPPS_CACHE_FEATURE, obj_id, feature_id, value);
  return kErrorNone;
}

DisplayError HWPeripheralDRM::GetDppsFeatureInfo(void *payload, size_t size) {
  if (size != sizeof(DRMDppsFeatureInfo)) {
    DLOGE("invalid payload size %zu, expected %zu", size, sizeof(DRMDppsFeatureInfo));
    return kErrorParameters;
  }
  DRMDppsFeatureInfo *feature_info = reinterpret_cast<DRMDppsFeatureInfo *>(payload);
  drm_mgr_intf_->GetDppsFeatureInfo(feature_info);
  return kErrorNone;
}

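// On secure display start/end, flush video-mode panels so stale content is removed before
// the transition; the end path additionally forces a synchronous commit.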
DisplayError HWPeripheralDRM::HandleSecureEvent(SecureEvent secure_event, HWLayers *hw_layers) {
  switch (secure_event) {
    case kSecureDisplayStart: {
      secure_display_active_ = true;
      if (hw_panel_info_.mode != kModeCommand) {
        DisplayError err = Flush(hw_layers);
        if (err != kErrorNone) {
          return err;
        }
      }
    }
    break;

    case kSecureDisplayEnd: {
      if (hw_panel_info_.mode != kModeCommand) {
        DisplayError err = Flush(hw_layers);
        if (err != kErrorNone) {
          return err;
        }
      }
      secure_display_active_ = false;
      synchronous_commit_ = true;
    }
    break;

    default:
      DLOGE("Invalid secure event %d", secure_event);
      return kErrorNotSupported;
  }

  return kErrorNone;
}

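// Enable or tear down concurrent writeback (CWB) based on the presence of an output buffer.
// The CWB connector modes are set up once during validate, then DRM properties are programmed
// each cycle; the retire fence is queried only at commit time.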
void HWPeripheralDRM::SetupConcurrentWriteback(const HWLayersInfo &hw_layer_info, bool validate) {
  bool enable = hw_resource_.has_concurrent_writeback && hw_layer_info.stack->output_buffer;
  if (!(enable || cwb_config_.enabled)) {
    return;
  }

  bool setup_modes = enable && !cwb_config_.enabled && validate;
  if (setup_modes && (SetupConcurrentWritebackModes() == kErrorNone)) {
    cwb_config_.enabled = true;
  }

  if (cwb_config_.enabled) {
    if (enable) {
      // Set DRM properties for Concurrent Writeback.
      ConfigureConcurrentWriteback(hw_layer_info.stack);

      if (!validate) {
        // Set the GET_RETIRE_FENCE property to get the Concurrent Writeback fence.
        int *fence = &hw_layer_info.stack->output_buffer->release_fence_fd;
        drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE,
                                  cwb_config_.token.conn_id, fence);
      }
    } else {
      // Tear down the Concurrent Writeback topology.
      drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, cwb_config_.token.conn_id, 0);
    }
  }
}

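// Release the virtual display connector reserved for CWB.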
DisplayError HWPeripheralDRM::TeardownConcurrentWriteback(void) {
  if (cwb_config_.enabled) {
    drm_mgr_intf_->UnregisterDisplay(&(cwb_config_.token));
    cwb_config_.enabled = false;
    registry_.Clear();
  }

  return kErrorNone;
}

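// Reserve a virtual-display connector and push the primary display's mode list to the driver
// so the writeback connector can mirror it.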
DisplayError HWPeripheralDRM::SetupConcurrentWritebackModes() {
  // To set up the Concurrent Writeback topology, get the connector ID of the virtual display.
  if (drm_mgr_intf_->RegisterDisplay(DRMDisplayType::VIRTUAL, &cwb_config_.token)) {
    DLOGE("RegisterDisplay failed for Concurrent Writeback");
    return kErrorResources;
  }

  // Set the modes based on the primary display.
  std::vector<drmModeModeInfo> modes;
  for (auto &item : connector_info_.modes) {
    modes.push_back(item.mode);
  }

  // Inform the driver of the mode list.
  struct sde_drm_wb_cfg cwb_cfg = {};
  cwb_cfg.connector_id = cwb_config_.token.conn_id;
  cwb_cfg.flags = SDE_DRM_WB_CFG_FLAGS_CONNECTED;
  cwb_cfg.count_modes = UINT32(modes.size());
  cwb_cfg.modes = reinterpret_cast<uint64_t>(modes.data());

  int ret = -EINVAL;
#ifdef DRM_IOCTL_SDE_WB_CONFIG
  ret = drmIoctl(dev_fd_, DRM_IOCTL_SDE_WB_CONFIG, &cwb_cfg);
#endif
  if (ret) {
    drm_mgr_intf_->UnregisterDisplay(&(cwb_config_.token));
    DLOGE("Dump CWBConfig: mode_count %d flags %x", cwb_cfg.count_modes, cwb_cfg.flags);
    DumpConnectorModeInfo();
    return kErrorHardware;
  }

  return kErrorNone;
}

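// Program the per-frame CWB properties: CRTC binding, capture point, output FB, secure mode
// and output rect.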
void HWPeripheralDRM::ConfigureConcurrentWriteback(LayerStack *layer_stack) {
  LayerBuffer *output_buffer = layer_stack->output_buffer;
  registry_.MapOutputBufferToFbId(output_buffer);

  // Set the topology for Concurrent Writeback: [CRTC_PRIMARY_DISPLAY - CONNECTOR_VIRTUAL_DISPLAY].
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, cwb_config_.token.conn_id, token_.crtc_id);

  // Set the CRTC capture mode: post-DSPP output when post-processed output is requested,
  // layer-mixer output otherwise.
  DRMCWbCaptureMode capture_mode = layer_stack->flags.post_processed_output ?
                                   DRMCWbCaptureMode::DSPP_OUT : DRMCWbCaptureMode::MIXER_OUT;
  drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CAPTURE_MODE, token_.crtc_id, capture_mode);

  // Set the connector output FB.
  uint32_t fb_id = registry_.GetOutputFbId(output_buffer->handle_id);
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_OUTPUT_FB_ID, cwb_config_.token.conn_id, fb_id);

  // Set the connector secure mode.
  bool secure = output_buffer->flags.secure;
  DRMSecureMode mode = secure ? DRMSecureMode::SECURE : DRMSecureMode::NON_SECURE;
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_FB_SECURE_MODE, cwb_config_.token.conn_id, mode);

  // Set the connector output rect to the full frame.
  sde_drm::DRMRect dst = {};
  dst.left = 0;
  dst.top = 0;
  dst.right = display_attributes_[current_mode_index_].x_pixels;
  dst.bottom = display_attributes_[current_mode_index_].y_pixels;
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_OUTPUT_RECT, cwb_config_.token.conn_id, dst);
}

void HWPeripheralDRM::PostCommitConcurrentWriteback(LayerBuffer *output_buffer) {
  bool enabled = hw_resource_.has_concurrent_writeback && output_buffer;

  if (!enabled) {
    TeardownConcurrentWriteback();
  }
}

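// Queue an idle power-collapse state change; it takes effect on the next commit.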
DisplayError HWPeripheralDRM::ControlIdlePowerCollapse(bool enable, bool synchronous) {
  sde_drm::DRMIdlePCState idle_pc_state =
    enable ? sde_drm::DRMIdlePCState::ENABLE : sde_drm::DRMIdlePCState::DISABLE;
  if (idle_pc_state == idle_pc_state_) {
    return kErrorNone;
  }
  // Idle PC is disabled on the subsequent commit. When disabling, honor the synchronous flag
  // so the commit blocks and the TA accesses the display_cc registers only after idle PC is
  // actually disabled.
  idle_pc_state_ = idle_pc_state;
  synchronous_commit_ = !enable ? synchronous : false;
  return kErrorNone;
}

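// Skip power-on before the first commit cycle; otherwise enable idle PC as part of the
// power-on commit and record the resulting state.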
DisplayError HWPeripheralDRM::PowerOn(const HWQosData &qos_data, int *release_fence) {
  DTRACE_SCOPED();
  if (!drm_atomic_intf_) {
    DLOGE("DRM Atomic Interface is null!");
    return kErrorUndefined;
  }

  if (first_cycle_) {
    return kErrorNone;
  }
  drm_atomic_intf_->Perform(sde_drm::DRMOps::CRTC_SET_IDLE_PC_STATE, token_.crtc_id,
                            sde_drm::DRMIdlePCState::ENABLE);
  DisplayError err = HWDeviceDRM::PowerOn(qos_data, release_fence);
  if (err != kErrorNone) {
    return err;
  }
  idle_pc_state_ = sde_drm::DRMIdlePCState::ENABLE;

  return kErrorNone;
}

DisplayError HWPeripheralDRM::SetDisplayAttributes(uint32_t index) {
  HWDeviceDRM::SetDisplayAttributes(index);
  // Update the bit clock rates.
  hw_panel_info_.bitclk_rates = bitclk_rates_;

  return kErrorNone;
}

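// Convenience wrapper that forwards an AD4 ROI configuration to SetDppsFeature on the CRTC.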
DisplayError HWPeripheralDRM::SetDisplayDppsAdROI(void *payload) {
  DisplayError err = kErrorNone;
  struct sde_drm::DppsFeaturePayload feature_payload = {};

  if (!payload) {
    DLOGE("Invalid payload parameter");
    return kErrorParameters;
  }

  feature_payload.object_type = DRM_MODE_OBJECT_CRTC;
  feature_payload.feature_id = sde_drm::kFeatureAd4Roi;
  feature_payload.value = reinterpret_cast<uint64_t>(payload);

  err = SetDppsFeature(&feature_payload, sizeof(feature_payload));
  if (err != kErrorNone) {
    DLOGE("Failed to SetDppsFeature feature_id = %d, err = %d",
          sde_drm::kFeatureAd4Roi, err);
  }

  return err;
}

}  // namespace sdm