|
| 1 | +// SPDX-License-Identifier: GPL-2.0-or-later |
| 2 | +// Copyright (c) 2024 Hisilicon Limited. |
| 3 | + |
| 4 | +#include <linux/delay.h> |
| 5 | +#include <drm/drm_device.h> |
| 6 | +#include <drm/drm_print.h> |
| 7 | +#include "dp_comm.h" |
| 8 | +#include "dp_reg.h" |
| 9 | + |
| 10 | +#define HIBMC_EQ_MAX_RETRY 5 |
| 11 | + |
/*
 * hibmc_dp_link_training_configure() - prepare source and sink for training
 * @dp: DP device instance
 *
 * Programs the source side lane enables/count and enhanced-frame mode, then
 * writes the negotiated link rate and lane count (with enhanced framing
 * enabled) plus the downspread / 8b10b coding settings into the sink's DPCD.
 * Finally refreshes the cached DPCD receiver capabilities, which are used
 * later for the training delay helpers.
 *
 * Return: 0 on success, a negative errno on AUX/DPCD access failure.
 */
static int hibmc_dp_link_training_configure(struct hibmc_dp_dev *dp)
{
	u8 buf[2];
	int ret;

	/* DP 2 lane */
	hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_LANE_DATA_EN,
				 dp->link.cap.lanes == 0x2 ? 0x3 : 0x1);
	hibmc_dp_reg_write_field(dp, HIBMC_DP_DPTX_GCTL0, HIBMC_DP_CFG_PHY_LANE_NUM,
				 dp->link.cap.lanes == 0x2 ? 0x1 : 0);

	/* enhanced frame */
	hibmc_dp_reg_write_field(dp, HIBMC_DP_VIDEO_CTRL, HIBMC_DP_CFG_STREAM_FRAME_MODE, 0x1);

	/* set rate and lane count */
	buf[0] = dp->link.cap.link_rate;
	buf[1] = DP_LANE_COUNT_ENHANCED_FRAME_EN | dp->link.cap.lanes;
	ret = drm_dp_dpcd_write(&dp->aux, DP_LINK_BW_SET, buf, sizeof(buf));
	if (ret != sizeof(buf)) {
		drm_dbg_dp(dp->dev, "dp aux write link rate and lanes failed, ret: %d\n", ret);
		/* a short write carries no errno; normalize it to -EIO */
		return ret >= 0 ? -EIO : ret;
	}

	/* set 8b/10b and downspread */
	buf[0] = DP_SPREAD_AMP_0_5;
	buf[1] = DP_SET_ANSI_8B10B;
	ret = drm_dp_dpcd_write(&dp->aux, DP_DOWNSPREAD_CTRL, buf, sizeof(buf));
	if (ret != sizeof(buf)) {
		drm_dbg_dp(dp->dev, "dp aux write 8b/10b and downspread failed, ret: %d\n", ret);
		return ret >= 0 ? -EIO : ret;
	}

	/* refresh cached sink caps; consumed by the training delay helpers below */
	ret = drm_dp_read_dpcd_caps(&dp->aux, dp->dpcd);
	if (ret)
		drm_err(dp->dev, "dp aux read dpcd failed, ret: %d\n", ret);

	return ret;
}
| 50 | + |
| 51 | +static int hibmc_dp_link_set_pattern(struct hibmc_dp_dev *dp, int pattern) |
| 52 | +{ |
| 53 | + int ret; |
| 54 | + u8 val; |
| 55 | + u8 buf; |
| 56 | + |
| 57 | + buf = (u8)pattern; |
| 58 | + if (pattern != DP_TRAINING_PATTERN_DISABLE && pattern != DP_TRAINING_PATTERN_4) { |
| 59 | + buf |= DP_LINK_SCRAMBLING_DISABLE; |
| 60 | + hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_SCRAMBLE_EN, 0x1); |
| 61 | + } else { |
| 62 | + hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_SCRAMBLE_EN, 0); |
| 63 | + } |
| 64 | + |
| 65 | + switch (pattern) { |
| 66 | + case DP_TRAINING_PATTERN_DISABLE: |
| 67 | + val = 0; |
| 68 | + break; |
| 69 | + case DP_TRAINING_PATTERN_1: |
| 70 | + val = 1; |
| 71 | + break; |
| 72 | + case DP_TRAINING_PATTERN_2: |
| 73 | + val = 2; |
| 74 | + break; |
| 75 | + case DP_TRAINING_PATTERN_3: |
| 76 | + val = 3; |
| 77 | + break; |
| 78 | + case DP_TRAINING_PATTERN_4: |
| 79 | + val = 4; |
| 80 | + break; |
| 81 | + default: |
| 82 | + return -EINVAL; |
| 83 | + } |
| 84 | + |
| 85 | + hibmc_dp_reg_write_field(dp, HIBMC_DP_PHYIF_CTRL0, HIBMC_DP_CFG_PAT_SEL, val); |
| 86 | + |
| 87 | + ret = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_PATTERN_SET, &buf, sizeof(buf)); |
| 88 | + if (ret != sizeof(buf)) { |
| 89 | + drm_dbg_dp(dp->dev, "dp aux write training pattern set failed\n"); |
| 90 | + return ret >= 0 ? -EIO : ret; |
| 91 | + } |
| 92 | + |
| 93 | + return 0; |
| 94 | +} |
| 95 | + |
| 96 | +static int hibmc_dp_link_training_cr_pre(struct hibmc_dp_dev *dp) |
| 97 | +{ |
| 98 | + u8 *train_set = dp->link.train_set; |
| 99 | + int ret; |
| 100 | + u8 i; |
| 101 | + |
| 102 | + ret = hibmc_dp_link_training_configure(dp); |
| 103 | + if (ret) |
| 104 | + return ret; |
| 105 | + |
| 106 | + ret = hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_1); |
| 107 | + if (ret) |
| 108 | + return ret; |
| 109 | + |
| 110 | + for (i = 0; i < dp->link.cap.lanes; i++) |
| 111 | + train_set[i] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2; |
| 112 | + |
| 113 | + ret = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, train_set, dp->link.cap.lanes); |
| 114 | + if (ret != dp->link.cap.lanes) { |
| 115 | + drm_dbg_dp(dp->dev, "dp aux write training lane set failed\n"); |
| 116 | + return ret >= 0 ? -EIO : ret; |
| 117 | + } |
| 118 | + |
| 119 | + return 0; |
| 120 | +} |
| 121 | + |
| 122 | +static bool hibmc_dp_link_get_adjust_train(struct hibmc_dp_dev *dp, |
| 123 | + u8 lane_status[DP_LINK_STATUS_SIZE]) |
| 124 | +{ |
| 125 | + u8 train_set[HIBMC_DP_LANE_NUM_MAX] = {0}; |
| 126 | + u8 lane; |
| 127 | + |
| 128 | + for (lane = 0; lane < dp->link.cap.lanes; lane++) |
| 129 | + train_set[lane] = drm_dp_get_adjust_request_voltage(lane_status, lane) | |
| 130 | + drm_dp_get_adjust_request_pre_emphasis(lane_status, lane); |
| 131 | + |
| 132 | + if (memcmp(dp->link.train_set, train_set, HIBMC_DP_LANE_NUM_MAX)) { |
| 133 | + memcpy(dp->link.train_set, train_set, HIBMC_DP_LANE_NUM_MAX); |
| 134 | + return true; |
| 135 | + } |
| 136 | + |
| 137 | + return false; |
| 138 | +} |
| 139 | + |
| 140 | +static inline int hibmc_dp_link_reduce_rate(struct hibmc_dp_dev *dp) |
| 141 | +{ |
| 142 | + switch (dp->link.cap.link_rate) { |
| 143 | + case DP_LINK_BW_2_7: |
| 144 | + dp->link.cap.link_rate = DP_LINK_BW_1_62; |
| 145 | + return 0; |
| 146 | + case DP_LINK_BW_5_4: |
| 147 | + dp->link.cap.link_rate = DP_LINK_BW_2_7; |
| 148 | + return 0; |
| 149 | + case DP_LINK_BW_8_1: |
| 150 | + dp->link.cap.link_rate = DP_LINK_BW_5_4; |
| 151 | + return 0; |
| 152 | + default: |
| 153 | + return -EINVAL; |
| 154 | + } |
| 155 | +} |
| 156 | + |
| 157 | +static inline int hibmc_dp_link_reduce_lane(struct hibmc_dp_dev *dp) |
| 158 | +{ |
| 159 | + switch (dp->link.cap.lanes) { |
| 160 | + case 0x2: |
| 161 | + dp->link.cap.lanes--; |
| 162 | + break; |
| 163 | + case 0x1: |
| 164 | + drm_err(dp->dev, "dp link training reduce lane failed, already reach minimum\n"); |
| 165 | + return -EIO; |
| 166 | + default: |
| 167 | + return -EINVAL; |
| 168 | + } |
| 169 | + |
| 170 | + return 0; |
| 171 | +} |
| 172 | + |
| 173 | +static int hibmc_dp_link_training_cr(struct hibmc_dp_dev *dp) |
| 174 | +{ |
| 175 | + u8 lane_status[DP_LINK_STATUS_SIZE] = {0}; |
| 176 | + bool level_changed; |
| 177 | + u32 voltage_tries; |
| 178 | + u32 cr_tries; |
| 179 | + int ret; |
| 180 | + |
| 181 | + /* |
| 182 | + * DP 1.4 spec define 10 for maxtries value, for pre DP 1.4 version set a limit of 80 |
| 183 | + * (4 voltage levels x 4 preemphasis levels x 5 identical voltage retries) |
| 184 | + */ |
| 185 | + |
| 186 | + voltage_tries = 1; |
| 187 | + for (cr_tries = 0; cr_tries < 80; cr_tries++) { |
| 188 | + drm_dp_link_train_clock_recovery_delay(&dp->aux, dp->dpcd); |
| 189 | + |
| 190 | + ret = drm_dp_dpcd_read_link_status(&dp->aux, lane_status); |
| 191 | + if (ret != DP_LINK_STATUS_SIZE) { |
| 192 | + drm_err(dp->dev, "Get lane status failed\n"); |
| 193 | + return ret; |
| 194 | + } |
| 195 | + |
| 196 | + if (drm_dp_clock_recovery_ok(lane_status, dp->link.cap.lanes)) { |
| 197 | + drm_dbg_dp(dp->dev, "dp link training cr done\n"); |
| 198 | + dp->link.status.clock_recovered = true; |
| 199 | + return 0; |
| 200 | + } |
| 201 | + |
| 202 | + if (voltage_tries == 5) { |
| 203 | + drm_dbg_dp(dp->dev, "same voltage tries 5 times\n"); |
| 204 | + dp->link.status.clock_recovered = false; |
| 205 | + return 0; |
| 206 | + } |
| 207 | + |
| 208 | + level_changed = hibmc_dp_link_get_adjust_train(dp, lane_status); |
| 209 | + ret = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, dp->link.train_set, |
| 210 | + dp->link.cap.lanes); |
| 211 | + if (ret != dp->link.cap.lanes) { |
| 212 | + drm_dbg_dp(dp->dev, "Update link training failed\n"); |
| 213 | + return ret >= 0 ? -EIO : ret; |
| 214 | + } |
| 215 | + |
| 216 | + voltage_tries = level_changed ? 1 : voltage_tries + 1; |
| 217 | + } |
| 218 | + |
| 219 | + drm_err(dp->dev, "dp link training clock recovery 80 times failed\n"); |
| 220 | + dp->link.status.clock_recovered = false; |
| 221 | + |
| 222 | + return 0; |
| 223 | +} |
| 224 | + |
| 225 | +static int hibmc_dp_link_training_channel_eq(struct hibmc_dp_dev *dp) |
| 226 | +{ |
| 227 | + u8 lane_status[DP_LINK_STATUS_SIZE] = {0}; |
| 228 | + u8 eq_tries; |
| 229 | + int ret; |
| 230 | + |
| 231 | + ret = hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_2); |
| 232 | + if (ret) |
| 233 | + return ret; |
| 234 | + |
| 235 | + for (eq_tries = 0; eq_tries < HIBMC_EQ_MAX_RETRY; eq_tries++) { |
| 236 | + drm_dp_link_train_channel_eq_delay(&dp->aux, dp->dpcd); |
| 237 | + |
| 238 | + ret = drm_dp_dpcd_read_link_status(&dp->aux, lane_status); |
| 239 | + if (ret != DP_LINK_STATUS_SIZE) { |
| 240 | + drm_err(dp->dev, "get lane status failed\n"); |
| 241 | + break; |
| 242 | + } |
| 243 | + |
| 244 | + if (!drm_dp_clock_recovery_ok(lane_status, dp->link.cap.lanes)) { |
| 245 | + drm_dbg_dp(dp->dev, "clock recovery check failed\n"); |
| 246 | + drm_dbg_dp(dp->dev, "cannot continue channel equalization\n"); |
| 247 | + dp->link.status.clock_recovered = false; |
| 248 | + break; |
| 249 | + } |
| 250 | + |
| 251 | + if (drm_dp_channel_eq_ok(lane_status, dp->link.cap.lanes)) { |
| 252 | + dp->link.status.channel_equalized = true; |
| 253 | + drm_dbg_dp(dp->dev, "dp link training eq done\n"); |
| 254 | + break; |
| 255 | + } |
| 256 | + |
| 257 | + hibmc_dp_link_get_adjust_train(dp, lane_status); |
| 258 | + ret = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, |
| 259 | + dp->link.train_set, dp->link.cap.lanes); |
| 260 | + if (ret != dp->link.cap.lanes) { |
| 261 | + drm_dbg_dp(dp->dev, "Update link training failed\n"); |
| 262 | + ret = (ret >= 0) ? -EIO : ret; |
| 263 | + break; |
| 264 | + } |
| 265 | + } |
| 266 | + |
| 267 | + if (eq_tries == HIBMC_EQ_MAX_RETRY) |
| 268 | + drm_err(dp->dev, "channel equalization failed %u times\n", eq_tries); |
| 269 | + |
| 270 | + hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_DISABLE); |
| 271 | + |
| 272 | + return ret < 0 ? ret : 0; |
| 273 | +} |
| 274 | + |
/*
 * hibmc_dp_link_downgrade_training_cr() - downgrade after a CR failure
 * @dp: DP device instance
 *
 * Prefers dropping the link rate; only reduces the lane count once the rate
 * is already at its minimum.
 *
 * Return: 0 on success, a negative errno when no downgrade is left.
 */
static int hibmc_dp_link_downgrade_training_cr(struct hibmc_dp_dev *dp)
{
	int ret = hibmc_dp_link_reduce_rate(dp);

	if (ret)
		ret = hibmc_dp_link_reduce_lane(dp);

	return ret;
}
| 282 | + |
| 283 | +static int hibmc_dp_link_downgrade_training_eq(struct hibmc_dp_dev *dp) |
| 284 | +{ |
| 285 | + if ((dp->link.status.clock_recovered && !dp->link.status.channel_equalized)) { |
| 286 | + if (!hibmc_dp_link_reduce_lane(dp)) |
| 287 | + return 0; |
| 288 | + } |
| 289 | + |
| 290 | + return hibmc_dp_link_reduce_rate(dp); |
| 291 | +} |
| 292 | + |
| 293 | +int hibmc_dp_link_training(struct hibmc_dp_dev *dp) |
| 294 | +{ |
| 295 | + struct hibmc_dp_link *link = &dp->link; |
| 296 | + int ret; |
| 297 | + |
| 298 | + while (true) { |
| 299 | + ret = hibmc_dp_link_training_cr_pre(dp); |
| 300 | + if (ret) |
| 301 | + goto err; |
| 302 | + |
| 303 | + ret = hibmc_dp_link_training_cr(dp); |
| 304 | + if (ret) |
| 305 | + goto err; |
| 306 | + |
| 307 | + if (!link->status.clock_recovered) { |
| 308 | + ret = hibmc_dp_link_downgrade_training_cr(dp); |
| 309 | + if (ret) |
| 310 | + goto err; |
| 311 | + continue; |
| 312 | + } |
| 313 | + |
| 314 | + ret = hibmc_dp_link_training_channel_eq(dp); |
| 315 | + if (ret) |
| 316 | + goto err; |
| 317 | + |
| 318 | + if (!link->status.channel_equalized) { |
| 319 | + ret = hibmc_dp_link_downgrade_training_eq(dp); |
| 320 | + if (ret) |
| 321 | + goto err; |
| 322 | + continue; |
| 323 | + } |
| 324 | + |
| 325 | + return 0; |
| 326 | + } |
| 327 | + |
| 328 | +err: |
| 329 | + hibmc_dp_link_set_pattern(dp, DP_TRAINING_PATTERN_DISABLE); |
| 330 | + |
| 331 | + return ret; |
| 332 | +} |
0 commit comments