43
43
"0dB", "3.5dB", "6dB", "9.5dB"
46
/* Effective link clocks (in 10 kHz units) for each supported
 * lane-count / link-rate combination; kept sorted ascending so the
 * lookup loops below can stop at the first entry >= mode clock.
 */
static const int dp_clocks[] = {
	54000,  /* 1 lane, 1.62 Ghz */
	90000,  /* 1 lane, 2.70 Ghz */
	108000, /* 2 lane, 1.62 Ghz */
	180000, /* 2 lane, 2.70 Ghz */
	216000, /* 4 lane, 1.62 Ghz */
	360000, /* 4 lane, 2.70 Ghz */
};
46
/***** radeon AUX functions *****/
47
union aux_channel_transaction {
48
PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
49
PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
55
/* Element count of dp_clocks; sizeof the element (not a bare type)
 * so the expression stays correct if the element type ever changes.
 */
static const int num_dp_clocks = sizeof(dp_clocks) / sizeof(dp_clocks[0]);
57
/* common helper functions */
58
static int dp_lanes_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock)
67
max_link_bw = dpcd[DP_MAX_LINK_RATE];
68
max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
70
switch (max_link_bw) {
73
for (i = 0; i < num_dp_clocks; i++) {
76
switch (max_lane_count) {
89
if (dp_clocks[i] > mode_clock) {
100
for (i = 0; i < num_dp_clocks; i++) {
101
switch (max_lane_count) {
114
if (dp_clocks[i] > mode_clock) {
129
static int dp_link_clock_for_mode_clock(u8 dpcd[DP_DPCD_SIZE], int mode_clock)
138
max_link_bw = dpcd[DP_MAX_LINK_RATE];
139
max_lane_count = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
141
switch (max_link_bw) {
142
case DP_LINK_BW_1_62:
144
for (i = 0; i < num_dp_clocks; i++) {
147
switch (max_lane_count) {
160
if (dp_clocks[i] > mode_clock)
165
for (i = 0; i < num_dp_clocks; i++) {
166
switch (max_lane_count) {
179
if (dp_clocks[i] > mode_clock)
180
return (i % 2) ? 270000 : 162000;
187
int dp_mode_valid(u8 dpcd[DP_DPCD_SIZE], int mode_clock)
189
int lanes = dp_lanes_for_mode_clock(dpcd, mode_clock);
190
int dp_clock = dp_link_clock_for_mode_clock(dpcd, mode_clock);
192
if ((lanes == 0) || (dp_clock == 0))
193
return MODE_CLOCK_HIGH;
52
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
53
u8 *send, int send_bytes,
54
u8 *recv, int recv_size,
57
struct drm_device *dev = chan->dev;
58
struct radeon_device *rdev = dev->dev_private;
59
union aux_channel_transaction args;
60
int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
64
memset(&args, 0, sizeof(args));
66
base = (unsigned char *)rdev->mode_info.atom_context->scratch;
68
memcpy(base, send, send_bytes);
70
args.v1.lpAuxRequest = 0;
71
args.v1.lpDataOut = 16;
72
args.v1.ucDataOutLen = 0;
73
args.v1.ucChannelID = chan->rec.i2c_id;
74
args.v1.ucDelay = delay / 10;
75
if (ASIC_IS_DCE4(rdev))
76
args.v2.ucHPD_ID = chan->rec.hpd;
78
atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
80
*ack = args.v1.ucReplyStatus;
83
if (args.v1.ucReplyStatus == 1) {
84
DRM_DEBUG_KMS("dp_aux_ch timeout\n");
89
if (args.v1.ucReplyStatus == 2) {
90
DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
95
if (args.v1.ucReplyStatus == 3) {
96
DRM_DEBUG_KMS("dp_aux_ch error\n");
100
recv_bytes = args.v1.ucDataOutLen;
101
if (recv_bytes > recv_size)
102
recv_bytes = recv_size;
104
if (recv && recv_size)
105
memcpy(recv, base + 16, recv_bytes);
110
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
111
u16 address, u8 *send, u8 send_bytes, u8 delay)
113
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
116
int msg_bytes = send_bytes + 4;
123
msg[1] = address >> 8;
124
msg[2] = AUX_NATIVE_WRITE << 4;
125
msg[3] = (msg_bytes << 4) | (send_bytes - 1);
126
memcpy(&msg[4], send, send_bytes);
129
ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
130
msg, msg_bytes, NULL, 0, delay, &ack);
133
if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
135
else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
144
static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
145
u16 address, u8 *recv, int recv_bytes, u8 delay)
147
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
154
msg[1] = address >> 8;
155
msg[2] = AUX_NATIVE_READ << 4;
156
msg[3] = (msg_bytes << 4) | (recv_bytes - 1);
159
ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
160
msg, msg_bytes, recv, recv_bytes, delay, &ack);
165
if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
167
else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
174
static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
177
radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
180
static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
185
radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);
190
int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
191
u8 write_byte, u8 *read_byte)
193
struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
194
struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
195
u16 address = algo_data->address;
204
/* Set up the command byte */
205
if (mode & MODE_I2C_READ)
206
msg[2] = AUX_I2C_READ << 4;
208
msg[2] = AUX_I2C_WRITE << 4;
210
if (!(mode & MODE_I2C_STOP))
211
msg[2] |= AUX_I2C_MOT << 4;
214
msg[1] = address >> 8;
219
msg[3] = msg_bytes << 4;
224
msg[3] = msg_bytes << 4;
232
for (retry = 0; retry < 4; retry++) {
233
ret = radeon_process_aux_ch(auxch,
234
msg, msg_bytes, reply, reply_bytes, 0, &ack);
236
DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
240
switch (ack & AUX_NATIVE_REPLY_MASK) {
241
case AUX_NATIVE_REPLY_ACK:
242
/* I2C-over-AUX Reply field is only valid
243
* when paired with AUX ACK.
246
case AUX_NATIVE_REPLY_NACK:
247
DRM_DEBUG_KMS("aux_ch native nack\n");
249
case AUX_NATIVE_REPLY_DEFER:
250
DRM_DEBUG_KMS("aux_ch native defer\n");
254
DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
258
switch (ack & AUX_I2C_REPLY_MASK) {
259
case AUX_I2C_REPLY_ACK:
260
if (mode == MODE_I2C_READ)
261
*read_byte = reply[0];
263
case AUX_I2C_REPLY_NACK:
264
DRM_DEBUG_KMS("aux_i2c nack\n");
266
case AUX_I2C_REPLY_DEFER:
267
DRM_DEBUG_KMS("aux_i2c defer\n");
271
DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
276
DRM_ERROR("aux i2c too many retries, giving up\n");
280
/***** general DP utility functions *****/
198
282
static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
321
391
train_set[lane] = v | p;
324
union aux_channel_transaction {
325
PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
326
PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
329
/* radeon aux chan functions */
330
bool radeon_process_aux_ch(struct radeon_i2c_chan *chan, u8 *req_bytes,
331
int num_bytes, u8 *read_byte,
332
u8 read_buf_len, u8 delay)
334
struct drm_device *dev = chan->dev;
335
struct radeon_device *rdev = dev->dev_private;
336
union aux_channel_transaction args;
337
int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
341
memset(&args, 0, sizeof(args));
343
base = (unsigned char *)rdev->mode_info.atom_context->scratch;
346
memcpy(base, req_bytes, num_bytes);
348
args.v1.lpAuxRequest = 0;
349
args.v1.lpDataOut = 16;
350
args.v1.ucDataOutLen = 0;
351
args.v1.ucChannelID = chan->rec.i2c_id;
352
args.v1.ucDelay = delay / 10;
353
if (ASIC_IS_DCE4(rdev))
354
args.v2.ucHPD_ID = chan->rec.hpd;
356
atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
358
if (args.v1.ucReplyStatus && !args.v1.ucDataOutLen) {
359
if (args.v1.ucReplyStatus == 0x20 && retry_count++ < 10)
361
DRM_DEBUG_KMS("failed to get auxch %02x%02x %02x %02x 0x%02x %02x after %d retries\n",
362
req_bytes[1], req_bytes[0], req_bytes[2], req_bytes[3],
363
chan->rec.i2c_id, args.v1.ucReplyStatus, retry_count);
367
if (args.v1.ucDataOutLen && read_byte && read_buf_len) {
368
if (read_buf_len < args.v1.ucDataOutLen) {
369
DRM_ERROR("Buffer to small for return answer %d %d\n",
370
read_buf_len, args.v1.ucDataOutLen);
374
int len = min(read_buf_len, args.v1.ucDataOutLen);
375
memcpy(read_byte, base + 16, len);
381
bool radeon_dp_aux_native_write(struct radeon_connector *radeon_connector, uint16_t address,
382
uint8_t send_bytes, uint8_t *send)
384
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
386
u8 msg_len, dp_msg_len;
391
msg[1] = address >> 8;
392
msg[2] = AUX_NATIVE_WRITE << 4;
393
dp_msg_len += send_bytes;
394
msg[3] = (dp_msg_len << 4) | (send_bytes - 1);
399
memcpy(&msg[4], send, send_bytes);
400
msg_len = 4 + send_bytes;
401
ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, msg, msg_len, NULL, 0, 0);
405
bool radeon_dp_aux_native_read(struct radeon_connector *radeon_connector, uint16_t address,
406
uint8_t delay, uint8_t expected_bytes,
409
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
411
u8 msg_len, dp_msg_len;
416
msg[1] = address >> 8;
417
msg[2] = AUX_NATIVE_READ << 4;
418
msg[3] = (dp_msg_len) << 4;
419
msg[3] |= expected_bytes - 1;
421
ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus, msg, msg_len, read_p, expected_bytes, delay);
425
/* radeon dp functions */
426
static u8 radeon_dp_encoder_service(struct radeon_device *rdev, int action, int dp_clock,
427
uint8_t ucconfig, uint8_t lane_num)
394
/* convert bits per color to bits per pixel */
/* get bpc from the EDID */
static int convert_bpc_to_bpp(int bpc)
{
	/* bpc == 0 means the EDID did not report a depth; assume 8 bpc
	 * (24 bpp), the DP baseline.
	 */
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}
404
/* get the max pix clock supported by the link rate and lane num */
/* link_rate is in 10 kHz units; a lane carries 8 data bits per link
 * clock, so max pixel clock = link_rate * lanes * 8 / bits-per-pixel.
 */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}
412
/* Decode the sink's DPCD MAX_LINK_RATE field into a link clock in
 * 10 kHz units; unknown encodings fall back to the 1.62 Gbps minimum.
 */
static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
{
	switch (dpcd[DP_MAX_LINK_RATE]) {
	case DP_LINK_BW_1_62:
	default:
		return 162000;
	case DP_LINK_BW_2_7:
		return 270000;
	case DP_LINK_BW_5_4:
		return 540000;
	}
}
425
/* Maximum lane count advertised by the sink (low bits of
 * DPCD MAX_LANE_COUNT; the high bits carry unrelated flags).
 */
static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
	return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
}
430
static u8 dp_get_dp_link_rate_coded(int link_rate)
435
return DP_LINK_BW_1_62;
437
return DP_LINK_BW_2_7;
439
return DP_LINK_BW_5_4;
443
/***** radeon specific DP functions *****/
445
/* First get the min lane# when low rate is used according to pixel clock
446
* (prefer low rate), second check max lane# supported by DP panel,
447
* if the max lane# < low rate lane# then use max lane# instead.
449
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
450
u8 dpcd[DP_DPCD_SIZE],
453
int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
454
int max_link_rate = dp_get_max_link_rate(dpcd);
455
int max_lane_num = dp_get_max_lane_number(dpcd);
457
int max_dp_pix_clock;
459
for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
460
max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
461
if (pix_clock <= max_dp_pix_clock)
468
static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
469
u8 dpcd[DP_DPCD_SIZE],
472
int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
473
int lane_num, max_pix_clock;
475
if (radeon_connector_encoder_is_dp_bridge(connector))
478
lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
479
max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
480
if (pix_clock <= max_pix_clock)
482
max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
483
if (pix_clock <= max_pix_clock)
485
if (radeon_connector_is_dp12_capable(connector)) {
486
max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
487
if (pix_clock <= max_pix_clock)
491
return dp_get_max_link_rate(dpcd);
494
static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
495
int action, int dp_clock,
496
u8 ucconfig, u8 lane_num)
429
498
DP_ENCODER_SERVICE_PARAMETERS args;
430
499
int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
455
524
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
459
ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, 0, 8, msg);
528
ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
461
530
memcpy(dig_connector->dpcd, msg, 8);
464
DRM_DEBUG_KMS("DPCD: ");
465
for (i = 0; i < 8; i++)
466
DRM_DEBUG_KMS("%02x ", msg[i]);
531
DRM_DEBUG_KMS("DPCD: ");
532
for (i = 0; i < 8; i++)
533
DRM_DEBUG_KMS("%02x ", msg[i]);
471
537
dig_connector->dpcd[0] = 0;
541
static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
542
struct drm_connector *connector)
544
struct drm_device *dev = encoder->dev;
545
struct radeon_device *rdev = dev->dev_private;
546
int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
548
if (!ASIC_IS_DCE4(rdev))
551
if (radeon_connector_encoder_is_dp_bridge(connector))
552
panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
554
atombios_dig_encoder_setup(encoder,
555
ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
475
559
void radeon_dp_set_link_config(struct drm_connector *connector,
476
560
struct drm_display_mode *mode)
478
struct radeon_connector *radeon_connector;
562
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
479
563
struct radeon_connector_atom_dig *dig_connector;
481
if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) &&
482
(connector->connector_type != DRM_MODE_CONNECTOR_eDP))
485
radeon_connector = to_radeon_connector(connector);
486
565
if (!radeon_connector->con_priv)
488
567
dig_connector = radeon_connector->con_priv;
490
dig_connector->dp_clock =
491
dp_link_clock_for_mode_clock(dig_connector->dpcd, mode->clock);
492
dig_connector->dp_lane_count =
493
dp_lanes_for_mode_clock(dig_connector->dpcd, mode->clock);
569
if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
570
(dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
571
dig_connector->dp_clock =
572
radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
573
dig_connector->dp_lane_count =
574
radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
496
int radeon_dp_mode_valid_helper(struct radeon_connector *radeon_connector,
578
int radeon_dp_mode_valid_helper(struct drm_connector *connector,
497
579
struct drm_display_mode *mode)
499
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
501
return dp_mode_valid(dig_connector->dpcd, mode->clock);
581
struct radeon_connector *radeon_connector = to_radeon_connector(connector);
582
struct radeon_connector_atom_dig *dig_connector;
585
if (!radeon_connector->con_priv)
586
return MODE_CLOCK_HIGH;
587
dig_connector = radeon_connector->con_priv;
590
radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
592
if ((dp_clock == 540000) &&
593
(!radeon_connector_is_dp12_capable(connector)))
594
return MODE_CLOCK_HIGH;
504
static bool atom_dp_get_link_status(struct radeon_connector *radeon_connector,
505
u8 link_status[DP_LINK_STATUS_SIZE])
599
static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
600
u8 link_status[DP_LINK_STATUS_SIZE])
508
ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS, 100,
509
DP_LINK_STATUS_SIZE, link_status);
603
ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
604
link_status, DP_LINK_STATUS_SIZE, 100);
511
606
DRM_ERROR("displayport link status failed\n");
521
bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
523
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
524
u8 link_status[DP_LINK_STATUS_SIZE];
526
if (!atom_dp_get_link_status(radeon_connector, link_status))
528
if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count))
533
static void dp_set_power(struct radeon_connector *radeon_connector, u8 power_state)
535
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
537
if (dig_connector->dpcd[0] >= 0x11) {
538
radeon_dp_aux_native_write(radeon_connector, DP_SET_POWER, 1,
543
static void dp_set_downspread(struct radeon_connector *radeon_connector, u8 downspread)
545
radeon_dp_aux_native_write(radeon_connector, DP_DOWNSPREAD_CTRL, 1,
549
static void dp_set_link_bw_lanes(struct radeon_connector *radeon_connector,
550
u8 link_configuration[DP_LINK_CONFIGURATION_SIZE])
552
radeon_dp_aux_native_write(radeon_connector, DP_LINK_BW_SET, 2,
556
static void dp_update_dpvs_emph(struct radeon_connector *radeon_connector,
557
struct drm_encoder *encoder,
560
struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
563
for (i = 0; i < dig_connector->dp_lane_count; i++)
564
atombios_dig_transmitter_setup(encoder,
565
ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
568
radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_LANE0_SET,
569
dig_connector->dp_lane_count, train_set);
572
static void dp_set_training(struct radeon_connector *radeon_connector,
575
radeon_dp_aux_native_write(radeon_connector, DP_TRAINING_PATTERN_SET,
579
void dp_link_train(struct drm_encoder *encoder,
580
struct drm_connector *connector)
582
struct drm_device *dev = encoder->dev;
583
struct radeon_device *rdev = dev->dev_private;
584
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
585
struct radeon_encoder_atom_dig *dig;
616
struct radeon_dp_link_train_info {
617
struct radeon_device *rdev;
618
struct drm_encoder *encoder;
619
struct drm_connector *connector;
586
620
struct radeon_connector *radeon_connector;
587
struct radeon_connector_atom_dig *dig_connector;
589
bool clock_recovery, channel_eq;
590
u8 link_status[DP_LINK_STATUS_SIZE];
591
u8 link_configuration[DP_LINK_CONFIGURATION_SIZE];
596
if ((connector->connector_type != DRM_MODE_CONNECTOR_DisplayPort) &&
597
(connector->connector_type != DRM_MODE_CONNECTOR_eDP))
600
if (!radeon_encoder->enc_priv)
602
dig = radeon_encoder->enc_priv;
604
radeon_connector = to_radeon_connector(connector);
605
if (!radeon_connector->con_priv)
607
dig_connector = radeon_connector->con_priv;
609
if (dig->dig_encoder)
610
enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
612
enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
614
enc_id |= ATOM_DP_CONFIG_LINK_B;
616
enc_id |= ATOM_DP_CONFIG_LINK_A;
618
memset(link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
619
if (dig_connector->dp_clock == 270000)
620
link_configuration[0] = DP_LINK_BW_2_7;
622
link_configuration[0] = DP_LINK_BW_1_62;
623
link_configuration[1] = dig_connector->dp_lane_count;
624
if (dig_connector->dpcd[0] >= 0x11)
625
link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
628
u8 link_status[DP_LINK_STATUS_SIZE];
632
static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
634
/* set the initial vs/emph on the source */
635
atombios_dig_transmitter_setup(dp_info->encoder,
636
ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
637
0, dp_info->train_set[0]); /* sets all lanes at once */
639
/* set the vs/emph on the sink */
640
radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
641
dp_info->train_set, dp_info->dp_lane_count, 0);
644
static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
648
/* set training pattern on the source */
649
if (ASIC_IS_DCE4(dp_info->rdev)) {
651
case DP_TRAINING_PATTERN_1:
652
rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
654
case DP_TRAINING_PATTERN_2:
655
rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
657
case DP_TRAINING_PATTERN_3:
658
rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
661
atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
664
case DP_TRAINING_PATTERN_1:
667
case DP_TRAINING_PATTERN_2:
671
radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
672
dp_info->dp_clock, dp_info->enc_id, rtp);
675
/* enable training pattern on the sink */
676
radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
679
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
627
683
/* power up the sink */
628
dp_set_power(radeon_connector, DP_SET_POWER_D0);
629
/* disable the training pattern on the sink */
630
dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
631
/* set link bw and lanes on the sink */
632
dp_set_link_bw_lanes(radeon_connector, link_configuration);
633
/* disable downspread on the sink */
634
dp_set_downspread(radeon_connector, 0);
635
if (ASIC_IS_DCE4(rdev)) {
636
/* start training on the source */
637
atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_START);
638
/* set training pattern 1 on the source */
639
atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1);
641
/* start training on the source */
642
radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_START,
643
dig_connector->dp_clock, enc_id, 0);
644
/* set training pattern 1 on the source */
645
radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
646
dig_connector->dp_clock, enc_id, 0);
649
/* set initial vs/emph */
650
memset(train_set, 0, 4);
652
/* set training pattern 1 on the sink */
653
dp_set_training(radeon_connector, DP_TRAINING_PATTERN_1);
655
dp_update_dpvs_emph(radeon_connector, encoder, train_set);
684
if (dp_info->dpcd[0] >= 0x11)
685
radeon_write_dpcd_reg(dp_info->radeon_connector,
686
DP_SET_POWER, DP_SET_POWER_D0);
688
/* possibly enable downspread on the sink */
689
if (dp_info->dpcd[3] & 0x1)
690
radeon_write_dpcd_reg(dp_info->radeon_connector,
691
DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
693
radeon_write_dpcd_reg(dp_info->radeon_connector,
694
DP_DOWNSPREAD_CTRL, 0);
696
radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);
698
/* set the lane count on the sink */
699
tmp = dp_info->dp_lane_count;
700
if (dp_info->dpcd[0] >= 0x11)
701
tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
702
radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);
704
/* set the link rate on the sink */
705
tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
706
radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);
708
/* start training on the source */
709
if (ASIC_IS_DCE4(dp_info->rdev))
710
atombios_dig_encoder_setup(dp_info->encoder,
711
ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
713
radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
714
dp_info->dp_clock, dp_info->enc_id, 0);
716
/* disable the training pattern on the sink */
717
radeon_write_dpcd_reg(dp_info->radeon_connector,
718
DP_TRAINING_PATTERN_SET,
719
DP_TRAINING_PATTERN_DISABLE);
724
static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
728
/* disable the training pattern on the sink */
729
radeon_write_dpcd_reg(dp_info->radeon_connector,
730
DP_TRAINING_PATTERN_SET,
731
DP_TRAINING_PATTERN_DISABLE);
733
/* disable the training pattern on the source */
734
if (ASIC_IS_DCE4(dp_info->rdev))
735
atombios_dig_encoder_setup(dp_info->encoder,
736
ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
738
radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
739
dp_info->dp_clock, dp_info->enc_id, 0);
744
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
750
radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
751
memset(dp_info->train_set, 0, 4);
752
radeon_dp_update_vs_emph(dp_info);
657
756
/* clock recovery loop */
658
757
clock_recovery = false;
663
if (!atom_dp_get_link_status(radeon_connector, link_status))
761
if (dp_info->rd_interval == 0)
764
mdelay(dp_info->rd_interval * 4);
766
if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
666
if (dp_clock_recovery_ok(link_status, dig_connector->dp_lane_count)) {
769
if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
667
770
clock_recovery = true;
671
for (i = 0; i < dig_connector->dp_lane_count; i++) {
672
if ((train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
774
for (i = 0; i < dp_info->dp_lane_count; i++) {
775
if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
675
if (i == dig_connector->dp_lane_count) {
778
if (i == dp_info->dp_lane_count) {
676
779
DRM_ERROR("clock recovery reached max voltage\n");
680
if ((train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
783
if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
785
if (dp_info->tries == 5) {
683
786
DRM_ERROR("clock recovery tried 5 times\n");
689
voltage = train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
792
voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;
691
794
/* Compute new train_set as requested by sink */
692
dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set);
693
dp_update_dpvs_emph(radeon_connector, encoder, train_set);
795
dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
797
radeon_dp_update_vs_emph(dp_info);
799
if (!clock_recovery) {
696
800
DRM_ERROR("clock recovery failed\n");
698
803
DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
699
train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
700
(train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
804
dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
805
(dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
701
806
DP_TRAIN_PRE_EMPHASIS_SHIFT);
704
/* set training pattern 2 on the sink */
705
dp_set_training(radeon_connector, DP_TRAINING_PATTERN_2);
706
/* set training pattern 2 on the source */
707
if (ASIC_IS_DCE4(rdev))
708
atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2);
811
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
815
if (dp_info->tp3_supported)
816
radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
710
radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
711
dig_connector->dp_clock, enc_id, 1);
818
radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);
713
820
/* channel equalization loop */
715
822
channel_eq = false;
718
if (!atom_dp_get_link_status(radeon_connector, link_status))
824
if (dp_info->rd_interval == 0)
827
mdelay(dp_info->rd_interval * 4);
829
if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
721
if (dp_channel_eq_ok(link_status, dig_connector->dp_lane_count)) {
832
if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
722
833
channel_eq = true;
726
837
/* Try 5 times */
838
if (dp_info->tries > 5) {
728
839
DRM_ERROR("channel eq failed: 5 tries\n");
732
843
/* Compute new train_set as requested by sink */
733
dp_get_adjust_train(link_status, dig_connector->dp_lane_count, train_set);
734
dp_update_dpvs_emph(radeon_connector, encoder, train_set);
844
dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);
846
radeon_dp_update_vs_emph(dp_info);
740
851
DRM_ERROR("channel eq failed\n");
742
854
DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
743
train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
744
(train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
855
dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
856
(dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
745
857
>> DP_TRAIN_PRE_EMPHASIS_SHIFT);
747
/* disable the training pattern on the sink */
748
dp_set_training(radeon_connector, DP_TRAINING_PATTERN_DISABLE);
750
/* disable the training pattern on the source */
751
if (ASIC_IS_DCE4(rdev))
752
atombios_dig_encoder_setup(encoder, ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE);
754
radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
755
dig_connector->dp_clock, enc_id, 0);
758
int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
759
uint8_t write_byte, uint8_t *read_byte)
862
void radeon_dp_link_train(struct drm_encoder *encoder,
863
struct drm_connector *connector)
761
struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
762
struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
764
uint16_t address = algo_data->address;
767
int msg_len, dp_msg_len;
770
/* Set up the command byte */
771
if (mode & MODE_I2C_READ)
772
msg[2] = AUX_I2C_READ << 4;
774
msg[2] = AUX_I2C_WRITE << 4;
776
if (!(mode & MODE_I2C_STOP))
777
msg[2] |= AUX_I2C_MOT << 4;
780
msg[1] = address >> 8;
799
msg[3] = (dp_msg_len) << 4;
800
ret = radeon_process_aux_ch(auxch, msg, msg_len, reply, reply_bytes, 0);
804
*read_byte = reply[0];
865
struct drm_device *dev = encoder->dev;
866
struct radeon_device *rdev = dev->dev_private;
867
struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
868
struct radeon_encoder_atom_dig *dig;
869
struct radeon_connector *radeon_connector;
870
struct radeon_connector_atom_dig *dig_connector;
871
struct radeon_dp_link_train_info dp_info;
874
if (!radeon_encoder->enc_priv)
876
dig = radeon_encoder->enc_priv;
878
radeon_connector = to_radeon_connector(connector);
879
if (!radeon_connector->con_priv)
881
dig_connector = radeon_connector->con_priv;
883
if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
884
(dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
888
if (dig->dig_encoder)
889
dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
891
dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
893
dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
895
dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
897
dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
898
tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
899
if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
900
dp_info.tp3_supported = true;
902
dp_info.tp3_supported = false;
904
memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
906
dp_info.encoder = encoder;
907
dp_info.connector = connector;
908
dp_info.radeon_connector = radeon_connector;
909
dp_info.dp_lane_count = dig_connector->dp_lane_count;
910
dp_info.dp_clock = dig_connector->dp_clock;
912
if (radeon_dp_link_train_init(&dp_info))
914
if (radeon_dp_link_train_cr(&dp_info))
916
if (radeon_dp_link_train_ce(&dp_info))
919
if (radeon_dp_link_train_finish(&dp_info))