blob: 1405b29cb8b904da628e73bd37bc5349269550f1 [file] [log] [blame]
Michal Simekab2829a2022-02-23 15:52:02 +01001// SPDX-License-Identifier: GPL-2.0
2/*
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +02003 * Copyright (C) 2021 - 2022, Xilinx Inc.
4 * Copyright (C) 2022 - 2023, Advanced Micro Devices, Inc.
5 *
6 * Xilinx displayport(DP) Tx Subsytem driver
Michal Simekab2829a2022-02-23 15:52:02 +01007 */
8
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +02009#include <clk.h>
Michal Simekab2829a2022-02-23 15:52:02 +010010#include <cpu_func.h>
11#include <dm.h>
12#include <errno.h>
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +020013#include <generic-phy.h>
14#include <stdlib.h>
Michal Simekab2829a2022-02-23 15:52:02 +010015#include <video.h>
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +020016#include <wait_bit.h>
Michal Simekab2829a2022-02-23 15:52:02 +010017#include <dm/device_compat.h>
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +020018#include <asm/io.h>
19#include <linux/delay.h>
20#include <linux/ioport.h>
21#include <dm/device_compat.h>
22#include <asm/global_data.h>
23
24#include "zynqmp_dpsub.h"
25
26DECLARE_GLOBAL_DATA_PTR;
27
28/* Maximum supported resolution */
Michal Simek0f465a42023-05-17 10:42:12 +020029#define WIDTH 1024
30#define HEIGHT 768
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +020031
32static struct dp_dma dp_dma;
33static struct dp_dma_descriptor cur_desc __aligned(256);
34
35static void dma_init_video_descriptor(struct udevice *dev)
36{
37 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
38 struct dp_dma_frame_buffer *frame_buffer = &dp_sub->frame_buffer;
39
40 cur_desc.control = DPDMA_DESC_PREAMBLE | DPDMA_DESC_IGNR_DONE |
41 DPDMA_DESC_LAST_FRAME;
42 cur_desc.dscr_id = 0;
43 cur_desc.xfer_size = frame_buffer->size;
44 cur_desc.line_size_stride = ((frame_buffer->stride >> 4) <<
45 DPDMA_DESCRIPTOR_LINE_SIZE_STRIDE_SHIFT) |
46 (frame_buffer->line_size);
47 cur_desc.addr_ext = (((u32)(frame_buffer->address >>
48 DPDMA_DESCRIPTOR_SRC_ADDR_WIDTH) <<
49 DPDMA_DESCRIPTOR_ADDR_EXT_SRC_ADDR_EXT_SHIFT) |
50 (upper_32_bits((u64)&cur_desc)));
51 cur_desc.next_desr = lower_32_bits((u64)&cur_desc);
52 cur_desc.src_addr = lower_32_bits((u64)gd->fb_base);
53}
54
55static void dma_set_descriptor_address(struct udevice *dev)
56{
57 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
58
59 flush_dcache_range((u64)&cur_desc,
60 ALIGN(((u64)&cur_desc + sizeof(cur_desc)),
61 CONFIG_SYS_CACHELINE_SIZE));
62 writel(upper_32_bits((u64)&cur_desc), dp_sub->dp_dma->base_addr +
63 DPDMA_CH3_DSCR_STRT_ADDRE);
64 writel(lower_32_bits((u64)&cur_desc), dp_sub->dp_dma->base_addr +
65 DPDMA_CH3_DSCR_STRT_ADDR);
66}
67
/**
 * dma_setup_channel() - Prepare DPDMA channel 3 for the graphics stream
 * @dev: The DP device
 *
 * Build the frame-buffer descriptor and program its address into the
 * channel registers. The channel still needs to be enabled and triggered
 * separately (see dma_set_channel_state() and dma_trigger()).
 */
static void dma_setup_channel(struct udevice *dev)
{
	dma_init_video_descriptor(dev);
	dma_set_descriptor_address(dev);
}
Michal Simekab2829a2022-02-23 15:52:02 +010073
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +020074static void dma_set_channel_state(struct udevice *dev)
75{
76 u32 mask = 0, regval = 0;
77 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
78
79 mask = DPDMA_CH_CNTL_EN_MASK | DPDMA_CH_CNTL_PAUSE_MASK;
80 regval = DPDMA_CH_CNTL_EN_MASK;
81
82 clrsetbits_le32(dp_sub->dp_dma->base_addr + DPDMA_CH3_CNTL,
83 mask, regval);
84}
85
86static void dma_trigger(struct udevice *dev)
87{
88 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
89 u32 trigger;
90
91 trigger = DPDMA_GBL_TRG_CH3_MASK;
92 dp_sub->dp_dma->gfx.trigger_status = DPDMA_TRIGGER_DONE;
93 writel(trigger, dp_sub->dp_dma->base_addr + DPDMA_GBL);
94}
95
/**
 * dma_vsync_intr_handler() - VSync service routine for the graphics channel
 * @dev: The DP device
 *
 * On each vertical sync: rebuild and reprogram the frame descriptor,
 * re-enable the channel, retrigger the transfer, and finally acknowledge
 * the VSync interrupt so the next one can be observed.
 */
static void dma_vsync_intr_handler(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	dma_setup_channel(dev);
	dma_set_channel_state(dev);
	dma_trigger(dev);

	/* Clear VSync Interrupt */
	writel(DPDMA_ISR_VSYNC_INT_MASK, dp_sub->dp_dma->base_addr + DPDMA_ISR);
}
Michal Simekab2829a2022-02-23 15:52:02 +0100107
108/**
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +0200109 * wait_phy_ready() - Wait for the DisplayPort PHY to come out of reset
110 * @dev: The DP device
111 *
112 * Return: 0 if wait succeeded, -ve if error occurred
Michal Simekab2829a2022-02-23 15:52:02 +0100113 */
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +0200114static int wait_phy_ready(struct udevice *dev)
115{
116 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
117 u32 timeout = 100, phy_status;
118 u8 phy_ready_mask = DP_PHY_STATUS_RESET_LANE_0_DONE_MASK |
119 DP_PHY_STATUS_GT_PLL_LOCK_MASK;
120
121 /* Wait until the PHY is ready. */
122 do {
123 udelay(20);
124 phy_status = readl(dp_sub->base_addr + DP_PHY_STATUS);
125 phy_status &= phy_ready_mask;
126 /* Protect against an infinite loop. */
127 if (!timeout--)
128 return -ETIMEDOUT;
129 } while (phy_status != phy_ready_mask);
130
131 return 0;
132}
133
134static int init_dp_tx(struct udevice *dev)
135{
136 u32 status, phyval, regval, rate;
137 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
138
139 phyval = readl(dp_sub->base_addr + DP_PHY_CONFIG);
140 writel(DP_SOFT_RESET_EN, dp_sub->base_addr + DP_SOFT_RESET);
141 status = readl(dp_sub->base_addr + DP_SOFT_RESET);
142 writel(DP_DISABLE, dp_sub->base_addr + DP_ENABLE);
143
144 regval = (readl(dp_sub->base_addr + DP_AUX_CLK_DIVIDER) &
145 ~DP_AUX_CLK_DIVIDER_VAL_MASK) |
146 (60 << 8) |
147 (dp_sub->clock / 1000000);
148 writel(regval, dp_sub->base_addr + DP_AUX_CLK_DIVIDER);
149
150 writel(DP_PHY_CLOCK_SELECT_540GBPS, dp_sub->base_addr + DP_PHY_CLOCK_SELECT);
151
152 regval = phyval & ~DP_PHY_CONFIG_GT_ALL_RESET_MASK;
153 writel(regval, dp_sub->base_addr + DP_PHY_CONFIG);
154 status = wait_phy_ready(dev);
155 if (status)
156 return -EINVAL;
157
158 writel(DP_ENABLE, dp_sub->base_addr + DP_ENABLE);
159
160 rate = ~DP_INTR_HPD_PULSE_DETECTED_MASK & ~DP_INTR_HPD_EVENT_MASK
161 & ~DP_INTR_HPD_IRQ_MASK;
162 writel(rate, dp_sub->base_addr + DP_INTR_MASK);
163 return 0;
164}
165
166static int set_nonlive_gfx_format(struct udevice *dev, enum av_buf_video_format format)
167{
168 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
169 struct av_buf_vid_attribute *ptr = (struct av_buf_vid_attribute *)avbuf_supported_formats;
170
171 while (1) {
172 dev_dbg(dev, "Format %d\n", ptr->video_format);
173
174 if (!ptr->video_format)
175 return -EINVAL;
176
177 if (ptr->video_format == format) {
178 dp_sub->non_live_graphics = ptr;
179 break;
180 }
181 ptr++;
182 }
183 dev_dbg(dev, "Video format found. BPP %d\n", dp_sub->non_live_graphics->bpp);
184 return 0;
185}
186
187/* DP dma setup */
188static void set_qos(struct udevice *dev, u8 qos)
189{
190 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
191 u8 index;
192 u32 regval = 0, mask;
193
194 regval = (((u32)qos << DPDMA_CH_CNTL_QOS_DATA_RD_SHIFT) |
195 ((u32)qos << DPDMA_CH_CNTL_QOS_DSCR_RD_SHIFT) |
196 ((u32)qos << DPDMA_CH_CNTL_QOS_DSCR_WR_SHIFT));
197
198 mask = DPDMA_CH_CNTL_QOS_DATA_RD_MASK |
199 DPDMA_CH_CNTL_QOS_DSCR_RD_MASK |
200 DPDMA_CH_CNTL_QOS_DSCR_WR_MASK;
201 for (index = 0; index <= DPDMA_AUDIO_CHANNEL1; index++) {
202 clrsetbits_le32(dp_sub->dp_dma->base_addr +
203 DPDMA_CH0_CNTL +
204 (DPDMA_CH_OFFSET * (u32)index),
205 mask, regval);
206 }
207}
208
/**
 * enable_gfx_buffers() - Flush and optionally enable the graphics channel buffer
 * @dev: The DP device
 * @enable: Non-zero to enable the channel buffer after flushing it
 *
 * Always flushes AVBUF channel buffer 3 (the graphics channel) with the
 * maximum burst length, then re-enables it when @enable is set.
 */
static void enable_gfx_buffers(struct udevice *dev, u8 enable)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 regval = 0;

	regval = (0xF << AVBUF_CHBUF3_BURST_LEN_SHIFT) |
		 AVBUF_CHBUF3_FLUSH_MASK;
	writel(regval, dp_sub->base_addr + AVBUF_CHBUF3);
	if (enable) {
		/*
		 * NOTE(review): the enable bit used here is the CHBUF0 mask
		 * while the write targets the CHBUF3 register — presumably
		 * the EN bit sits at the same position in every CHBUFx
		 * register; confirm against the AVBUF register map.
		 */
		regval = (0xF << AVBUF_CHBUF3_BURST_LEN_SHIFT) |
			 AVBUF_CHBUF0_EN_MASK;
		writel(regval, dp_sub->base_addr + AVBUF_CHBUF3);
	}
}
223
224static void avbuf_video_select(struct udevice *dev, enum av_buf_video_stream vid_stream,
225 enum av_buf_gfx_stream gfx_stream)
226{
227 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
228
229 dp_sub->av_mode.video_src = vid_stream;
230 dp_sub->av_mode.gfx_src = gfx_stream;
231
232 clrsetbits_le32(dp_sub->base_addr +
233 AVBUF_BUF_OUTPUT_AUD_VID_SELECT,
234 AVBUF_BUF_OUTPUT_AUD_VID_SELECT_VID_STREAM2_SEL_MASK |
235 AVBUF_BUF_OUTPUT_AUD_VID_SELECT_VID_STREAM1_SEL_MASK,
236 vid_stream | gfx_stream);
237}
238
239static void config_gfx_pipeline(struct udevice *dev)
240{
241 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
242 u16 *csc_matrix, *offset_matrix;
243 u32 regval = 0, index = 0, *scaling_factors = NULL;
244 u16 rgb_coeffs[] = { 0x1000, 0x0000, 0x0000,
245 0x0000, 0x1000, 0x0000,
246 0x0000, 0x0000, 0x1000 };
247 u16 rgb_offset[] = { 0x0000, 0x0000, 0x0000 };
248 struct av_buf_vid_attribute *video = dp_sub->non_live_graphics;
249
250 scaling_factors = video->sf;
251
252 clrsetbits_le32(dp_sub->base_addr + AVBUF_BUF_FORMAT,
253 AVBUF_BUF_FORMAT_NL_GRAPHX_FORMAT_MASK,
254 (video->value) << AVBUF_BUF_FORMAT_NL_GRAPHX_FORMAT_SHIFT);
255
256 for (index = 0; index < 3; index++) {
257 writel(scaling_factors[index], dp_sub->base_addr +
258 AVBUF_BUF_GRAPHICS_COMP0_SCALE_FACTOR + (index * 4));
259 }
260 regval = (video->is_rgb << AVBUF_V_BLEND_LAYER0_CONTROL_RGB_MODE_SHIFT) |
261 video->sampling_en;
262 writel(regval, dp_sub->base_addr + AVBUF_V_BLEND_LAYER1_CONTROL);
263
264 if (video->is_rgb) {
265 csc_matrix = rgb_coeffs;
266 offset_matrix = rgb_offset;
267 }
268 /* Program Colorspace conversion coefficients */
269 for (index = 9; index < 12; index++) {
270 writel(offset_matrix[index - 9], dp_sub->base_addr +
271 AVBUF_V_BLEND_IN2CSC_COEFF0 + (index * 4));
272 }
273
274 /* Program Colorspace conversion matrix */
275 for (index = 0; index < 9; index++) {
276 writel(csc_matrix[index], dp_sub->base_addr +
277 AVBUF_V_BLEND_IN2CSC_COEFF0 + (index * 4));
278 }
279}
280
281static void set_blender_alpha(struct udevice *dev, u8 alpha, u8 enable)
282{
283 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
284 u32 regval;
285
286 regval = enable;
287 regval |= alpha << AVBUF_V_BLEND_SET_GLOBAL_ALPHA_REG_VALUE_SHIFT;
288 writel(regval, dp_sub->base_addr +
289 AVBUF_V_BLEND_SET_GLOBAL_ALPHA_REG);
290}
291
292static void config_output_video(struct udevice *dev)
293{
294 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
295 u32 regval = 0, index;
296 u16 rgb_coeffs[] = { 0x1000, 0x0000, 0x0000,
297 0x0000, 0x1000, 0x0000,
298 0x0000, 0x0000, 0x1000 };
299 u16 rgb_offset[] = { 0x0000, 0x0000, 0x0000 };
300 u16 *matrix_coeff = rgb_coeffs, *matrix_offset = rgb_offset;
301
302 struct av_buf_vid_attribute *output_video = dp_sub->non_live_graphics;
303
304 regval |= output_video->sampling_en <<
305 AVBUF_V_BLEND_OUTPUT_VID_FORMAT_EN_DOWNSAMPLE_SHIFT;
306 regval |= output_video->value;
307 writel(regval, dp_sub->base_addr + AVBUF_V_BLEND_OUTPUT_VID_FORMAT);
308
309 for (index = 0; index < 9; index++) {
310 writel(matrix_coeff[index], dp_sub->base_addr +
311 AVBUF_V_BLEND_RGB2YCBCR_COEFF0 + (index * 4));
312 }
313
314 for (index = 0; index < 3; index++) {
315 writel((matrix_offset[index] <<
316 AVBUF_V_BLEND_LUMA_IN1CSC_OFFSET_POST_OFFSET_SHIFT),
317 dp_sub->base_addr +
318 AVBUF_V_BLEND_LUMA_OUTCSC_OFFSET
319 + (index * 4));
320 }
321
322 set_blender_alpha(dev, 0, 0);
323}
324
325static void config_msa_sync_clk_mode(struct udevice *dev, u8 enable)
326{
327 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
328 struct main_stream_attributes *msa_config;
329
330 msa_config = &dp_sub->msa_config;
331 msa_config->synchronous_clock_mode = enable;
332
333 if (enable == 1) {
334 msa_config->misc0 |= (1 <<
335 DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_SHIFT);
336 } else {
337 msa_config->misc0 &= ~(1 <<
338 DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_SHIFT);
339 }
340}
341
342static void av_buf_soft_reset(struct udevice *dev)
343{
344 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
345
346 writel(AVBUF_BUF_SRST_REG_VID_RST_MASK,
347 dp_sub->base_addr + AVBUF_BUF_SRST_REG);
348 writel(0, dp_sub->base_addr + AVBUF_BUF_SRST_REG);
349}
350
/**
 * set_video_clk_source() - Select the AV buffer video/audio clock sources
 * @dev: The DP device
 * @video_clk: Video clock source to program
 * @audio_clk: Audio clock source to program
 *
 * When neither stream is live the internal video timing source bit is
 * set; when either stream is live the video clock is forced to the PL
 * clock. The AV buffer is soft-reset afterwards so the new selection
 * takes effect.
 */
static void set_video_clk_source(struct udevice *dev, u8 video_clk, u8 audio_clk)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 regval = 0;

	if (dp_sub->av_mode.video_src != AVBUF_VIDSTREAM1_LIVE &&
	    dp_sub->av_mode.gfx_src != AVBUF_VIDSTREAM2_LIVE_GFX) {
		/* No live stream: use the internally generated video timing */
		regval = 1 << AVBUF_BUF_AUD_VID_CLK_SOURCE_VID_TIMING_SRC_SHIFT;
	} else if (dp_sub->av_mode.video_src == AVBUF_VIDSTREAM1_LIVE ||
		   dp_sub->av_mode.gfx_src == AVBUF_VIDSTREAM2_LIVE_GFX) {
		/* A live stream overrides the requested video clock */
		video_clk = AVBUF_PL_CLK;
	}

	regval |= (video_clk << AVBUF_BUF_AUD_VID_CLK_SOURCE_VID_CLK_SRC_SHIFT) |
		  (audio_clk << AVBUF_BUF_AUD_VID_CLK_SOURCE_AUD_CLK_SRC_SHIFT);
	writel(regval, dp_sub->base_addr + AVBUF_BUF_AUD_VID_CLK_SOURCE);

	av_buf_soft_reset(dev);
}
370
371static int init_dpdma_subsys(struct udevice *dev)
372{
373 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
374
375 dp_sub->dp_dma->base_addr = DPDMA_BASE_ADDRESS;
376 dp_sub->dp_dma->gfx.channel.cur = NULL;
377 dp_sub->dp_dma->gfx.trigger_status = DPDMA_TRIGGER_DONE;
378
379 set_qos(dev, 11);
380 return 0;
381}
382
383/**
384 * is_dp_connected() - Check if there is a connected RX device
385 * @dev: The DP device
386 *
387 *
388 * Return: true if a connected RX device was detected, false otherwise
389 */
390static bool is_dp_connected(struct udevice *dev)
391{
392 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
393 u32 status;
394 u8 retries = 0;
395
396 do {
397 status = readl(dp_sub->base_addr +
398 DP_INTERRUPT_SIG_STATE)
399 & DP_INTERRUPT_SIG_STATE_HPD_STATE_MASK;
400
401 if (retries > DP_IS_CONNECTED_MAX_TIMEOUT_COUNT)
402 return 0;
403
404 retries++;
405 udelay(1000);
406 } while (status == 0);
407
408 return 1;
409}
410
/**
 * aux_wait_ready() - Wait until another request is no longer in progress
 * @dev: The DP device
 *
 * Poll every 20us, for up to ~100 iterations, until the reply-in-progress
 * flag deasserts.
 *
 * NOTE(review): the register read is DP_INTERRUPT_SIG_STATE while the
 * mask tested is a DP_REPLY_STATUS_* one — presumably the bit positions
 * coincide; confirm against the DP TX register map.
 *
 * Return: 0 if wait succeeded, -ve if error occurred
 */
static int aux_wait_ready(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 status, timeout = 100;

	do {
		status = readl(dp_sub->base_addr +
			       DP_INTERRUPT_SIG_STATE);
		/* Bound the number of polls */
		if (!timeout--)
			return -ETIMEDOUT;

		udelay(20);
	} while (status & DP_REPLY_STATUS_REPLY_IN_PROGRESS_MASK);

	return 0;
}
433
434/**
435 * aux_wait_reply() - Wait for reply on AUX channel
436 * @dev: The DP device
437 *
438 * Wait for a reply indicating that the most recent AUX request
439 * has been received by the RX device.
440 *
441 * Return: 0 if wait succeeded, -ve if error occurred
442 */
443static int aux_wait_reply(struct udevice *dev)
444{
445 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
446 u32 timeout = DP_AUX_MAX_WAIT, status;
447
448 while (timeout > 0) {
449 status = readl(dp_sub->base_addr + DP_REPLY_STATUS);
450 if (status & DP_REPLY_STATUS_REPLY_ERROR_MASK)
451 return -ETIMEDOUT;
452
453 if ((status & DP_REPLY_STATUS_REPLY_RECEIVED_MASK) &&
454 !(status & DP_REPLY_STATUS_REQUEST_IN_PROGRESS_MASK) &&
455 !(status & DP_REPLY_STATUS_REPLY_IN_PROGRESS_MASK)) {
456 return 0;
457 }
458 timeout--;
459 udelay(20);
460 }
461 return -ETIMEDOUT;
462}
463
464/**
465 * aux_request_send() - Send request on the AUX channel
466 * @dev: The DP device
467 * @request: The request to send
468 *
469 * Submit the supplied AUX request to the RX device over the AUX
470 * channel by writing the command, the destination address, (the write buffer
471 * for write commands), and the data size to the DisplayPort TX core.
472 *
473 * This is the lower-level sending routine, which is called by aux_request().
474 *
475 * Return: 0 if request was sent successfully, -ve on error
476 */
477static int aux_request_send(struct udevice *dev, struct aux_transaction *request)
478{
479 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
480 u32 timeout_count = 0, status;
481 u8 index;
482
483 do {
484 status = readl(dp_sub->base_addr +
485 DP_REPLY_STATUS);
486
487 udelay(20);
488 timeout_count++;
489 if (timeout_count >= DP_AUX_MAX_TIMEOUT_COUNT)
490 return -ETIMEDOUT;
491
492 } while ((status & DP_REPLY_STATUS_REQUEST_IN_PROGRESS_MASK) ||
493 (status & DP_REPLY_STATUS_REPLY_IN_PROGRESS_MASK));
494 /* Set the address for the request. */
495 writel(request->address, dp_sub->base_addr + DP_AUX_ADDRESS);
496
497 if (request->cmd_code == DP_AUX_CMD_WRITE ||
498 request->cmd_code == DP_AUX_CMD_I2C_WRITE ||
499 request->cmd_code == DP_AUX_CMD_I2C_WRITE_MOT) {
500 /* Feed write data into the DisplayPort TX core's write FIFO. */
501 for (index = 0; index < request->num_bytes; index++) {
502 writel(request->data[index],
503 dp_sub->base_addr +
504 DP_AUX_WRITE_FIFO);
505 }
506 }
507
508 status = ((request->cmd_code << DP_AUX_CMD_SHIFT) |
509 ((request->num_bytes - 1) &
510 DP_AUX_CMD_NBYTES_TRANSFER_MASK));
511
512 /* Submit the command and the data size. */
513 writel(((request->cmd_code << DP_AUX_CMD_SHIFT) |
514 ((request->num_bytes - 1) & DP_AUX_CMD_NBYTES_TRANSFER_MASK)),
515 dp_sub->base_addr + DP_AUX_CMD);
516
517 /* Check for a reply from the RX device to the submitted request. */
518 status = aux_wait_reply(dev);
519 if (status)
520 /* Waiting for a reply timed out. */
521 return -ETIMEDOUT;
522
523 /* Analyze the reply. */
524 status = readl(dp_sub->base_addr + DP_AUX_REPLY_CODE);
525 if (status == DP_AUX_REPLY_CODE_DEFER ||
526 status == DP_AUX_REPLY_CODE_I2C_DEFER) {
527 /* The request was deferred. */
528 return -EAGAIN;
529 } else if (status == DP_AUX_REPLY_CODE_NACK ||
530 status == DP_AUX_REPLY_CODE_I2C_NACK) {
531 /* The request was not acknowledged. */
532 return -EIO;
533 }
534
535 /* The request was acknowledged. */
536 if (request->cmd_code == DP_AUX_CMD_READ ||
537 request->cmd_code == DP_AUX_CMD_I2C_READ ||
538 request->cmd_code == DP_AUX_CMD_I2C_READ_MOT) {
539 /* Wait until all data has been received. */
540 timeout_count = 0;
541 do {
542 status = readl(dp_sub->base_addr +
543 DP_REPLY_DATA_COUNT);
544 udelay(100);
545 timeout_count++;
546 if (timeout_count >= DP_AUX_MAX_TIMEOUT_COUNT)
547 return -ETIMEDOUT;
548 } while (status != request->num_bytes);
549
550 /* Obtain the read data from the reply FIFO. */
551 for (index = 0; index < request->num_bytes; index++) {
552 request->data[index] = readl(dp_sub->base_addr +
553 DP_AUX_REPLY_DATA);
554 }
555 }
556 return 0;
557}
558
559/**
560 * aux_request() - Submit request on the AUX channel
561 * @dev: The DP device
562 * @request: The request to submit
563 *
564 * Submit the supplied AUX request to the RX device over the AUX
565 * channel. If waiting for a reply times out, or if the DisplayPort TX core
566 * indicates that the request was deferred, the request is sent again (up to a
567 * maximum specified by DP_AUX_MAX_DEFER_COUNT|DP_AUX_MAX_TIMEOUT_COUNT).
568 *
569 * Return: 0 if request was submitted successfully, -ve on error
570 */
571static int aux_request(struct udevice *dev, struct aux_transaction *request)
572{
573 u32 status, defer_count = 0, timeout_count = 0;
574
575 do {
576 status = aux_wait_ready(dev);
577 if (status) {
578 /* The RX device isn't ready yet. */
579 timeout_count++;
580 continue;
581 }
582 /* Send the request. */
583 status = aux_request_send(dev, request);
584 if (status == -EAGAIN) {
585 /* The request was deferred. */
586 defer_count++;
587 } else if (status == -ETIMEDOUT) {
588 /* Waiting for a reply timed out. */
589 timeout_count++;
590 } else {
591 return status;
592 }
593
594 udelay(100);
595 } while ((defer_count < DP_AUX_MAX_DEFER_COUNT) &&
596 (timeout_count < DP_AUX_MAX_TIMEOUT_COUNT));
597
598 /* The request was not successfully received by the RX device. */
599 return -ETIMEDOUT;
600}
601
/**
 * aux_common() - Common (read/write) AUX communication transmission
 * @dev: The DP device
 * @cmd_type: Command code of the transaction
 * @address: The DPCD address of the transaction
 * @num_bytes: Number of bytes in the payload data
 * @data: The payload data of the AUX command
 *
 * Common sequence of submitting an AUX command for AUX read, AUX write,
 * I2C-over-AUX read, and I2C-over-AUX write transactions. If required, the
 * reads and writes are split into multiple requests, each acting on a maximum
 * of 16 bytes.
 *
 * Return: 0 if OK, -ve on error
 */
static int aux_common(struct udevice *dev, u32 cmd_type, u32 address,
		      u32 num_bytes, u8 *data)
{
	u32 status, bytes_left;
	struct aux_transaction request;

	/* Without a detected sink there is nothing to talk to. */
	if (!is_dp_connected(dev))
		return -ENODEV;

	/*
	 * Set the start address for AUX transactions. For I2C transactions,
	 * this is the address of the I2C bus.
	 */
	request.address = address;
	bytes_left = num_bytes;
	while (bytes_left > 0) {
		request.cmd_code = cmd_type;

		if (cmd_type == DP_AUX_CMD_READ ||
		    cmd_type == DP_AUX_CMD_WRITE) {
			/* Increment address for normal AUX transactions. */
			request.address = address + (num_bytes - bytes_left);
		}

		/* Increment the pointer to the supplied data buffer. */
		request.data = &data[num_bytes - bytes_left];

		/* Each AUX transaction carries at most 16 bytes. */
		if (bytes_left > 16)
			request.num_bytes = 16;
		else
			request.num_bytes = bytes_left;

		bytes_left -= request.num_bytes;

		if (cmd_type == DP_AUX_CMD_I2C_READ && bytes_left > 0) {
			/*
			 * Middle of a transaction I2C read request. Override
			 * the command code that was set to CmdType.
			 */
			request.cmd_code = DP_AUX_CMD_I2C_READ_MOT;
		} else if (cmd_type == DP_AUX_CMD_I2C_WRITE && bytes_left > 0) {
			/*
			 * Middle of a transaction I2C write request. Override
			 * the command code that was set to CmdType.
			 */
			request.cmd_code = DP_AUX_CMD_I2C_WRITE_MOT;
		}

		status = aux_request(dev, &request);
		if (status)
			return status;
	}
	return 0;
}
671
/**
 * aux_write() - Issue AUX write request
 * @dev: The DP device
 * @dpcd_address: The DPCD address to write to
 * @bytes_to_write: Number of bytes to write
 * @write_data: Buffer containing data to be written
 *
 * Issue a write request over the AUX channel that will write to
 * the RX device's DisplayPort Configuration data (DPCD) address space. The
 * write message will be divided into multiple transactions which write a
 * maximum of 16 bytes each.
 *
 * Return: 0 if write operation was successful, -ve on error
 */
static int aux_write(struct udevice *dev, u32 dpcd_address, u32 bytes_to_write,
		     void *write_data)
{
	return aux_common(dev, DP_AUX_CMD_WRITE, dpcd_address,
			  bytes_to_write, (u8 *)write_data);
}
692
693/**
694 * aux_read() - Issue AUX read request
695 * @dev: The DP device
696 * @dpcd_address: The DPCD address to read from
697 * @bytes_to_read: Number of bytes to read
698 * @read_data: Buffer to receive the read data
699 *
700 * Issue a read request over the AUX channel that will read from the RX
701 * device's DisplayPort Configuration data (DPCD) address space. The read
702 * message will be divided into multiple transactions which read a maximum of
703 * 16 bytes each.
704 *
705 * Return: 0 if read operation was successful, -ve on error
706 */
707static int aux_read(struct udevice *dev, u32 dpcd_address, u32 bytes_to_read, void *read_data)
708{
709 return aux_common(dev, DP_AUX_CMD_READ, dpcd_address,
710 bytes_to_read, (u8 *)read_data);
711}
712
713static int dp_tx_wakeup(struct udevice *dev)
714{
715 u32 status;
716 u8 aux_data;
717
718 aux_data = 0x1;
719 status = aux_write(dev, DP_DPCD_SET_POWER_DP_PWR_VOLTAGE, 1, &aux_data);
720 if (status)
721 debug("! 1st power wake-up - AUX write failed.\n");
722 status = aux_write(dev, DP_DPCD_SET_POWER_DP_PWR_VOLTAGE, 1, &aux_data);
723 if (status)
724 debug("! 2nd power wake-up - AUX write failed.\n");
725
726 return status;
727}
728
/**
 * enable_main_link() - Enable or disable the main link for a device
 * @dev: The DP device
 * @enable: 1 to enable the main stream, 0 to disable it
 */
static void enable_main_link(struct udevice *dev, u8 enable)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	/* Reset the scrambler. */
	writel(1, dp_sub->base_addr + DP_FORCE_SCRAMBLER_RESET);
	/* Enable/disable the main stream as requested. */
	writel(enable, dp_sub->base_addr + DP_ENABLE_MAIN_STREAM);
}
742
743/**
744 * get_rx_capabilities() - Check if capabilities of RX device are valid for TX
745 * device
746 * @dev: The DP device
747 *
748 * Return: 0 if the capabilities of the RX device are valid for the TX device,
749 * -ve if not, of an error occurred during capability determination
750 */
751static int get_rx_capabilities(struct udevice *dev)
752{
753 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
754 u8 rx_max_link_rate, rx_max_lane_count, *dpcd = NULL;
755 u32 status;
756 struct link_config *link_config = NULL;
757
758 dpcd = dp_sub->dpcd_rx_caps;
759 link_config = &dp_sub->link_config;
760
761 status = aux_read(dev, DP_DPCD_RECEIVER_CAP_FIELD_START, 16, dpcd);
762 if (status)
763 return status;
764
765 rx_max_link_rate = dpcd[DP_DPCD_MAX_LINK_RATE];
766 rx_max_lane_count = dpcd[DP_DPCD_MAX_LANE_COUNT] & DP_DPCD_MAX_LANE_COUNT_MASK;
767 link_config->max_link_rate = (rx_max_link_rate > DP_0_LINK_RATE) ?
768 DP_0_LINK_RATE : rx_max_link_rate;
769 link_config->max_lane_count = (rx_max_lane_count > DP_0_LANE_COUNT) ?
770 DP_0_LANE_COUNT : rx_max_lane_count;
771 link_config->support_enhanced_framing_mode = dpcd[DP_DPCD_MAX_LANE_COUNT] &
772 DP_DPCD_ENHANCED_FRAME_SUPPORT_MASK;
773 link_config->support_downspread_control = dpcd[DP_DPCD_MAX_DOWNSPREAD] &
774 DP_DPCD_MAX_DOWNSPREAD_MASK;
775
776 return 0;
777}
778
779/**
780 * set_enhanced_frame_mode() - Enable/Disable enhanced frame mode
781 * @dev: The DP device
782 * @enable: Flag to determine whether to enable (1) or disable (0) the enhanced
783 * frame mode
784 *
785 * Enable or disable the enhanced framing symbol sequence for
786 * both the DisplayPort TX core and the RX device.
787 *
788 * Return: 0 if enabling/disabling the enhanced frame mode was successful, -ve
789 * on error
790 */
791static int set_enhanced_frame_mode(struct udevice *dev, u8 enable)
792{
793 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
794 u32 status;
795 u8 regval;
796
797 dp_sub->link_config.enhanced_framing_mode = enable;
798 /* Write enhanced frame mode enable to the DisplayPort TX core. */
799 writel(dp_sub->link_config.enhanced_framing_mode,
800 dp_sub->base_addr + DP_ENHANCED_FRAME_EN);
801
802 /* Preserve the current RX device settings. */
803 status = aux_read(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
804 if (status)
805 return status;
806
807 if (dp_sub->link_config.enhanced_framing_mode)
808 regval |= DP_DPCD_ENHANCED_FRAME_EN_MASK;
809 else
810 regval &= ~DP_DPCD_ENHANCED_FRAME_EN_MASK;
811
812 /* Write enhanced frame mode enable to the RX device. */
813 return aux_write(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
814}
815
816/**
817 * set_lane_count() - Set the lane count
818 * @dev: The DP device
819 * @lane_count: Lane count to set
820 *
821 * Set the number of lanes to be used by the main link for both
822 * the DisplayPort TX core and the RX device.
823 *
824 * Return: 0 if setting the lane count was successful, -ve on error
825 */
826static int set_lane_count(struct udevice *dev, u8 lane_count)
827{
828 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
829 u32 status;
830 u8 regval;
831
832 dp_sub->link_config.lane_count = lane_count;
833 /* Write the new lane count to the DisplayPort TX core. */
834 writel(dp_sub->link_config.lane_count,
835 dp_sub->base_addr + DP_LANE_COUNT_SET);
836
837 /* Preserve the current RX device settings. */
838 status = aux_read(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
839 if (status)
840 return status;
841
842 regval &= ~DP_DPCD_LANE_COUNT_SET_MASK;
843 regval |= dp_sub->link_config.lane_count;
844
845 /* Write the new lane count to the RX device. */
846 return aux_write(dev, DP_DPCD_LANE_COUNT_SET, 0x1, &regval);
847}
848
849/**
850 * set_clk_speed() - Set DP phy clock speed
851 * @dev: The DP device
852 * @speed: The clock frquency to set (one of PHY_CLOCK_SELECT_*)
853 *
854 * Set the clock frequency for the DisplayPort PHY corresponding to a desired
855 * data rate.
856 *
857 * Return: 0 if setting the DP phy clock speed was successful, -ve on error
858 */
859static int set_clk_speed(struct udevice *dev, u32 speed)
860{
861 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
862 u32 regval;
863
864 /* Disable the DisplayPort TX core first. */
865 regval = readl(dp_sub->base_addr + DP_ENABLE);
866 writel(0, dp_sub->base_addr + DP_ENABLE);
867
868 /* Change speed of the feedback clock. */
869 writel(speed, dp_sub->base_addr + DP_PHY_CLOCK_SELECT);
870
871 /* Re-enable the DisplayPort TX core if it was previously enabled. */
872 if (regval)
873 writel(regval, dp_sub->base_addr + DP_ENABLE);
874
875 /* Wait until the PHY is ready. */
876 return wait_phy_ready(dev);
877}
878
879/**
880 * set_link_rate() - Set the link rate
881 * @dev: The DP device
882 * @link_rate: The link rate to set (one of LINK_BW_SET_*)
883 *
884 * Set the data rate to be used by the main link for both the DisplayPort TX
885 * core and the RX device.
886 *
887 * Return: 0 if setting the link rate was successful, -ve on error
888 */
889static int set_link_rate(struct udevice *dev, u8 link_rate)
890{
891 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
892 u32 status;
893
894 /* Write a corresponding clock frequency to the DisplayPort TX core. */
895 switch (link_rate) {
896 case DP_LINK_BW_SET_162GBPS:
897 status = set_clk_speed(dev, DP_PHY_CLOCK_SELECT_162GBPS);
898 break;
899 case DP_LINK_BW_SET_270GBPS:
900 status = set_clk_speed(dev, DP_PHY_CLOCK_SELECT_270GBPS);
901 break;
902 case DP_LINK_BW_SET_540GBPS:
903 status = set_clk_speed(dev, DP_PHY_CLOCK_SELECT_540GBPS);
904 break;
905 default:
906 status = -EINVAL;
907 break;
908 }
909 if (status)
910 return status;
911
912 dp_sub->link_config.link_rate = link_rate;
913 /* Write new link rate to the DisplayPort TX core. */
914 writel(dp_sub->link_config.link_rate,
915 dp_sub->base_addr +
916 DP_LINK_BW_SET);
917
918 /* Write new link rate to the RX device. */
919 return aux_write(dev, DP_DPCD_LINK_BW_SET, 0x1,
920 &dp_sub->link_config.link_rate);
921}
922
/**
 * set_downspread() - Enable/disable link downspreading
 * @dev: The DP device
 * @enable: 1 to enable downspreading, 0 to disable it
 *
 * Mirror the downspread setting into the DisplayPort TX core and into
 * the sink's DOWNSPREAD_CTRL DPCD register, preserving the register's
 * other bits.
 *
 * Return: 0 on success, -ve if an AUX access failed
 */
static int set_downspread(struct udevice *dev, u8 enable)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 status;
	u8 regval;

	dp_sub->link_config.support_downspread_control = enable;
	/* Write downspread enable to the DisplayPort TX core. */
	writel(dp_sub->link_config.support_downspread_control,
	       dp_sub->base_addr + DP_DOWNSPREAD_CTRL);

	/* Preserve the current RX device settings. */
	status = aux_read(dev, DP_DPCD_DOWNSPREAD_CTRL, 0x1, &regval);
	if (status)
		return status;

	if (dp_sub->link_config.support_downspread_control)
		regval |= DP_DPCD_SPREAD_AMP_MASK;
	else
		regval &= ~DP_DPCD_SPREAD_AMP_MASK;

	/* Write downspread enable to the RX device. */
	return aux_write(dev, DP_DPCD_DOWNSPREAD_CTRL, 0x1, &regval);
}
947
/**
 * set_serdes_vswing_preemp() - Push vswing/pre-emphasis levels into the SERDES
 * @dev: The DP device
 *
 * For every active lane, write the margining (voltage swing) and
 * de-emphasis values selected by the current vs/pe levels — indexed into
 * the global vs[][] and pe[][] lookup tables — into the SERDES lane
 * registers.
 */
static void set_serdes_vswing_preemp(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u8 index;
	u8 vs_level_rx = dp_sub->link_config.vs_level;
	u8 pe_level_rx = dp_sub->link_config.pe_level;

	for (index = 0; index < dp_sub->link_config.lane_count; index++) {
		/* Write new voltage swing levels to the TX registers. */
		writel(vs[pe_level_rx][vs_level_rx], (ulong)SERDES_BASEADDR +
		       SERDES_L0_TX_MARGININGF + index * SERDES_LANE_OFFSET);
		/* Write new pre-emphasis levels to the TX registers. */
		writel(pe[pe_level_rx][vs_level_rx], (ulong)SERDES_BASEADDR +
		       SERDES_L0_TX_DEEMPHASIS + index * SERDES_LANE_OFFSET);
	}
}
964
965/**
966 * set_vswing_preemp() - Build AUX data to set voltage swing and pre-emphasis
967 * @dev: The DP device
968 * @aux_data: Buffer to receive the built AUX data
969 *
970 * Build AUX data to set current voltage swing and pre-emphasis level settings;
971 * the necessary data is taken from the link_config structure.
972 */
973static void set_vswing_preemp(struct udevice *dev, u8 *aux_data)
974{
975 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
976 u8 data = 0;
977 u8 vs_level_rx = dp_sub->link_config.vs_level;
978 u8 pe_level_rx = dp_sub->link_config.pe_level;
979
980 if (vs_level_rx >= DP_MAXIMUM_VS_LEVEL)
981 data |= DP_DPCD_TRAINING_LANEX_SET_MAX_VS_MASK;
982
983 /* The maximum pre-emphasis level has been reached. */
984 if (pe_level_rx >= DP_MAXIMUM_PE_LEVEL)
985 data |= DP_DPCD_TRAINING_LANEX_SET_MAX_PE_MASK;
986
987 /* Set up the data buffer for writing to the RX device. */
988 data |= (pe_level_rx << DP_DPCD_TRAINING_LANEX_SET_PE_SHIFT) |
989 vs_level_rx;
990 memset(aux_data, data, 4);
991
992 set_serdes_vswing_preemp(dev);
993}
994
/**
 * set_training_pattern() - Set the link training pattern on TX and RX
 * @dev: The DP device
 * @pattern: One of the TRAINING_PATTERN_SET_* values
 *
 * Program the DisplayPort TX core to transmit the given training pattern and
 * inform the RX device of it over AUX. Scrambling is disabled while
 * TP1/TP2/TP3 are active and re-enabled when patterns are turned off, as
 * required by the DP specification.
 *
 * Return: 0 on success, -ve if the AUX write to the RX device failed
 */
static int set_training_pattern(struct udevice *dev, u32 pattern)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u8 aux_data[5];

	/* Start transmitting the requested pattern from the TX core. */
	writel(pattern, dp_sub->base_addr + TRAINING_PATTERN_SET);

	aux_data[0] = pattern;
	switch (pattern) {
	case TRAINING_PATTERN_SET_OFF:
		/* Training is over: re-enable the scrambler. */
		writel(0, dp_sub->base_addr + SCRAMBLING_DISABLE);
		dp_sub->link_config.scrambler_en = 1;
		break;
	case TRAINING_PATTERN_SET_TP1:
	case TRAINING_PATTERN_SET_TP2:
	case TRAINING_PATTERN_SET_TP3:
		/* Training patterns are transmitted unscrambled on both ends. */
		aux_data[0] |= DP_DPCD_TP_SET_SCRAMB_DIS_MASK;
		writel(1, dp_sub->base_addr + SCRAMBLING_DISABLE);
		dp_sub->link_config.scrambler_en = 0;
		break;
	default:
		break;
	}
	/*
	 * Make the adjustments to both the DisplayPort TX core and the RX
	 * device.
	 */
	set_vswing_preemp(dev, &aux_data[1]);
	/*
	 * Write the voltage swing and pre-emphasis levels for each lane to the
	 * RX device. When switching patterns off only the pattern byte is
	 * written; otherwise the four per-lane drive settings follow it.
	 */
	if (pattern == TRAINING_PATTERN_SET_OFF)
		return aux_write(dev, DP_DPCD_TP_SET, 1, aux_data);
	else
		return aux_write(dev, DP_DPCD_TP_SET, 5, aux_data);
}
1032
1033static int get_lane_status_adj_reqs(struct udevice *dev)
1034{
1035 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1036 u32 status;
1037 u8 aux_data[8];
1038
1039 status = aux_read(dev, DP_DPCD_SINK_COUNT, 8, aux_data);
1040 if (status)
1041 return status;
1042
1043 /* Save XDPPSU_DPCD_SINK_COUNT contents. */
1044 dp_sub->sink_count =
1045 ((aux_data[0] & DP_DPCD_SINK_COUNT_HIGH_MASK) >>
1046 DP_DPCD_SINK_COUNT_HIGH_LOW_SHIFT) |
1047 (aux_data[0] & DP_DPCD_SINK_COUNT_LOW_MASK);
1048 memcpy(dp_sub->lane_status_ajd_reqs, &aux_data[2], 6);
1049 return 0;
1050}
1051
1052/**
1053 * check_clock_recovery() - Check clock recovery success
1054 * @dev: The LogiCore DP TX device in question
1055 * @lane_count: The number of lanes for which to check clock recovery success
1056 *
1057 * Check if the RX device's DisplayPort Configuration data (DPCD) indicates
1058 * that the clock recovery sequence during link training was successful - the
1059 * RX device's link clock and data recovery unit has realized and maintained
1060 * the frequency lock for all lanes currently in use.
1061 *
1062 * Return: 0 if clock recovery was successful on all lanes in question, -ve if
1063 * not
1064 */
1065static int check_clock_recovery(struct udevice *dev, u8 lane_count)
1066{
1067 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1068 u8 *lane_status = dp_sub->lane_status_ajd_reqs;
1069
1070 switch (lane_count) {
1071 case DP_LANE_COUNT_SET_2:
1072 if (!(lane_status[0] & DP_DPCD_STATUS_LANE_1_CR_DONE_MASK))
1073 return -EINVAL;
1074 case DP_LANE_COUNT_SET_1:
1075 if (!(lane_status[0] & DP_DPCD_STATUS_LANE_0_CR_DONE_MASK))
1076 return -EINVAL;
1077 default:
1078 /* All (LaneCount) lanes have achieved clock recovery. */
1079 break;
1080 }
1081 return 0;
1082}
1083
1084/**
1085 * adj_vswing_preemp() - Adjust voltage swing and pre-emphasis
1086 * @dev: The DP device
1087 *
1088 * Set new voltage swing and pre-emphasis levels using the
1089 * adjustment requests obtained from the RX device.
1090 *
1091 * Return: 0 if voltage swing and pre-emphasis could be adjusted successfully,
1092 * -ve on error
1093 */
1094static int adj_vswing_preemp(struct udevice *dev)
1095{
1096 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1097 u8 index, vs_level_adj_req[4], pe_level_adj_req[4];
1098 u8 aux_data[4];
1099 u8 *adj_reqs = &dp_sub->lane_status_ajd_reqs[4];
1100
1101 /*
1102 * Analyze the adjustment requests for changes in voltage swing and
1103 * pre-emphasis levels.
1104 */
1105 vs_level_adj_req[0] = adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_0_2_VS_MASK;
1106 vs_level_adj_req[1] = (adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_1_3_VS_MASK) >>
1107 DP_DPCD_ADJ_REQ_LANE_1_3_VS_SHIFT;
1108 pe_level_adj_req[0] = (adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_0_2_PE_MASK) >>
1109 DP_DPCD_ADJ_REQ_LANE_0_2_PE_SHIFT;
1110 pe_level_adj_req[1] = (adj_reqs[0] & DP_DPCD_ADJ_REQ_LANE_1_3_PE_MASK) >>
1111 DP_DPCD_ADJ_REQ_LANE_1_3_PE_SHIFT;
1112
1113 /*
1114 * Change the drive settings to match the adjustment requests. Use the
1115 * greatest level requested.
1116 */
1117 dp_sub->link_config.vs_level = 0;
1118 dp_sub->link_config.pe_level = 0;
1119 for (index = 0; index < dp_sub->link_config.lane_count; index++) {
1120 if (vs_level_adj_req[index] > dp_sub->link_config.vs_level)
1121 dp_sub->link_config.vs_level = vs_level_adj_req[index];
1122
1123 if (pe_level_adj_req[index] > dp_sub->link_config.pe_level)
1124 dp_sub->link_config.pe_level = pe_level_adj_req[index];
1125 }
1126
1127 if (dp_sub->link_config.pe_level > DP_MAXIMUM_PE_LEVEL)
1128 dp_sub->link_config.pe_level = DP_MAXIMUM_PE_LEVEL;
1129
1130 if (dp_sub->link_config.vs_level > DP_MAXIMUM_VS_LEVEL)
1131 dp_sub->link_config.vs_level = DP_MAXIMUM_VS_LEVEL;
1132
1133 if (dp_sub->link_config.pe_level >
1134 (4 - dp_sub->link_config.vs_level)) {
1135 dp_sub->link_config.pe_level =
1136 4 - dp_sub->link_config.vs_level;
1137 }
1138 /*
1139 * Make the adjustments to both the DisplayPort TX core and the RX
1140 * device.
1141 */
1142 set_vswing_preemp(dev, aux_data);
1143 /*
1144 * Write the voltage swing and pre-emphasis levels for each lane to the
1145 * RX device.
1146 */
1147 return aux_write(dev, DP_DPCD_TRAINING_LANE0_SET, 2, aux_data);
1148}
1149
1150/**
1151 * get_training_delay() - Get training delay
1152 * @dev: The DP device
1153 * @training_state: The training state for which the required training delay
1154 * should be queried
1155 *
1156 * Determine what the RX device's required training delay is for
1157 * link training.
1158 *
1159 * Return: The training delay in us
1160 */
1161static u32 get_training_delay(struct udevice *dev)
1162{
1163 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1164 u8 *dpcd = dp_sub->dpcd_rx_caps;
1165
1166 if (dpcd[DP_DPCD_TRAIN_AUX_RD_INTERVAL])
1167 return 400 * dpcd[DP_DPCD_TRAIN_AUX_RD_INTERVAL] * 10;
1168
1169 return 400;
1170}
1171
1172/**
1173 * training_state_clock_recovery() - Run clock recovery part of link training
1174 * @dev: The DP device
1175 *
1176 * Run the clock recovery sequence as part of link training. The
1177 * sequence is as follows:
1178 *
1179 * 0) Start signaling at the minimum voltage swing, pre-emphasis, and
1180 * post- cursor levels.
1181 * 1) Transmit training pattern 1 over the main link with symbol
1182 * scrambling disabled.
1183 * 2) The clock recovery loop. If clock recovery is unsuccessful after
1184 * MaxIterations loop iterations, return.
1185 * 2a) Wait for at least the period of time specified in the RX device's
1186 * DisplayPort Configuration data (DPCD) register,
1187 * TRAINING_AUX_RD_INTERVAL.
1188 * 2b) Check if all lanes have achieved clock recovery lock. If so,
1189 * return.
1190 * 2c) Check if the same voltage swing level has been used 5 consecutive
1191 * times or if the maximum level has been reached. If so, return.
1192 * 2d) Adjust the voltage swing, pre-emphasis, and post-cursor levels as
1193 * requested by the RX device.
1194 * 2e) Loop back to 2a.
1195 *
1196 * For a more detailed description of the clock recovery sequence, see section
1197 * 3.5.1.2.1 of the DisplayPort 1.2a specification document.
1198 *
1199 * Return: The next state machine state to advance to
1200 */
1201static enum link_training_states training_state_clock_recovery(struct udevice *dev)
1202{
1203 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1204 u32 status, delay_us;
1205 u8 prev_vs_level = 0, same_vs_level_count = 0;
1206 struct link_config *link_config = &dp_sub->link_config;
1207
1208 delay_us = get_training_delay(dev);
1209 /* Start CRLock. */
1210 /* Start from minimal voltage swing and pre-emphasis levels. */
1211 dp_sub->link_config.vs_level = 0;
1212 dp_sub->link_config.pe_level = 0;
1213 /* Transmit training pattern 1. */
1214 status = set_training_pattern(dev, TRAINING_PATTERN_SET_TP1);
1215 if (status)
1216 return TS_FAILURE;
1217
1218 while (1) {
1219 /* Wait delay specified in TRAINING_AUX_RD_INTERVAL. */
1220 udelay(delay_us);
1221 /* Get lane and adjustment requests. */
1222 status = get_lane_status_adj_reqs(dev);
1223 if (status)
1224 /* The AUX read failed. */
1225 return TS_FAILURE;
1226
1227 /*
1228 * Check if all lanes have realized and maintained the frequency
1229 * lock and get adjustment requests.
1230 */
1231 status = check_clock_recovery(dev, dp_sub->link_config.lane_count);
1232 if (status == 0)
1233 return TS_CHANNEL_EQUALIZATION;
1234 /*
1235 * Check if the same voltage swing for each lane has been used 5
1236 * consecutive times.
1237 */
1238 if (prev_vs_level == link_config->vs_level) {
1239 same_vs_level_count++;
1240 } else {
1241 same_vs_level_count = 0;
1242 prev_vs_level = link_config->vs_level;
1243 }
1244 if (same_vs_level_count >= 5)
1245 break;
1246
1247 /* Only try maximum voltage swing once. */
1248 if (link_config->vs_level == DP_MAXIMUM_VS_LEVEL)
1249 break;
1250
1251 /* Adjust the drive settings as requested by the RX device. */
1252 status = adj_vswing_preemp(dev);
1253 if (status)
1254 /* The AUX write failed. */
1255 return TS_FAILURE;
1256 }
1257 return TS_ADJUST_LINK_RATE;
1258}
1259
1260/**
1261 * check_channel_equalization() - Check channel equalization success
1262 * @dev: The DP device
1263 * @lane_count: The number of lanes for which to check channel equalization
1264 * success
1265 *
1266 * Check if the RX device's DisplayPort Configuration data (DPCD) indicates
1267 * that the channel equalization sequence during link training was successful -
1268 * the RX device has achieved channel equalization, symbol lock, and interlane
1269 * alignment for all lanes currently in use.
1270 *
1271 * Return: 0 if channel equalization was successful on all lanes in question,
1272 * -ve if not
1273 */
1274static int check_channel_equalization(struct udevice *dev, u8 lane_count)
1275{
1276 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1277 u8 *lane_status = dp_sub->lane_status_ajd_reqs;
1278
1279 /* Check that all LANEx_CHANNEL_EQ_DONE bits are set. */
1280 switch (lane_count) {
1281 case DP_LANE_COUNT_SET_2:
1282 if (!(lane_status[0] & DP_DPCD_STATUS_LANE_1_CE_DONE_MASK))
1283 return -EINVAL;
1284 case DP_LANE_COUNT_SET_1:
1285 if (!(lane_status[0] & DP_DPCD_STATUS_LANE_0_CE_DONE_MASK))
1286 return -EINVAL;
1287 default:
1288 /* All (LaneCount) lanes have achieved channel equalization. */
1289 break;
1290 }
1291
1292 /* Check that all LANEx_SYMBOL_LOCKED bits are set. */
1293 switch (lane_count) {
1294 case DP_LANE_COUNT_SET_2:
1295 if (!(lane_status[0] & DP_DPCD_STATUS_LANE_1_SL_DONE_MASK))
1296 return -EINVAL;
1297 case DP_LANE_COUNT_SET_1:
1298 if (!(lane_status[0] & DP_DPCD_STATUS_LANE_0_SL_DONE_MASK))
1299 return -EINVAL;
1300 default:
1301 /* All (LaneCount) lanes have achieved symbol lock. */
1302 break;
1303 }
1304
1305 /* Check that interlane alignment is done. */
1306 if (!(lane_status[2] & DP_DPCD_LANE_ALIGN_STATUS_UPDATED_IA_DONE_MASK))
1307 return -EINVAL;
1308 return 0;
1309}
1310
1311/**
1312 * training_state_channel_equalization() - Run channel equalization part of
1313 * link training
1314 * @dev: The DP device
1315 *
1316 * Run the channel equalization sequence as part of link
1317 * training. The sequence is as follows:
1318 *
1319 * 0) Start signaling with the same drive settings used at the end of the
1320 * clock recovery sequence.
1321 * 1) Transmit training pattern 2 (or 3) over the main link with symbol
1322 * scrambling disabled.
1323 * 2) The channel equalization loop. If channel equalization is
1324 * unsuccessful after 5 loop iterations, return.
1325 * 2a) Wait for at least the period of time specified in the RX device's
1326 * DisplayPort Configuration data (DPCD) register,
1327 * TRAINING_AUX_RD_INTERVAL.
1328 * 2b) Check if all lanes have achieved channel equalization, symbol lock,
1329 * and interlane alignment. If so, return.
1330 * 2c) Check if the same voltage swing level has been used 5 consecutive
1331 * times or if the maximum level has been reached. If so, return.
1332 * 2d) Adjust the voltage swing, pre-emphasis, and post-cursor levels as
1333 * requested by the RX device.
1334 * 2e) Loop back to 2a.
1335 *
1336 * For a more detailed description of the channel equalization sequence, see
1337 * section 3.5.1.2.2 of the DisplayPort 1.2a specification document.
1338 *
1339 * Return: The next state machine state to advance to
1340 */
1341static enum link_training_states training_state_channel_equalization(struct udevice *dev)
1342{
1343 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1344 u32 status, delay_us = 400, iteration_count = 0;
1345
1346 /* Write the current drive settings. */
1347 /* Transmit training pattern 2/3. */
1348 if (dp_sub->dpcd_rx_caps[DP_DPCD_MAX_LANE_COUNT] &
1349 DP_DPCD_TPS3_SUPPORT_MASK)
1350 status = set_training_pattern(dev, TRAINING_PATTERN_SET_TP3);
1351 else
1352 status = set_training_pattern(dev, TRAINING_PATTERN_SET_TP2);
1353
1354 if (status)
1355 return TS_FAILURE;
1356
1357 while (iteration_count < 5) {
1358 /* Wait delay specified in TRAINING_AUX_RD_INTERVAL. */
1359 udelay(delay_us);
1360
1361 /* Get lane and adjustment requests. */
1362 status = get_lane_status_adj_reqs(dev);
1363 if (status)
1364 /* The AUX read failed. */
1365 return TS_FAILURE;
1366
1367 /* Adjust the drive settings as requested by the RX device. */
1368 status = adj_vswing_preemp(dev);
1369 if (status)
1370 /* The AUX write failed. */
1371 return TS_FAILURE;
1372
1373 /* Check that all lanes still have their clocks locked. */
1374 status = check_clock_recovery(dev, dp_sub->link_config.lane_count);
1375 if (status)
1376 break;
1377 /*
1378 * Check that all lanes have accomplished channel
1379 * equalization, symbol lock, and interlane alignment.
1380 */
1381 status = check_channel_equalization(dev, dp_sub->link_config.lane_count);
1382 if (status == 0)
1383 return TS_SUCCESS;
1384 iteration_count++;
1385 }
1386
1387 /*
1388 * Tried 5 times with no success. Try a reduced bitrate first, then
1389 * reduce the number of lanes.
1390 */
1391 return TS_ADJUST_LINK_RATE;
1392}
1393
1394static int check_lane_align(struct udevice *dev)
1395{
1396 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1397 u8 *lane_status = dp_sub->lane_status_ajd_reqs;
1398
1399 if (!(lane_status[2] & DP_DPCD_LANE_ALIGN_STATUS_UPDATED_IA_DONE_MASK))
1400 return -EINVAL;
1401 return 0;
1402}
1403
1404/**
1405 * check_link_status() - Check status of link
1406 * @dev: The DP device
1407 * @lane_count: The lane count to use for the check
1408 *
1409 * Check if the receiver's DisplayPort Configuration data (DPCD) indicates the
1410 * receiver has achieved and maintained clock recovery, channel equalization,
1411 * symbol lock, and interlane alignment for all lanes currently in use.
1412 *
1413 * Return: 0 if the link status is OK, -ve if a error occurred during checking
1414 */
1415static int check_link_status(struct udevice *dev, u8 lane_count)
1416{
1417 u32 status;
1418
1419 status = get_lane_status_adj_reqs(dev);
1420 if (status)
1421 /* The AUX read failed. */
1422 return status;
1423
1424 /* Check if the link needs training. */
1425 if ((check_clock_recovery(dev, lane_count) == 0) &&
1426 (check_channel_equalization(dev, lane_count) == 0) &&
1427 (check_lane_align(dev) == 0)) {
1428 return 0;
1429 }
1430 return -EINVAL;
1431}
1432
1433/**
1434 * run_training() - Run link training
1435 * @dev: The DP device
1436 *
1437 * Run the link training process. It is implemented as a state machine, with
1438 * each state returning the next state. First, the clock recovery sequence will
1439 * be run; if successful, the channel equalization sequence will run. If either
1440 * the clock recovery or channel equalization sequence failed, the link rate or
1441 * the number of lanes used will be reduced and training will be re-attempted.
1442 * If training fails at the minimal data rate, 1.62 Gbps with a single lane,
1443 * training will no longer re-attempt and fail.
1444 *
1445 * There are undocumented timeout constraints in the link training process. In
1446 * DP v1.2a spec, Chapter 3.5.1.2.2 a 10ms limit for the complete training
1447 * process is mentioned. Which individual timeouts are derived and implemented
1448 * by sink manufacturers is unknown. So each step should be as short as
1449 * possible and link training should start as soon as possible after HPD.
1450 *
1451 * Return: 0 if the training sequence ran successfully, -ve if a error occurred
1452 * or the training failed
1453 */
1454static int run_training(struct udevice *dev)
1455{
1456 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1457 u32 status;
1458 enum link_training_states training_state = TS_CLOCK_RECOVERY;
1459
1460 while (1) {
1461 switch (training_state) {
1462 case TS_CLOCK_RECOVERY:
1463 training_state = training_state_clock_recovery(dev);
1464 break;
1465 case TS_CHANNEL_EQUALIZATION:
1466 training_state = training_state_channel_equalization(dev);
1467 break;
1468 default:
1469 break;
1470 }
1471
1472 if (training_state == TS_SUCCESS)
1473 break;
1474 else if (training_state == TS_FAILURE)
1475 return -EINVAL;
1476
1477 if (training_state == TS_ADJUST_LANE_COUNT ||
1478 training_state == TS_ADJUST_LINK_RATE) {
1479 status = set_training_pattern(dev, TRAINING_PATTERN_SET_OFF);
1480 if (status)
1481 return -EINVAL;
1482 }
1483 }
1484
1485 /* Final status check. */
1486 return check_link_status(dev, dp_sub->link_config.lane_count);
1487}
1488
/**
 * reset_dp_phy() - Pulse a reset through the DP PHY configuration register
 * @dev: The DP device
 * @reset: Mask of DP_PHY_CONFIG reset bits to pulse
 *
 * Disable the DP TX core, assert and then release the requested PHY reset
 * bits, wait for the PHY to report ready, and re-enable the TX core.
 */
void reset_dp_phy(struct udevice *dev, u32 reset)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 phyval, regval;

	/* Disable the TX core while the PHY is in reset. */
	writel(0, dp_sub->base_addr + DP_ENABLE);
	phyval = readl(dp_sub->base_addr + DP_PHY_CONFIG);
	regval = phyval | reset;
	/* Assert the requested reset bits on top of the current config. */
	writel(regval, dp_sub->base_addr + DP_PHY_CONFIG);
	/* Remove the reset. */
	writel(phyval, dp_sub->base_addr + DP_PHY_CONFIG);
	/* Wait for the PHY to be ready. */
	wait_phy_ready(dev);

	writel(1, dp_sub->base_addr + DP_ENABLE);
}
1505
1506/**
1507 * establish_link() - Establish a link
1508 * @dev: The DP device
1509 *
1510 * Check if the link needs training and run the training sequence if training
1511 * is required.
1512 *
1513 * Return: 0 if the link was established successfully, -ve on error
1514 */
1515static int establish_link(struct udevice *dev)
1516{
1517 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1518 u32 status, re_enable_main_link;
1519
1520 reset_dp_phy(dev, DP_PHY_CONFIG_TX_PHY_8B10BEN_MASK |
1521 DP_PHY_CONFIG_PHY_RESET_MASK);
1522
1523 re_enable_main_link = readl(dp_sub->base_addr + DP_ENABLE_MAIN_STREAM);
1524 if (re_enable_main_link)
1525 enable_main_link(dev, 0);
1526
1527 status = run_training(dev);
1528 if (status)
1529 return status;
1530
1531 status = set_training_pattern(dev, TRAINING_PATTERN_SET_OFF);
1532 if (status)
1533 return status;
1534
1535 if (re_enable_main_link)
1536 enable_main_link(dev, 1);
1537
1538 return check_link_status(dev, dp_sub->link_config.lane_count);
1539}
1540
/**
 * dp_hpd_train() - Bring up the DisplayPort link after hot-plug detection
 * @dev: The DP device
 *
 * Read the sink capabilities, then program enhanced framing, lane count,
 * link rate and downspreading to match the sink (or the driver's configured
 * limits), and finally run link training.
 *
 * Return: 0 if the link was trained successfully, -ve on error
 */
static int dp_hpd_train(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	struct link_config *link_config = &dp_sub->link_config;
	u32 status;

	status = get_rx_capabilities(dev);
	if (status) {
		debug("! Error getting RX caps.\n");
		return status;
	}

	status = set_enhanced_frame_mode(dev, link_config->support_enhanced_framing_mode ? 1 : 0);
	if (status) {
		debug("! EFM set failed.\n");
		return status;
	}

	/* Either use the sink's maximum or the locally configured value. */
	status = set_lane_count(dev, (dp_sub->use_max_lane_count) ?
				link_config->max_lane_count : dp_sub->lane_count);
	if (status) {
		debug("! Lane count set failed.\n");
		return status;
	}

	status = set_link_rate(dev, (dp_sub->use_max_link_rate) ?
			       link_config->max_link_rate : dp_sub->link_rate);
	if (status) {
		debug("! Link rate set failed.\n");
		return status;
	}

	status = set_downspread(dev, link_config->support_downspread_control);
	if (status) {
		debug("! Setting downspread failed.\n");
		return status;
	}

	debug("Lane count =%d\n", dp_sub->link_config.lane_count);
	debug("Link rate =%d\n", dp_sub->link_config.link_rate);

	debug("Starting Training...\n");
	status = establish_link(dev);
	if (status == 0)
		debug("! Training succeeded.\n");
	else
		debug("! Training failed.\n");

	return status;
}
1591
1592static void display_gfx_frame_buffer(struct udevice *dev)
1593{
1594 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1595
1596 if (!dp_sub->dp_dma->gfx.channel.cur)
1597 dp_sub->dp_dma->gfx.trigger_status = DPDMA_TRIGGER_EN;
1598}
1599
1600static void set_color_encode(struct udevice *dev)
1601{
1602 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1603 struct main_stream_attributes *msa_config = &dp_sub->msa_config;
1604
1605 msa_config->y_cb_cr_colorimetry = 0;
1606 msa_config->dynamic_range = 0;
1607 msa_config->component_format = 0;
1608 msa_config->misc0 = 0;
1609 msa_config->misc1 = 0;
1610 msa_config->component_format = DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_RGB;
1611}
1612
/**
 * config_msa_recalculate() - Recompute derived Main Stream Attribute values
 * @dev: The DP device
 *
 * Recalculate the MSA values that depend on the current video timing mode,
 * color depth and link configuration: N_VID, h/v start, MISC0, bits per
 * pixel, data per lane, transfer unit sizing and the initial wait count.
 */
static void config_msa_recalculate(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	u32 video_bw, link_bw, words_per_line;
	u8 bits_per_pixel;
	struct main_stream_attributes *msa_config;
	struct link_config *link_config;

	msa_config = &dp_sub->msa_config;
	link_config = &dp_sub->link_config;

	msa_config->user_pixel_width = 1;

	/* Compute the rest of the MSA values. */
	msa_config->n_vid = 27 * 1000 * link_config->link_rate;
	/* Active video starts after the sync width plus back porch. */
	msa_config->h_start = msa_config->vid_timing_mode.video_timing.h_sync_width +
			      msa_config->vid_timing_mode.video_timing.h_back_porch;
	msa_config->v_start = msa_config->vid_timing_mode.video_timing.f0_pv_sync_width +
			      msa_config->vid_timing_mode.video_timing.f0_pv_back_porch;

	/* Miscellaneous attributes: encode the bit depth code (BDC). */
	if (msa_config->bits_per_color == 6)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_6BPC;
	else if (msa_config->bits_per_color == 8)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_8BPC;
	else if (msa_config->bits_per_color == 10)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_10BPC;
	else if (msa_config->bits_per_color == 12)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_12BPC;
	else if (msa_config->bits_per_color == 16)
		msa_config->misc0 = DP_MAIN_STREAM_MISC0_BDC_16BPC;

	msa_config->misc0 <<= DP_MAIN_STREAM_MISC0_BDC_SHIFT;

	/* Need to set this. */
	msa_config->misc0 |= msa_config->component_format <<
			     DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_SHIFT;

	msa_config->misc0 |= msa_config->dynamic_range <<
			     DP_MAIN_STREAM_MISC0_DYNAMIC_RANGE_SHIFT;

	msa_config->misc0 |= msa_config->y_cb_cr_colorimetry <<
			     DP_MAIN_STREAM_MISC0_YCBCR_COLORIMETRY_SHIFT;

	msa_config->misc0 |= msa_config->synchronous_clock_mode;
	/*
	 * Determine the number of bits per pixel for the specified color
	 * component format.
	 */
	if (msa_config->misc1 == DP_MAIN_STREAM_MISC1_Y_ONLY_EN_MASK)
		bits_per_pixel = msa_config->bits_per_color;
	else if (msa_config->component_format ==
		 DP_MAIN_STREAM_MISC0_COMPONENT_FORMAT_YCBCR422)
		/* YCbCr422 color component format. */
		bits_per_pixel = msa_config->bits_per_color * 2;
	else
		/* RGB or YCbCr 4:4:4 color component format. */
		bits_per_pixel = msa_config->bits_per_color * 3;

	/*
	 * Calculate the data per lane: round the active line bits up to a
	 * whole number of 16-bit words (remainder < 16, so adding 16 before
	 * the integer divide yields the ceiling).
	 */
	words_per_line = msa_config->vid_timing_mode.video_timing.h_active * bits_per_pixel;
	if (words_per_line % 16)
		words_per_line += 16;

	words_per_line /= 16;
	msa_config->data_per_lane = words_per_line - link_config->lane_count;
	if (words_per_line % link_config->lane_count)
		msa_config->data_per_lane += (words_per_line % link_config->lane_count);

	/* Allocate a fixed size for single-stream transport (SST) operation. */
	msa_config->transfer_unit_size = 64;

	/*
	 * Calculate the average number of bytes per transfer unit.
	 * Note: Both the integer and the fractional part is stored in
	 * AvgBytesPerTU (scaled by 1000).
	 */
	video_bw = ((msa_config->pixel_clock_hz / 1000) * bits_per_pixel) / 8;
	link_bw = (link_config->lane_count * link_config->link_rate * 27);
	msa_config->avg_bytes_per_tu = ((10 *
					(video_bw * msa_config->transfer_unit_size)
					/ link_bw) + 5) / 10;
	/*
	 * The number of initial wait cycles at the start of a new line by the
	 * framing logic. This allows enough data to be buffered in the input
	 * FIFO before video is sent.
	 */
	if ((msa_config->avg_bytes_per_tu / 1000) <= 4)
		msa_config->init_wait = 64;
	else
		msa_config->init_wait = msa_config->transfer_unit_size -
					(msa_config->avg_bytes_per_tu / 1000);
}
1706
1707static void set_msa_bpc(struct udevice *dev, u8 bits_per_color)
1708{
1709 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1710
1711 dp_sub->msa_config.bits_per_color = bits_per_color;
1712 /* Calculate the rest of the MSA values. */
1713 config_msa_recalculate(dev);
1714}
1715
1716const struct video_timing_mode *get_video_mode_data(enum video_mode vm_id)
1717{
1718 if (vm_id < VIDC_VM_NUM_SUPPORTED)
1719 return &vidc_video_timing_modes[vm_id];
1720
1721 return NULL;
1722}
1723
1724static u64 get_pixelclk_by_vmid(enum video_mode vm_id)
1725{
1726 const struct video_timing_mode *vm;
1727 u64 clk_hz;
1728
1729 vm = get_video_mode_data(vm_id);
1730 /* For progressive mode, use only frame 0 vertical total. */
1731 clk_hz = vm->video_timing.f0_pv_total;
1732 /* Multiply the number of pixels by the frame rate. */
1733 clk_hz *= vm->frame_rate;
1734
1735 /*
1736 * Multiply the vertical total by the horizontal total for number of
1737 * pixels.
1738 */
1739 clk_hz *= vm->video_timing.h_total;
1740
1741 return clk_hz;
1742}
1743
1744/**
1745 * config_msa_video_mode() - Enable video output
1746 * @dev: The DP device
1747 * @msa: The MSA values to set for the device
1748 *
1749 * Return: 0 if the video was enabled successfully, -ve on error
1750 */
1751static void config_msa_video_mode(struct udevice *dev, enum video_mode videomode)
1752{
1753 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1754 struct main_stream_attributes *msa_config;
1755
1756 msa_config = &dp_sub->msa_config;
1757
1758 /* Configure the MSA values from the display monitor DMT table. */
1759 msa_config->vid_timing_mode.vid_mode = vidc_video_timing_modes[videomode].vid_mode;
1760 msa_config->vid_timing_mode.frame_rate = vidc_video_timing_modes[videomode].frame_rate;
1761 msa_config->vid_timing_mode.video_timing.h_active =
1762 vidc_video_timing_modes[videomode].video_timing.h_active;
1763 msa_config->vid_timing_mode.video_timing.h_front_porch =
1764 vidc_video_timing_modes[videomode].video_timing.h_front_porch;
1765 msa_config->vid_timing_mode.video_timing.h_sync_width =
1766 vidc_video_timing_modes[videomode].video_timing.h_sync_width;
1767 msa_config->vid_timing_mode.video_timing.h_back_porch =
1768 vidc_video_timing_modes[videomode].video_timing.h_back_porch;
1769 msa_config->vid_timing_mode.video_timing.h_total =
1770 vidc_video_timing_modes[videomode].video_timing.h_total;
1771 msa_config->vid_timing_mode.video_timing.h_sync_polarity =
1772 vidc_video_timing_modes[videomode].video_timing.h_sync_polarity;
1773 msa_config->vid_timing_mode.video_timing.v_active =
1774 vidc_video_timing_modes[videomode].video_timing.v_active;
1775 msa_config->vid_timing_mode.video_timing.f0_pv_front_porch =
1776 vidc_video_timing_modes[videomode].video_timing.f0_pv_front_porch;
1777 msa_config->vid_timing_mode.video_timing.f0_pv_sync_width =
1778 vidc_video_timing_modes[videomode].video_timing.f0_pv_sync_width;
1779 msa_config->vid_timing_mode.video_timing.f0_pv_back_porch =
1780 vidc_video_timing_modes[videomode].video_timing.f0_pv_back_porch;
1781 msa_config->vid_timing_mode.video_timing.f0_pv_total =
1782 vidc_video_timing_modes[videomode].video_timing.f0_pv_total;
1783 msa_config->vid_timing_mode.video_timing.f1_v_front_porch =
1784 vidc_video_timing_modes[videomode].video_timing.f1_v_front_porch;
1785 msa_config->vid_timing_mode.video_timing.f1_v_sync_width =
1786 vidc_video_timing_modes[videomode].video_timing.f1_v_sync_width;
1787 msa_config->vid_timing_mode.video_timing.f1_v_back_porch =
1788 vidc_video_timing_modes[videomode].video_timing.f1_v_back_porch;
1789 msa_config->vid_timing_mode.video_timing.f1_v_total =
1790 vidc_video_timing_modes[videomode].video_timing.f1_v_total;
1791 msa_config->vid_timing_mode.video_timing.v_sync_polarity =
1792 vidc_video_timing_modes[videomode].video_timing.v_sync_polarity;
1793 msa_config->pixel_clock_hz = get_pixelclk_by_vmid(msa_config->vid_timing_mode.vid_mode);
1794
1795 /* Calculate the rest of the MSA values. */
1796 config_msa_recalculate(dev);
1797}
1798
1799static void set_pixel_clock(u64 freq_hz)
1800{
1801 u64 ext_divider, vco, vco_int_frac;
1802 u32 pll_assigned, frac_int_fb_div, fraction, regpll = 0;
1803 u8 pll;
1804
1805 pll_assigned = readl(CLK_FPD_BASEADDR + VIDEO_REF_CTRL) & VIDEO_REF_CTRL_SRCSEL_MASK;
1806 if (pll_assigned)
1807 pll = VPLL;
1808
1809 ext_divider = PLL_OUT_FREQ / freq_hz;
1810 vco = freq_hz * ext_divider * 2;
1811 vco_int_frac = (vco * INPUT_FREQ_PRECISION * SHIFT_DECIMAL) /
1812 AVBUF_INPUT_REF_CLK;
1813 frac_int_fb_div = vco_int_frac >> PRECISION;
1814 fraction = vco_int_frac & AVBUF_DECIMAL;
1815
1816 regpll |= ENABLE_BIT << PLL_CTRL_BYPASS_SHIFT;
1817 regpll |= frac_int_fb_div << PLL_CTRL_FBDIV_SHIFT;
1818 regpll |= (1 << PLL_CTRL_DIV2_SHIFT);
1819 regpll |= (PSS_REF_CLK << PLL_CTRL_PRE_SRC_SHIFT);
1820 writel(regpll, CLK_FPD_BASEADDR + VPLL_CTRL);
1821
1822 regpll = 0;
1823 regpll |= VPLL_CFG_CP << PLL_CFG_CP_SHIFT;
1824 regpll |= VPLL_CFG_RES << PLL_CFG_RES_SHIFT;
1825 regpll |= VPLL_CFG_LFHF << PLL_CFG_LFHF_SHIFT;
1826 regpll |= VPLL_CFG_LOCK_DLY << PLL_CFG_LOCK_DLY_SHIFT;
1827 regpll |= VPLL_CFG_LOCK_CNT << PLL_CFG_LOCK_CNT_SHIFT;
1828 writel(regpll, CLK_FPD_BASEADDR + VPLL_CFG);
1829
1830 regpll = (1U << PLL_FRAC_CFG_ENABLED_SHIFT) |
1831 (fraction << PLL_FRAC_CFG_DATA_SHIFT);
1832 writel(regpll, CLK_FPD_BASEADDR + VPLL_FRAC_CFG);
1833
1834 clrsetbits_le32(CLK_FPD_BASEADDR + VPLL_CTRL,
1835 PLL_CTRL_RESET_MASK,
1836 (ENABLE_BIT << PLL_CTRL_RESET_SHIFT));
1837
1838 /* Deassert reset to the PLL. */
1839 clrsetbits_le32(CLK_FPD_BASEADDR + VPLL_CTRL,
1840 PLL_CTRL_RESET_MASK,
1841 (DISABLE_BIT << PLL_CTRL_RESET_SHIFT));
1842
1843 while (!(readl(CLK_FPD_BASEADDR + PLL_STATUS) &
1844 (1 << PLL_STATUS_VPLL_LOCK)))
1845 ;
1846
1847 /* Deassert Bypass. */
1848 clrsetbits_le32(CLK_FPD_BASEADDR + VPLL_CTRL,
1849 PLL_CTRL_BYPASS_MASK,
1850 (DISABLE_BIT << PLL_CTRL_BYPASS_SHIFT));
1851 udelay(1);
1852
1853 clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
1854 VIDEO_REF_CTRL_CLKACT_MASK,
1855 (DISABLE_BIT << VIDEO_REF_CTRL_CLKACT_SHIFT));
1856
1857 clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
1858 VIDEO_REF_CTRL_DIVISOR1_MASK,
1859 (ENABLE_BIT << VIDEO_REF_CTRL_DIVISOR1_SHIFT));
1860
1861 clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
1862 VIDEO_REF_CTRL_DIVISOR0_MASK,
1863 (ext_divider << VIDEO_REF_CTRL_DIVISOR0_SHIFT));
1864
1865 clrsetbits_le32(CLK_FPD_BASEADDR + VIDEO_REF_CTRL,
1866 VIDEO_REF_CTRL_CLKACT_MASK,
1867 (ENABLE_BIT << VIDEO_REF_CTRL_CLKACT_SHIFT));
1868}
1869
1870/**
1871 * set_msa_values() - Set MSA values
1872 * @dev: The DP device
1873 *
1874 * Set the main stream attributes registers of the DisplayPort TX
1875 * core with the values specified in the main stream attributes configuration
1876 * structure.
1877 */
1878static void set_msa_values(struct udevice *dev)
1879{
1880 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1881 struct main_stream_attributes *msa_config;
1882
1883 msa_config = &dp_sub->msa_config;
1884
1885 /*
1886 * Set the main stream attributes to the associated DisplayPort TX core
1887 * registers.
1888 */
1889 writel(msa_config->vid_timing_mode.video_timing.h_total,
1890 dp_sub->base_addr + DP_MAIN_STREAM_HTOTAL);
1891 writel(msa_config->vid_timing_mode.video_timing.f0_pv_total,
1892 dp_sub->base_addr + DP_MAIN_STREAM_VTOTAL);
1893 writel(msa_config->vid_timing_mode.video_timing.h_sync_polarity |
1894 (msa_config->vid_timing_mode.video_timing.v_sync_polarity
1895 << DP_MAIN_STREAM_POLARITY_VSYNC_POL_SHIFT),
1896 dp_sub->base_addr + DP_MAIN_STREAM_POLARITY);
1897 writel(msa_config->vid_timing_mode.video_timing.h_sync_width,
1898 dp_sub->base_addr + DP_MAIN_STREAM_HSWIDTH);
1899 writel(msa_config->vid_timing_mode.video_timing.f0_pv_sync_width,
1900 dp_sub->base_addr + DP_MAIN_STREAM_VSWIDTH);
1901 writel(msa_config->vid_timing_mode.video_timing.h_active,
1902 dp_sub->base_addr + DP_MAIN_STREAM_HRES);
1903 writel(msa_config->vid_timing_mode.video_timing.v_active,
1904 dp_sub->base_addr + DP_MAIN_STREAM_VRES);
1905 writel(msa_config->h_start, dp_sub->base_addr + DP_MAIN_STREAM_HSTART);
1906 writel(msa_config->v_start, dp_sub->base_addr + DP_MAIN_STREAM_VSTART);
1907 writel(msa_config->misc0, dp_sub->base_addr + DP_MAIN_STREAM_MISC0);
1908 writel(msa_config->misc1, dp_sub->base_addr + DP_MAIN_STREAM_MISC1);
1909 writel(msa_config->pixel_clock_hz / 1000, dp_sub->base_addr + DP_M_VID);
1910 writel(msa_config->n_vid, dp_sub->base_addr + DP_N_VID);
1911 writel(msa_config->user_pixel_width, dp_sub->base_addr + DP_USER_PIXEL_WIDTH);
1912 writel(msa_config->data_per_lane, dp_sub->base_addr + DP_USER_DATA_COUNT_PER_LANE);
1913 /*
1914 * Set the transfer unit values to the associated DisplayPort TX core
1915 * registers.
1916 */
1917 writel(msa_config->transfer_unit_size, dp_sub->base_addr + DP_TU_SIZE);
1918 writel(msa_config->avg_bytes_per_tu / 1000,
1919 dp_sub->base_addr + DP_MIN_BYTES_PER_TU);
1920 writel((msa_config->avg_bytes_per_tu % 1000) * 1000,
1921 dp_sub->base_addr + DP_FRAC_BYTES_PER_TU);
1922 writel(msa_config->init_wait, dp_sub->base_addr + DP_INIT_WAIT);
1923}
1924
/*
 * setup_video_stream() - Configure and start the video stream
 * @dev: The DP device
 *
 * Program colour encoding, bits-per-colour and the MSA video mode, set the
 * pixel clock, reset the DP transmitter and the AV buffer manager, then
 * enable the main link. The ordering of these steps matters: MSA values are
 * written only after the transmitter has been taken out of soft reset.
 */
static void setup_video_stream(struct udevice *dev)
{
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
	struct main_stream_attributes *msa_config = &dp_sub->msa_config;

	set_color_encode(dev);
	set_msa_bpc(dev, dp_sub->bpc);
	config_msa_video_mode(dev, dp_sub->video_mode);

	/* Set pixel clock. */
	dp_sub->pix_clk = msa_config->pixel_clock_hz;
	set_pixel_clock(dp_sub->pix_clk);

	/* Reset the transmitter (assert, settle, de-assert). */
	writel(1, dp_sub->base_addr + DP_SOFT_RESET);
	udelay(10);
	writel(0, dp_sub->base_addr + DP_SOFT_RESET);

	set_msa_values(dev);

	/* Issuing a soft-reset (AV_BUF_SRST_REG). */
	writel(3, dp_sub->base_addr + AVBUF_BUF_SRST_REG); /* Assert reset. */
	udelay(10);
	writel(0, dp_sub->base_addr + AVBUF_BUF_SRST_REG); /* De-assert reset. */

	enable_main_link(dev, 1);

	debug("DONE!\n");
}
1954
1955static int dp_tx_start_link_training(struct udevice *dev)
1956{
1957 u32 status;
1958 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1959
1960 enable_main_link(dev, 0);
1961
1962 if (!is_dp_connected(dev)) {
1963 debug("! Disconnected.\n");
1964 return -ENODEV;
1965 }
1966
1967 status = dp_tx_wakeup(dev);
1968 if (status) {
1969 debug("! Wakeup failed.\n");
1970 return -EIO;
1971 }
1972
1973 do {
1974 mdelay(100);
1975 status = dp_hpd_train(dev);
1976 if (status == -EINVAL) {
1977 debug("Lost connection\n\r");
1978 return -EIO;
1979 } else if (status) {
1980 continue;
1981 }
1982 display_gfx_frame_buffer(dev);
1983 setup_video_stream(dev);
1984 status = check_link_status(dev, dp_sub->link_config.lane_count);
1985 if (status == -EINVAL)
1986 return -EIO;
1987 } while (status != 0);
1988
1989 return 0;
1990}
1991
1992static void init_run_config(struct udevice *dev)
1993{
1994 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
1995
1996 dp_sub->dp_dma = &dp_dma;
Michal Simek0f465a42023-05-17 10:42:12 +02001997 dp_sub->video_mode = VIDC_VM_1024x768_60_P;
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +02001998 dp_sub->bpc = VIDC_BPC_8;
1999 dp_sub->color_encode = DP_CENC_RGB;
2000 dp_sub->use_max_cfg_caps = 1;
2001 dp_sub->lane_count = LANE_COUNT_1;
2002 dp_sub->link_rate = LINK_RATE_540GBPS;
2003 dp_sub->en_sync_clk_mode = 0;
2004 dp_sub->use_max_lane_count = 1;
2005 dp_sub->use_max_link_rate = 1;
2006}
2007
2008static int dpdma_setup(struct udevice *dev)
2009{
2010 int status;
2011 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
2012
2013 writel(DPDMA_ISR_VSYNC_INT_MASK, dp_sub->dp_dma->base_addr + DPDMA_IEN);
2014 status = wait_for_bit_le32((u32 *)dp_sub->dp_dma->base_addr + DPDMA_ISR,
2015 DPDMA_ISR_VSYNC_INT_MASK, false, 1000, false);
2016 if (status) {
2017 debug("%s: INTR TIMEDOUT\n", __func__);
2018 return status;
2019 }
2020 debug("INTR dma_vsync_intr_handler called...\n");
2021 dma_vsync_intr_handler(dev);
2022
2023 return 0;
2024}
2025
2026static int zynqmp_dpsub_init(struct udevice *dev)
2027{
2028 int status;
2029 struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);
2030
2031 /* Initialize the dpdma configuration */
2032 status = init_dpdma_subsys(dev);
2033 if (status)
2034 return -EINVAL;
2035
2036 config_msa_sync_clk_mode(dev, dp_sub->en_sync_clk_mode);
2037 set_video_clk_source(dev, AVBUF_PS_CLK, AVBUF_PS_CLK);
2038
2039 return 0;
2040}
2041
/**
 * dp_tx_run() - Poll for an HPD event and bring up the link
 * @dev: The DP device
 *
 * Poll the DP TX interrupt registers for hot-plug-detect (HPD) activity.
 * On a connection event, or on an HPD pulse with duration >= 250, run
 * link training; on a disconnection event, disable the main link. Gives
 * up after 5 polls that saw no HPD pulse.
 *
 * Return: 0 once link training succeeded, negative errno otherwise.
 */
static int dp_tx_run(struct udevice *dev)
{
	u32 interrupt_signal_state, interrupt_status, hpd_state, hpd_event;
	u32 hpd_pulse_detected, hpd_duration, status;
	int attempts = 0;
	struct zynqmp_dpsub_priv *dp_sub = dev_get_priv(dev);

	/* Continuously poll for HPD events. */
	while (attempts < 5) {
		/* Read interrupt registers. */
		interrupt_signal_state = readl(dp_sub->base_addr + DP_INTERRUPT_SIG_STATE);
		interrupt_status = readl(dp_sub->base_addr + DP_INTR_STATUS);
		/* Check for HPD events. */
		hpd_state = interrupt_signal_state & DP_INTERRUPT_SIG_STATE_HPD_STATE_MASK;
		hpd_event = interrupt_status & DP_INTR_HPD_EVENT_MASK;
		hpd_pulse_detected = interrupt_status & DP_INTR_HPD_PULSE_DETECTED_MASK;
		/*
		 * hpd_duration is assigned only here; the short-circuit in
		 * the pulse branch below guarantees it is never read
		 * uninitialized.
		 */
		if (hpd_pulse_detected)
			hpd_duration = readl(dp_sub->base_addr + DP_HPD_DURATION);
		else
			attempts++;

		/* HPD event handling. */
		if (hpd_state && hpd_event) {
			debug("+===> HPD connection event detected.\n");
			/* Initiate link training. */
			status = dp_tx_start_link_training(dev);
			if (status) {
				debug("Link training failed\n");
				return status;
			}
			return 0;
		} else if (hpd_state && hpd_pulse_detected && (hpd_duration >= 250)) {
			debug("===> HPD pulse detected.\n");
			/* Re-train if needed. */
			status = dp_tx_start_link_training(dev);
			if (status) {
				debug("HPD pulse detection failed\n");
				return status;
			}
			return 0;
		} else if (!hpd_state && hpd_event) {
			debug("+===> HPD disconnection event detected.\n\n");
			/* Disable main link. */
			enable_main_link(dev, 0);
			break;
		}
	}
	return -EINVAL;
}
Michal Simekab2829a2022-02-23 15:52:02 +01002091
/*
 * zynqmp_dpsub_probe() - Probe the DP subsystem video device
 * @dev: The DP device
 *
 * Enable the APB clock, initialise the DP TX core and runtime defaults,
 * describe the frame buffer to the video uclass, configure the graphics
 * pipeline, and finally run link training and the DPDMA setup. Statement
 * order follows the hardware bring-up sequence and must be preserved.
 *
 * Return: 0 on success, negative errno otherwise.
 */
static int zynqmp_dpsub_probe(struct udevice *dev)
{
	struct video_priv *uc_priv = dev_get_uclass_priv(dev);
	struct zynqmp_dpsub_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int ret;
	int mode = RGBA8888;

	ret = clk_get_by_name(dev, "dp_apb_clk", &clk);
	if (ret < 0) {
		dev_err(dev, "failed to get clock\n");
		return ret;
	}

	priv->clock = clk_get_rate(&clk);
	if (IS_ERR_VALUE(priv->clock)) {
		dev_err(dev, "failed to get rate\n");
		return priv->clock;
	}

	ret = clk_enable(&clk);
	if (ret) {
		dev_err(dev, "failed to enable clock\n");
		return ret;
	}

	dev_dbg(dev, "Base addr 0x%x, clock %d\n", (u32)priv->base_addr,
		priv->clock);

	/* Initialize the DisplayPort TX core. */
	ret = init_dp_tx(dev);
	if (ret)
		return -EINVAL;

	/* Initialize the runtime configuration */
	init_run_config(dev);
	/* Set the format graphics frame for Video Pipeline */
	ret = set_nonlive_gfx_format(dev, mode);
	if (ret)
		return ret;

	/* bpp is a power of two; ffs() - 1 maps it to the VIDEO_BPPx enum. */
	uc_priv->bpix = ffs(priv->non_live_graphics->bpp) - 1;
	dev_dbg(dev, "BPP in bits %d, bpix %d\n",
		priv->non_live_graphics->bpp, uc_priv->bpix);

	uc_priv->fb = (void *)gd->fb_base;
	uc_priv->xsize = vidc_video_timing_modes[priv->video_mode].video_timing.h_active;
	uc_priv->ysize = vidc_video_timing_modes[priv->video_mode].video_timing.v_active;
	/* Calculated by core but need it for my own setup */
	uc_priv->line_length = uc_priv->xsize * VNBYTES(uc_priv->bpix);
	/* Will be calculated again in video_post_probe() but I need that value now */
	uc_priv->fb_size = uc_priv->line_length * uc_priv->ysize;

	/* Only RGBA8888 is supported by this driver at present. */
	switch (mode) {
	case RGBA8888:
		uc_priv->format = VIDEO_RGBA8888;
		break;
	default:
		debug("Unsupported mode\n");
		return -EINVAL;
	}

	video_set_flush_dcache(dev, true);
	debug("Video: WIDTH[%d]xHEIGHT[%d]xBPP[%d/%d] -- line length %d\n", uc_priv->xsize,
	      uc_priv->ysize, uc_priv->bpix, VNBYTES(uc_priv->bpix), uc_priv->line_length);

	/* Route the non-live graphics stream through the AV buffer manager. */
	enable_gfx_buffers(dev, 1);
	avbuf_video_select(dev, AVBUF_VIDSTREAM1_NONE, AVBUF_VIDSTREAM2_NONLIVE_GFX);
	config_gfx_pipeline(dev);
	config_output_video(dev);

	ret = zynqmp_dpsub_init(dev);
	if (ret)
		return ret;

	/* Populate the FrameBuffer structure with the frame attributes */
	priv->frame_buffer.stride = uc_priv->line_length;
	priv->frame_buffer.line_size = priv->frame_buffer.stride;
	priv->frame_buffer.size = priv->frame_buffer.line_size * uc_priv->ysize;

	ret = dp_tx_run(dev);
	if (ret)
		return ret;

	return dpdma_setup(dev);
}
2178
2179static int zynqmp_dpsub_bind(struct udevice *dev)
2180{
2181 struct video_uc_plat *plat = dev_get_uclass_plat(dev);
2182
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +02002183 /* This is maximum size to allocate - it depends on BPP setting */
2184 plat->size = WIDTH * HEIGHT * 4;
2185 /* plat->align is not defined that's why 1MB alignment is used */
2186
2187 /*
2188 * plat->base can be used for allocating own location for FB
2189 * if not defined then it is allocated by u-boot itself
2190 */
Michal Simekab2829a2022-02-23 15:52:02 +01002191
2192 return 0;
2193}
2194
Venkatesh Yadav Abbarapued3e0042023-05-17 10:42:10 +02002195static int zynqmp_dpsub_of_to_plat(struct udevice *dev)
2196{
2197 struct zynqmp_dpsub_priv *priv = dev_get_priv(dev);
2198 struct resource res;
2199 int ret;
2200
2201 ret = dev_read_resource_byname(dev, "dp", &res);
2202 if (ret)
2203 return ret;
2204
2205 priv->base_addr = res.start;
2206
2207 return 0;
2208}
Michal Simekab2829a2022-02-23 15:52:02 +01002209
/* Device tree compatibles handled by this driver. */
static const struct udevice_id zynqmp_dpsub_ids[] = {
	{ .compatible = "xlnx,zynqmp-dpsub-1.7" },
	{ }
};

/* Driver model registration for the DP subsystem video device. */
U_BOOT_DRIVER(zynqmp_dpsub_video) = {
	.name = "zynqmp_dpsub_video",
	.id = UCLASS_VIDEO,
	.of_match = zynqmp_dpsub_ids,
	/* Uclass platform data holds the frame buffer size/base/alignment. */
	.plat_auto = sizeof(struct video_uc_plat),
	.bind = zynqmp_dpsub_bind,
	.probe = zynqmp_dpsub_probe,
	.priv_auto = sizeof(struct zynqmp_dpsub_priv),
	.of_to_plat = zynqmp_dpsub_of_to_plat,
};