/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#ifndef _DDR3_TRAINING_IP_H_
#define _DDR3_TRAINING_IP_H_

#include "ddr3_training_ip_def.h"
#include "ddr_topology_def.h"
#include "ddr_training_ip_db.h"

#define MAX_CS_NUM		4
#define MAX_TOTAL_BUS_NUM	(MAX_INTERFACE_NUM * MAX_BUS_NUM)
#define TIP_ENG_LOCK		0x02000000
#define TIP_TX_DLL_RANGE_MAX	64

#define GET_MIN(arg1, arg2)	(((arg1) < (arg2)) ? (arg1) : (arg2))
#define GET_MAX(arg1, arg2)	(((arg1) < (arg2)) ? (arg2) : (arg1))
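/*
 * Note (added for clarity): like most function-like min/max macros,
 * GET_MIN() and GET_MAX() evaluate their arguments more than once, so
 * they should not be used with expressions that have side effects.
 */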

#define INIT_CONTROLLER_MASK_BIT	0x00000001
#define STATIC_LEVELING_MASK_BIT	0x00000002
#define SET_LOW_FREQ_MASK_BIT		0x00000004
#define LOAD_PATTERN_MASK_BIT		0x00000008
#define SET_MEDIUM_FREQ_MASK_BIT	0x00000010
#define WRITE_LEVELING_MASK_BIT		0x00000020
#define LOAD_PATTERN_2_MASK_BIT		0x00000040
#define READ_LEVELING_MASK_BIT		0x00000080
#define SW_READ_LEVELING_MASK_BIT	0x00000100
#define WRITE_LEVELING_SUPP_MASK_BIT	0x00000200
#define PBS_RX_MASK_BIT			0x00000400
#define PBS_TX_MASK_BIT			0x00000800
#define SET_TARGET_FREQ_MASK_BIT	0x00001000
#define ADJUST_DQS_MASK_BIT		0x00002000
#define WRITE_LEVELING_TF_MASK_BIT	0x00004000
#define LOAD_PATTERN_HIGH_MASK_BIT	0x00008000
#define READ_LEVELING_TF_MASK_BIT	0x00010000
#define WRITE_LEVELING_SUPP_TF_MASK_BIT	0x00020000
#define DM_PBS_TX_MASK_BIT		0x00040000
#define RL_DQS_BURST_MASK_BIT		0x00080000
#define CENTRALIZATION_RX_MASK_BIT	0x00100000
#define CENTRALIZATION_TX_MASK_BIT	0x00200000
#define TX_EMPHASIS_MASK_BIT		0x00400000
#define PER_BIT_READ_LEVELING_TF_MASK_BIT	0x00800000
#define VREF_CALIBRATION_MASK_BIT	0x01000000
#define WRITE_LEVELING_LF_MASK_BIT	0x02000000
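/*
 * Illustrative sketch (not part of the original interface description):
 * the *_MASK_BIT values above are one-hot flags, so a set of training
 * stages would typically be selected by OR-ing them together, e.g.
 *
 *	u32 mask = INIT_CONTROLLER_MASK_BIT | SET_LOW_FREQ_MASK_BIT |
 *		   LOAD_PATTERN_MASK_BIT | WRITE_LEVELING_MASK_BIT;
 *
 * The variable name 'mask' is hypothetical and used only for
 * illustration.
 */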

/* DDR4 Specific Training Mask bits */

enum hws_result {
	TEST_FAILED = 0,
	TEST_SUCCESS = 1,
	NO_TEST_DONE = 2
};

enum hws_training_result {
	RESULT_PER_BIT,
	RESULT_PER_BYTE
};

enum auto_tune_stage {
	INIT_CONTROLLER,
	STATIC_LEVELING,
	SET_LOW_FREQ,
	LOAD_PATTERN,
	SET_MEDIUM_FREQ,
	WRITE_LEVELING,
	LOAD_PATTERN_2,
	READ_LEVELING,
	WRITE_LEVELING_SUPP,
	PBS_RX,
	PBS_TX,
	SET_TARGET_FREQ,
	ADJUST_DQS,
	WRITE_LEVELING_TF,
	READ_LEVELING_TF,
	WRITE_LEVELING_SUPP_TF,
	DM_PBS_TX,
	VREF_CALIBRATION,
	CENTRALIZATION_RX,
	CENTRALIZATION_TX,
	TX_EMPHASIS,
	LOAD_PATTERN_HIGH,
	PER_BIT_READ_LEVELING_TF,
	WRITE_LEVELING_LF,
	MAX_STAGE_LIMIT
};

enum hws_access_type {
	ACCESS_TYPE_UNICAST = 0,
	ACCESS_TYPE_MULTICAST = 1
};

enum hws_algo_type {
	ALGO_TYPE_DYNAMIC,
	ALGO_TYPE_STATIC
};

struct init_cntr_param {
	int is_ctrl64_bit;
	int do_mrs_phy;
	int init_phy;
	int msys_init;
};

struct pattern_info {
	u8 num_of_phases_tx;
	u8 tx_burst_size;
	u8 delay_between_bursts;
	u8 num_of_phases_rx;
	u32 start_addr;
	u8 pattern_len;
};

/* CL value for each frequency */
struct cl_val_per_freq {
	u8 cl_val[DDR_FREQ_LAST];
};

struct cs_element {
	u8 cs_num;
	u8 num_of_cs;
};

struct mode_info {
	/* 32 bits representing MRS bits */
	u32 reg_mr0[MAX_INTERFACE_NUM];
	u32 reg_mr1[MAX_INTERFACE_NUM];
	u32 reg_mr2[MAX_INTERFACE_NUM];
	u32 reg_m_r3[MAX_INTERFACE_NUM];
	/*
	 * Each element in the array holds the read_data_sample register
	 * delay for a specific interface.
	 * In each register, the 4 bits [0+CS*8 to 4+CS*8] represent the
	 * number of DDR cycles from the read command until data is ready
	 * to be fetched from the PHY, when accessing that CS.
	 */
	u32 read_data_sample[MAX_INTERFACE_NUM];
	/*
	 * Each element in the array holds the read_data_ready register
	 * delay for a specific interface.
	 * In each register, the 4 bits [0+CS*8 to 4+CS*8] represent the
	 * total delay from the read command until the read mask is opened,
	 * when accessing that CS.
	 * This field defines the delay in DDR-cycle granularity.
	 */
	u32 read_data_ready[MAX_INTERFACE_NUM];
};
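/*
 * Illustrative example (assumes a struct mode_info pointer 'minfo'
 * filled in by hws_ddr3_tip_mode_read() below): with the 4-bits-per-CS
 * layout described in struct mode_info, the read sample delay for
 * chip-select 'cs' on interface 'if_id' would be extracted as
 *
 *	u32 delay = (minfo->read_data_sample[if_id] >> (cs * 8)) & 0xf;
 *
 * 'minfo', 'cs' and 'if_id' are hypothetical names used only for
 * illustration.
 */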

struct hws_tip_freq_config_info {
	u8 is_supported;
	u8 bw_per_freq;
	u8 rate_per_freq;
};

struct hws_cs_config_info {
	u32 cs_reg_value;
	u32 cs_cbe_value;
};

struct dfx_access {
	u8 pipe;
	u8 client;
};

struct hws_xsb_info {
	struct dfx_access *dfx_table;
};

int ddr3_tip_register_dq_table(u32 dev_num, u32 *table);
int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable);
int hws_ddr3_tip_init_controller(u32 dev_num,
	struct init_cntr_param *init_cntr_prm);
int hws_ddr3_tip_load_topology_map(u32 dev_num,
	struct mv_ddr_topology_map *topology);
int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type);
int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info);
int hws_ddr3_tip_read_training_result(u32 dev_num,
	enum hws_result result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM]);
int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode);
u8 ddr3_tip_get_buf_min(u8 *buf_ptr);
u8 ddr3_tip_get_buf_max(u8 *buf_ptr);
uint64_t mv_ddr_get_memory_size_per_cs_in_bits(void);
uint64_t mv_ddr_get_total_memory_size_in_bits(void);
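
/*
 * Illustrative call sequence (an assumption based on the declarations
 * above, not a documented flow): a caller might load the topology map,
 * initialize the controller, run the training algorithm and then read
 * back the per-stage/per-interface results, e.g.
 *
 *	struct init_cntr_param prm = { .do_mrs_phy = 1, .init_phy = 1 };
 *	enum hws_result res[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
 *
 *	hws_ddr3_tip_load_topology_map(0, tm);
 *	hws_ddr3_tip_init_controller(0, &prm);
 *	hws_ddr3_tip_run_alg(0, ALGO_TYPE_DYNAMIC);
 *	hws_ddr3_tip_read_training_result(0, res);
 *
 * Here 'tm' stands for a struct mv_ddr_topology_map pointer and the
 * dev_num value 0 is a placeholder; both are illustrative assumptions.
 */
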
#endif /* _DDR3_TRAINING_IP_H_ */