// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_hw_training.h"
#include "xor.h"
#include "xor_regs.h"

static void ddr3_flush_l1_line(u32 line);

extern u32 pbs_pattern[2][LEN_16BIT_PBS_PATTERN];
extern u32 pbs_pattern_32b[2][LEN_PBS_PATTERN];
#if defined(MV88F78X60)
extern u32 pbs_pattern_64b[2][LEN_PBS_PATTERN];
#endif
extern u32 pbs_dq_mapping[PUP_NUM_64BIT + 1][DQ_NUM];

#if defined(MV88F78X60) || defined(MV88F672X)
/* PBS locked dq (per pup) */
u32 pbs_locked_dq[MAX_PUP_NUM][DQ_NUM] = { { 0 } };
u32 pbs_locked_dm[MAX_PUP_NUM] = { 0 };
u32 pbs_locked_value[MAX_PUP_NUM][DQ_NUM] = { { 0 } };

int per_bit_data[MAX_PUP_NUM][DQ_NUM];
#endif

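/*
 * 32-byte aligned bounce buffer that DRAM contents are read back into
 * before being compared against the expected pattern.
 */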
static u32 sdram_data[LEN_KILLER_PATTERN] __aligned(32) = { 0 };

static struct crc_dma_desc dma_desc __aligned(32) = { 0 };

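/* Upper bound on busy-wait iterations when polling the XOR engine */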
#define XOR_TIMEOUT	0x8000000

struct xor_channel_t {
	struct crc_dma_desc *desc;
	unsigned long desc_phys_addr;
};

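/*
 * Completion bits in the XOR interrupt cause register; each channel owns
 * a 16-bit field, and bits 0/1 of that field (end-of-descriptor and
 * end-of-chain, as assumed here) signal that the transfer finished.
 */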
#define XOR_CAUSE_DONE_MASK(chan)	((0x1 | 0x2) << (chan * 16))

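/*
 * Busy-wait until XOR channel 'chan' signals completion and returns to
 * idle, then acknowledge the interrupt cause bits. Gives up silently
 * after XOR_TIMEOUT polling iterations.
 */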
void xor_waiton_eng(int chan)
{
	int timeout;

	timeout = 0;
	while (!(reg_read(XOR_CAUSE_REG(XOR_UNIT(chan))) &
		 XOR_CAUSE_DONE_MASK(XOR_CHAN(chan)))) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;

		timeout++;
	}

	timeout = 0;
	while (mv_xor_state_get(chan) != MV_IDLE) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;

		timeout++;
	}

	/* Clear int */
	reg_write(XOR_CAUSE_REG(XOR_UNIT(chan)),
		  ~(XOR_CAUSE_DONE_MASK(XOR_CHAN(chan))));

timeout:
	return;
}

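/*
 * Pattern words that are skipped when the caller requests a "special"
 * compare (see ddr3_sdram_compare() and ddr3_sdram_dqs_compare()).
 */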
static int special_compare_pattern(u32 uj)
{
	if ((uj == 30) || (uj == 31) || (uj == 61) || (uj == 62) ||
	    (uj == 93) || (uj == 94) || (uj == 126) || (uj == 127))
		return 1;

	return 0;
}

/*
 * Compare code extracted into a helper as it is used by multiple
 * functions. This reduces code size and makes maintenance easier.
 * It also keeps the nesting depth down, making the code easier to read.
 */
static void compare_pattern_v1(u32 uj, u32 *pup, u32 *pattern,
			       u32 pup_groups, int debug_dqs)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;
	__maybe_unused u32 dq;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0xFF)) {
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
			var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);

			if (var1 != var2) {
				*pup |= (1 << (uk + (PUP_NUM_32BIT *
						     (uj % pup_groups))));

#ifdef MV_DEBUG_DQS
				if (!debug_dqs)
					continue;

				for (dq = 0; dq < DQ_NUM; dq++) {
					val = uk + (PUP_NUM_32BIT *
						    (uj % pup_groups));
					if (((var1 >> dq) & 0x1) !=
					    ((var2 >> dq) & 0x1))
						per_bit_data[val][dq] = 1;
					else
						per_bit_data[val][dq] = 0;
				}
#endif
			}
		}
	}
}

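/*
 * 16-bit interface variant of compare_pattern_v1(): a mismatching byte
 * lane maps onto one of the two PUPs, which is flagged in *pup.
 */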
static void compare_pattern_v2(u32 uj, u32 *pup, u32 *pattern)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0x3)) {
		/* Found error */
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = (sdram_data[uj] >> val) & CMP_BYTE_MASK;
			var2 = (pattern[uj] >> val) & CMP_BYTE_MASK;
			if (var1 != var2)
				*pup |= (1 << (uk % PUP_NUM_16BIT));
		}
	}
}

/*
 * Name:     ddr3_sdram_compare
 * Desc:     Execute compare per PUP
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           pattern_len      Length of pattern (in 32-bit words)
 *           sdram_offset     Offset address into the SDRAM
 *           write            Write to the SDRAM before read
 *           mask             Compare pattern with mask
 *           mask_pattern     Mask to compare pattern
 *           special_compare  Skip the "special" pattern words
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
		       u32 *new_locked_pup, u32 *pattern,
		       u32 pattern_len, u32 sdram_offset, int write,
		       int mask, u32 *mask_pattern,
		       int special_compare)
{
	u32 uj;
	__maybe_unused u32 pup_groups;
	__maybe_unused u32 dq;

#if !defined(MV88F67XX)
	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;
#endif

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

#if defined(MV88F78X60) || defined(MV88F672X)
		compare_pattern_v1(uj, new_locked_pup, pattern, pup_groups, 1);
#elif defined(MV88F67XX)
		compare_pattern_v2(uj, new_locked_pup, pattern);
#endif
	}

	return MV_OK;
}

#if defined(MV88F78X60) || defined(MV88F672X)
/*
 * Name:     ddr3_sdram_dm_compare
 * Desc:     Execute compare per PUP, including a check of the DM signals
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           sdram_offset     Offset address into the SDRAM
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_dm_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			  u32 *new_locked_pup, u32 *pattern,
			  u32 sdram_offset)
{
	u32 uj, uk, var1, var2, pup_groups;
	u32 val;
	u32 pup = 0;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_dram_sram_burst((u32)pattern, SDRAM_PBS_TX_OFFS,
			     LEN_PBS_PATTERN);
	ddr3_dram_sram_burst(SDRAM_PBS_TX_OFFS, (u32)sdram_data,
			     LEN_PBS_PATTERN);

	/* Validate the correctness of the results */
	for (uj = 0; uj < LEN_PBS_PATTERN; uj++)
		compare_pattern_v1(uj, &pup, pattern, pup_groups, 0);

	/* Test the DM Signals */
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x10) = 0x12345678;
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x14) = 0x12345678;

	sdram_data[0] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x10);
	sdram_data[1] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x14);

	for (uj = 0; uj < 2; uj++) {
		if (((sdram_data[uj]) != (pattern[uj])) &&
		    (*new_locked_pup != 0xFF)) {
			for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
				val = CMP_BYTE_SHIFT * uk;
				var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
				var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);
				if (var1 != var2) {
					*new_locked_pup |= (1 << (uk +
						(PUP_NUM_32BIT * (uj % pup_groups))));
					*new_locked_pup |= pup;
				}
			}
		}
	}

	return MV_OK;
}

/*
 * Name:     ddr3_sdram_pbs_compare
 * Desc:     Execute SRAM compare per PUP and DQ.
 * Args:     pup_locked           Bit array of locked pups
 *           is_tx                Indicates whether Rx or Tx
 *           pbs_pattern_idx      Index of PBS pattern
 *           pbs_curr_val         PBS value currently being tested
 *           pbs_lock_val         The value to set to locked PBS
 *           skew_array           Global array to update with the compare results
 *           unlock_pup_dq_array  Bit array of the locked / unlocked pups per dq
 *           ecc                  When set, only the ECC PUP is considered
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_pbs_compare(MV_DRAM_INFO *dram_info, u32 pup_locked,
			   int is_tx, u32 pbs_pattern_idx,
			   u32 pbs_curr_val, u32 pbs_lock_val,
			   u32 *skew_array, u8 *unlock_pup_dq_array,
			   u32 ecc)
{
	/* bit array failed dq per pup for current compare */
	u32 pbs_write_pup[DQ_NUM] = { 0 };
	u32 update_pup;	/* pup as HW convention */
	u32 max_pup;	/* maximal pup index */
	u32 pup_addr;
	u32 ui, dq, pup;
	int var1, var2;
	u32 sdram_offset, pup_groups, tmp_pup;
	u32 *pattern_ptr;
	u32 val;

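	/*
	 * Flow: pick the pattern that matches the DDR bus width, (for Tx)
	 * burst it into DRAM via the XOR engine, read it back, then for
	 * every DQ bit that miscompares record pbs_curr_val in skew_array
	 * and program pbs_lock_val into that DQ's PBS register.
	 */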
	/* Choose pattern */
	switch (dram_info->ddr_width) {
#if defined(MV88F672X)
	case 16:
		pattern_ptr = (u32 *)&pbs_pattern[pbs_pattern_idx];
		break;
#endif
	case 32:
		pattern_ptr = (u32 *)&pbs_pattern_32b[pbs_pattern_idx];
		break;
#if defined(MV88F78X60)
	case 64:
		pattern_ptr = (u32 *)&pbs_pattern_64b[pbs_pattern_idx];
		break;
#endif
	default:
		return MV_FAIL;
	}

	max_pup = dram_info->num_of_std_pups;

	sdram_offset = SDRAM_PBS_I_OFFS + pbs_pattern_idx * SDRAM_PBS_NEXT_OFFS;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (is_tx == 1) {
		ddr3_dram_sram_burst((u32)pattern_ptr, sdram_offset,
				     LEN_PBS_PATTERN);
	}

	ddr3_dram_sram_read(sdram_offset, (u32)sdram_data, LEN_PBS_PATTERN);

	/* Compare read result to write */
	for (ui = 0; ui < LEN_PBS_PATTERN; ui++) {
		if ((sdram_data[ui]) != (pattern_ptr[ui])) {
			/* found error */
			/* error in low pup group */
			for (pup = 0; pup < PUP_NUM_32BIT; pup++) {
				val = CMP_BYTE_SHIFT * pup;
				var1 = ((sdram_data[ui] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern_ptr[ui] >> val) &
					CMP_BYTE_MASK);

				if (var1 != var2) {
					if (dram_info->ddr_width > 16) {
						tmp_pup = (pup + PUP_NUM_32BIT *
							   (ui % pup_groups));
					} else {
						tmp_pup = (pup % PUP_NUM_16BIT);
					}

					update_pup = (1 << tmp_pup);
					if (ecc && (update_pup != 0x1))
						continue;

					/*
					 * Pup is failed - Go over all DQs and
					 * look for failures
					 */
					for (dq = 0; dq < DQ_NUM; dq++) {
						val = tmp_pup * (1 - ecc) +
							ecc * ECC_PUP;
						if (((var1 >> dq) & 0x1) !=
						    ((var2 >> dq) & 0x1)) {
							if (pbs_locked_dq[val][dq] == 1 &&
							    pbs_locked_value[val][dq] != pbs_curr_val)
								continue;

							/*
							 * Activate write to
							 * update PBS to
							 * pbs_lock_val
							 */
							pbs_write_pup[dq] |=
								update_pup;

							/*
							 * Update the
							 * unlock_pup_dq_array
							 */
							unlock_pup_dq_array[dq] &=
								~update_pup;

							/*
							 * Lock PBS value for
							 * failed bits in
							 * compare operation
							 */
							skew_array[tmp_pup * DQ_NUM + dq] =
								pbs_curr_val;
						}
					}
				}
			}
		}
	}

	pup_addr = (is_tx == 1) ? PUP_PBS_TX : PUP_PBS_RX;

	/* Set last failed bits PBS to min / max pbs value */
	for (dq = 0; dq < DQ_NUM; dq++) {
		for (pup = 0; pup < max_pup; pup++) {
			if (pbs_write_pup[dq] & (1 << pup)) {
				val = pup * (1 - ecc) + ecc * ECC_PUP;
				if (pbs_locked_dq[val][dq] == 1 &&
				    pbs_locked_value[val][dq] != pbs_curr_val)
					continue;

				/* Mark the dq as locked */
				pbs_locked_dq[val][dq] = 1;
				pbs_locked_value[val][dq] = pbs_curr_val;
				ddr3_write_pup_reg(pup_addr +
						   pbs_dq_mapping[val][dq],
						   CS0, val, 0, pbs_lock_val);
			}
		}
	}

	return MV_OK;
}
#endif

/*
 * Name:     ddr3_sdram_direct_compare
 * Desc:     Execute compare per PUP without DMA (no burst mode)
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           pattern_len      Length of pattern (in 32-bit words)
 *           sdram_offset     Offset address into the SDRAM
 *           write            Write to the SDRAM before read
 *           mask             Compare pattern with mask
 *           mask_pattern     Mask to compare pattern
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_direct_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			      u32 *new_locked_pup, u32 *pattern,
			      u32 pattern_len, u32 sdram_offset,
			      int write, int mask, u32 *mask_pattern)
{
	u32 uj, uk, pup_groups;
	u32 *sdram_addr;	/* used to read from SDRAM */

	sdram_addr = (u32 *)sdram_offset;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	/* Check if need to write before read */
	if (write == 1) {
		for (uk = 0; uk < pattern_len; uk++) {
			*sdram_addr = pattern[uk];
			sdram_addr++;
		}
	}

	sdram_addr = (u32 *)sdram_offset;

	for (uk = 0; uk < pattern_len; uk++) {
		sdram_data[uk] = *sdram_addr;
		sdram_addr++;
	}

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 0);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}

/*
 * Name:     ddr3_dram_sram_burst
 * Desc:     Copy between DRAM and SRAM in bursts of 64 bytes
 * Args:     src    Source address (DRAM CS or SRAM)
 *           dst    Destination address (SRAM or DRAM CS)
 *           len    Length to copy, in 32-bit words
 * Notes:    Uses the XOR (DMA) engine
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_dram_sram_burst(u32 src, u32 dst, u32 len)
{
	u32 chan, byte_count, cs_num, byte;
	struct xor_channel_t channel;

	chan = 0;
	byte_count = len * 4;

	/* Wait for previous transfer completion */
	while (mv_xor_state_get(chan) != MV_IDLE)
		;

	/* Build the channel descriptor */
	channel.desc = &dma_desc;

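	/*
	 * One end of the copy is always SRAM and the other a DRAM chip
	 * select. The XOR address override register is programmed so the
	 * engine targets that chip select, and the DRAM address in the
	 * descriptor is reduced to an offset within the CS.
	 */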
	/* Enable Address Override and set correct src and dst */
	if (src < SRAM_BASE) {
		/* src is DRAM CS, dst is SRAM */
		cs_num = (src / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 1) | (1 << 0)));
		channel.desc->src_addr0 = (src % (1 + SDRAM_CS_SIZE));
		channel.desc->dst_addr = dst;
	} else {
		/* src is SRAM, dst is DRAM CS */
		cs_num = (dst / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 25) | (1 << 24)));
		channel.desc->src_addr0 = src;
		channel.desc->dst_addr = (dst % (1 + SDRAM_CS_SIZE));
	}

	channel.desc->src_addr1 = 0;
	channel.desc->byte_cnt = byte_count;
	channel.desc->next_desc_ptr = 0;
	channel.desc->status = 1 << 31;
	channel.desc->desc_cmd = 0x0;
	channel.desc_phys_addr = (unsigned long)&dma_desc;

	ddr3_flush_l1_line((u32)&dma_desc);

	/* Issue the transfer */
	if (mv_xor_transfer(chan, MV_DMA, channel.desc_phys_addr) != MV_OK)
		return MV_FAIL;

	/* Wait for completion */
	xor_waiton_eng(chan);

	if (dst > SRAM_BASE) {
		for (byte = 0; byte < byte_count; byte += 0x20)
			cache_inv(dst + byte);
	}

	return MV_OK;
}

/*
 * Name:     ddr3_flush_l1_line
 * Desc:     Flush the two consecutive L1 cache lines starting at 'line',
 *           using the v7 or v6 cache maintenance routine depending on
 *           the CPU architecture sampled at reset
 * Args:     line    Address of the data to flush
 * Notes:
 * Returns:  None
 */
static void ddr3_flush_l1_line(u32 line)
{
	u32 reg;

#if defined(MV88F672X)
	reg = 1;
#else
	reg = reg_read(REG_SAMPLE_RESET_LOW_ADDR) &
		(1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#ifdef MV88F67XX
	reg = ~reg & (1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#endif
#endif

	if (reg) {
		/* V7 Arch mode */
		flush_l1_v7(line);
		flush_l1_v7(line + CACHE_LINE_SIZE);
	} else {
		/* V6 Arch mode */
		flush_l1_v6(line);
		flush_l1_v6(line + CACHE_LINE_SIZE);
	}
}

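/* Plain CPU copy of 'len' 32-bit words from 'src' to 'dst' (no DMA) */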
int ddr3_dram_sram_read(u32 src, u32 dst, u32 len)
{
	u32 ui;
	u32 *dst_ptr, *src_ptr;

	dst_ptr = (u32 *)dst;
	src_ptr = (u32 *)src;

	for (ui = 0; ui < len; ui++) {
		*dst_ptr = *src_ptr;
		dst_ptr++;
		src_ptr++;
	}

	return MV_OK;
}

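/*
 * Variant of ddr3_sdram_compare() that selects the compare routine by
 * DDR bus width at run time instead of at compile time: optionally
 * write 'pattern' to DRAM, read it back and flag failing PUPs in
 * *new_locked_pup.
 */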
int ddr3_sdram_dqs_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			   u32 *new_locked_pup, u32 *pattern,
			   u32 pattern_len, u32 sdram_offset, int write,
			   int mask, u32 *mask_pattern,
			   int special_compare)
{
	u32 uj, pup_groups;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 1);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}

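/*
 * Reset the PHY read FIFO: raise the auto read-leveling bit, pulse the
 * FIFO reset via the SW override in the training registers, wait for
 * the reset bit to self-clear, then drop the read-leveling bit again.
 */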
void ddr3_reset_phy_read_fifo(void)
{
	u32 reg;

	/* reset read FIFO */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	/* Start Auto Read Leveling procedure */
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);

	/* 0x15B0 - Training Register */
	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));

	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	do {
		reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
			(1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS);
	} while (reg);	/* Wait for '0' */

	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Clear Auto Read Leveling procedure */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);

	/* 0x15B0 - Training Register */
	reg_write(REG_DRAM_TRAINING_ADDR, reg);
667}