1 /* SPDX-License-Identifier: BSD-3-Clause */
2
3 //-----------------------------------------------------------------------------
4 // Include files
5 //-----------------------------------------------------------------------------
6 #include "dramc_top.h"
7 #include "dramc_common.h"
8 #include "dramc_int_global.h" //for gu1BroadcastIsLP4
9 #include "dramc_dv_init.h"
10 #include "dramc_actiming.h"
11 #include "x_hal_io.h"
12 #if __ETT__
13 #include <barriers.h>
14 #endif
15 #include "emi.h"
16
17 //-----------------------------------------------------------------------------
18 // Global variables
19 //-----------------------------------------------------------------------------
20
21 #if (fcFOR_CHIP_ID == fcA60868)
22 U8 u1EnterRuntime;
23 #endif
24
25 U8 u1IsLP4Family(DRAM_DRAM_TYPE_T dram_type)
26 {
27 if (dram_type == TYPE_LPDDR5)
28 return FALSE;
29 else
30 return TRUE;
31 }
32
33 u8 is_lp5_family(DRAMC_CTX_T *p)
34 {
35 return p->dram_type == TYPE_LPDDR5? TRUE: FALSE;
36 }
37
38 u8 is_heff_mode(DRAMC_CTX_T *p)
39 {
40 u8 res = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_COMMON0),
41 SHU_COMMON0_LP5HEFF_MODE);
42 msg5("HEFF Mode: %d\n", res);
43 return res? TRUE: FALSE;
44 }
45
46 static u8 lp5heff;
47
48 u8 lp5heff_save_disable(DRAMC_CTX_T *p)
49 {
50 /* save it */
51 lp5heff = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_COMMON0),
52 SHU_COMMON0_LP5HEFF_MODE);
53
54 /* disable it */
55 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_COMMON0),
56 P_Fld(0, SHU_COMMON0_LP5HEFF_MODE));
57 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RKCFG),
58 0, RKCFG_CKE2RANK);
59
60
61 return lp5heff;
62 }
63
64 void lp5heff_restore(DRAMC_CTX_T *p)
65 {
66 /* restore it */
67 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_COMMON0),
68 P_Fld(lp5heff, SHU_COMMON0_LP5HEFF_MODE));
69 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RKCFG),
70 lp5heff, RKCFG_CKE2RANK);
71 }
72
73 #if FOR_DV_SIMULATION_USED
74 U8 u1BroadcastOnOff = 0;
75 #endif
76 U32 GetDramcBroadcast(void)
77 {
78 #if (fcFOR_CHIP_ID == fcA60868)
79 return 0;
80 #endif
81
82 #if (FOR_DV_SIMULATION_USED == 0)
83 return *((volatile unsigned int *)(DRAMC_WBR));
84 #else
85 return u1BroadcastOnOff;
86 #endif
87 }
88
89 void DramcBroadcastOnOff(U32 bOnOff)
90 {
91 #if (FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
92
93 #if (fcFOR_CHIP_ID == fcA60868)
94 return; //disable broadcast in A60868
95 #endif
96
97 #if __Petrus_TO_BE_PORTING__
98 U8 u1BroadcastStatus = 0;
99 // INFRA_RSVD3[9:8] = protect_set_clr_mask
100 u1BroadcastStatus = (*((volatile unsigned int *)(INFRA_RSVD3)) >> 8) & 0x3;
101 if (u1BroadcastStatus & 0x1) // Enable new infra access by Preloader
102 {
103 if (bOnOff == DRAMC_BROADCAST_ON)
104 *((volatile unsigned int *)(DRAMC_WBR_SET)) = DRAMC_BROADCAST_SET;
105 else
106 *((volatile unsigned int *)(DRAMC_WBR_CLR)) = DRAMC_BROADCAST_CLR;
107 }
108 else
109 #endif
110 *((volatile unsigned int *)(DRAMC_WBR)) = bOnOff;
111 dsb();
112 #endif
113
114 #if (FOR_DV_SIMULATION_USED == 1)
115 if (gu1BroadcastIsLP4 == TRUE)
116 {
117 #if (fcFOR_CHIP_ID == fcA60868)
118 bOnOff = 0;
119 #endif
120 if (bOnOff)
121 {
122 broadcast_on();
123 msg("Broadcast ON\n");
124 u1BroadcastOnOff = bOnOff;
125 }
126 else
127 {
128 broadcast_off();
129 msg("Broadcast OFF\n");
130 u1BroadcastOnOff = bOnOff;
131 }
132 }
133 #endif
134
135 #ifdef DUMP_INIT_RG_LOG_TO_DE
136 if(gDUMP_INIT_RG_LOG_TO_DE_RG_log_flag)
137 {
138 U8 u1BroadcastStatus = 0;
139 U32 addr, val;
140
141 addr = DRAMC_WBR;
142 val = bOnOff;
143 // *((volatile unsigned int *)(DRAMC_WBR)) = bOnOff;
144
145 mcSHOW_DUMP_INIT_RG_MSG("*(UINT32P)(0x%x)) = 0x%x;\n",addr, val);
146 // mcDELAY_MS(1);
147 #if (FOR_DV_SIMULATION_USED==0)
148 GPT_Delay_ms(1);
149 #endif
150 }
151 #endif
152 }
153
154
155
156 #if __ETT__
157 const U32 u4Cannot_Use_Dramc_WBR_Reg[]=
158 {
159 DDRPHY_REG_CA_DLL_ARPI5,
160 DDRPHY_REG_B0_DLL_ARPI5,
161 DDRPHY_REG_B1_DLL_ARPI5,
162
163 DDRPHY_REG_SHU_CA_DLL0,
164 DDRPHY_REG_SHU_CA_DLL1,
165
166 DDRPHY_REG_CA_LP_CTRL0,
167
168 DDRPHY_REG_MISC_DVFSCTL2,
169 DDRPHY_REG_MISC_SHU_OPT,
170
171 DDRPHY_REG_MISC_DVFSCTL,
172 DDRPHY_REG_MISC_DVFSCTL3,
173
174 DDRPHY_REG_MISC_CKMUX_SEL,
175 DRAMC_REG_DVFS_CTRL0
176 };
177 #define CANNOT_USE_WBR_SIZE ((sizeof(u4Cannot_Use_Dramc_WBR_Reg)) / (sizeof(U32)))
178 void CheckDramcWBR(U32 u4address)
179 {
180
181 U32 i, channel_and_value;
182 if (GetDramcBroadcast()==DRAMC_BROADCAST_ON)
183 {
184 #if ((CHANNEL_NUM == 1) || (CHANNEL_NUM == 2))
185 channel_and_value = 0x1;
186 #else //for channel number = 3 or 4
187 channel_and_value = 0x3;
188 #endif
189 if ((((u4address - Channel_A_DRAMC_NAO_BASE_VIRTUAL) >> POS_BANK_NUM) & channel_and_value) != CHANNEL_A)
190 {
191 err("Error! virtual address 0x%x is not CHA and cannot use Dramc WBR\n", u4address);
192 while (1);
193 }
194 for (i = 0; i < CANNOT_USE_WBR_SIZE; i++)
195 {
196 if (u4Cannot_Use_Dramc_WBR_Reg[i] == u4address)
197 {
198 err("Error! virtual address 0x%x cannot use Dramc WBR\n", u4address);
199 while (1);
200 }
201 }
202 }
203 }
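/*
 * Worked example (illustrative only; the concrete POS_BANK_NUM and base values
 * are assumptions, not defined in this file): with CHANNEL_NUM == 2 the channel
 * index is a single bit at POS_BANK_NUM, so for a channel-B register located at
 * Channel_A_DRAMC_NAO_BASE_VIRTUAL + (1 << POS_BANK_NUM) the expression
 * ((u4address - Channel_A_DRAMC_NAO_BASE_VIRTUAL) >> POS_BANK_NUM) & 0x1
 * evaluates to 1, which fails the "!= CHANNEL_A" check in CheckDramcWBR()
 * above, so such an address must not be written with broadcast enabled.
 */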
204 #endif
205
206 void vSetPHY2ChannelMapping(DRAMC_CTX_T *p, U8 u1Channel)
207 {
208 p->channel = (DRAM_CHANNEL_T)u1Channel;
209 }
210
211 U8 vGetPHY2ChannelMapping(DRAMC_CTX_T *p)
212 {
213 return p->channel;
214 }
215
216 void vSetChannelNumber(DRAMC_CTX_T *p)
217 {
218 #if 1//(!FOR_DV_SIMULATION_USED)
219 p->support_channel_num = CHANNEL_NUM;
220 #else
221 p->support_channel_num = CHANNEL_SINGLE;
222 #endif
223 }
224
225 void vSetRank(DRAMC_CTX_T *p, U8 ucRank)
226 {
227 p->rank = (DRAM_RANK_T)ucRank;
228 }
229
230 U8 u1GetRank(DRAMC_CTX_T *p)
231 {
232 return p->rank;
233 }
234
235 void vSetRankNumber(DRAMC_CTX_T *p)
236 {
237 #if(FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
238 if (u4IO32ReadFldAlign(DRAMC_REG_SA_RESERVE, SA_RESERVE_SINGLE_RANK) == 1)
239 {
240 p->support_rank_num =RANK_SINGLE;
241 }
242 else
243 #endif
244 {
245 p->support_rank_num = RANK_DUAL;
246 }
247 }
248
249 void vSetFSPNumber(DRAMC_CTX_T *p)
250 {
251 #if (__LP5_COMBO__ == TRUE)
252 if (TRUE == is_lp5_family(p))
253 p->support_fsp_num = 3;
254 else
255 #endif
256 p->support_fsp_num = 2;
257 }
258
259 static void setFreqGroup(DRAMC_CTX_T *p)
260 {
261
262 /* Below listed conditions represent freqs that exist in ACTimingTable
263 * -> Should cover freqGroup settings for all real freq values
264 */
265 #if (__LP5_COMBO__ == TRUE)
266 if (TRUE == is_lp5_family(p))
267 {
268 if (p->frequency <= 400) // DDR800
269 {
270 p->freqGroup = 400;
271 }
272 else if (p->frequency <= 600) // DDR1200
273 {
274 p->freqGroup = 600;
275 }
276 else if (p->frequency <= 800) // DDR1600
277 {
278 p->freqGroup = 800;
279 }
280 else if (p->frequency <= 933) //DDR1866
281 {
282 p->freqGroup = 933;
283 }
284 else if (p->frequency <= 1200) //DDR2400, DDR2280
285 {
286 p->freqGroup = 1200;
287 }
288 else if (p->frequency <= 1600) // DDR3200
289 {
290 p->freqGroup = 1600;
291 }
292 else if (p->frequency <= 1866) // DDR3733
293 {
294 p->freqGroup = 1866;
295 }
296 else if (p->frequency <= 2133) // DDR4266
297 {
298 p->freqGroup = 2133;
299 }
300 else if (p->frequency <= 2400) // DDR4800
301 {
302 p->freqGroup = 2400;
303 }
304 else if (p->frequency <= 2750) // DDR5500
305 {
306 p->freqGroup = 2750;
307 }
308 else if (p->frequency <= 3000) // DDR6000
309 {
310 p->freqGroup = 3000;
311 }
312 else // DDR6600
313 {
314 p->freqGroup = 3300;
315 }
316 }
317 else
318 #endif
319 {
320 if (p->frequency <= 200) // DDR400
321 {
322 p->freqGroup = 200;
323 }
324 else if (p->frequency <= 400) // DDR800
325 {
326 p->freqGroup = 400;
327 }
328 else if (p->frequency <= 600) // DDR1200
329 {
330 p->freqGroup = 600;
331 }
332 else if (p->frequency <= 800) // DDR1600
333 {
334 p->freqGroup = 800;
335 }
336 else if (p->frequency <= 933) //DDR1866
337 {
338 p->freqGroup = 933;
339 }
340 else if (p->frequency <= 1200) //DDR2400, DDR2280
341 {
342 p->freqGroup = 1200;
343 }
344 else if (p->frequency <= 1333) // DDR2667
345 {
346 p->freqGroup = 1333;
347 }
348 else if (p->frequency <= 1600) // DDR3200
349 {
350 p->freqGroup = 1600;
351 }
352 else if (p->frequency <= 1866) // DDR3733
353 {
354 p->freqGroup = 1866;
355 }
356 else // DDR4266
357 {
358 p->freqGroup = 2133;
359 }
360 }
361
362 msg3("[setFreqGroup] p->frequency %u, freqGroup: %u\n", p->frequency, p->freqGroup);
363 return;
364 }
365
366
367 #define CKGEN_FMETER 0x0
368 #define ABIST_FMETER 0x1
369
370 U16 gddrphyfmeter_value = 0;
371 U16 DDRPhyFMeter(void)
372 {
373 return gddrphyfmeter_value;
374 }
375
376 #if __ETT__ || defined(SLT)
377 void GetPhyPllFrequency(DRAMC_CTX_T *p)
378 {
379 //U8 u1ShuLevel = u4IO32ReadFldAlign(DRAMC_REG_SHUSTATUS, SHUSTATUS_SHUFFLE_LEVEL);
380 U8 u1ShuLevel = u4IO32ReadFldAlign(DDRPHY_REG_DVFS_STATUS, DVFS_STATUS_OTHER_SHU_GP);
381 U32 u4PLL5_ADDR = DDRPHY_REG_SHU_PHYPLL1 + SHU_GRP_DDRPHY_OFFSET * u1ShuLevel;
382 U32 u4PLL8_ADDR = DDRPHY_REG_SHU_PHYPLL2 + SHU_GRP_DDRPHY_OFFSET * u1ShuLevel;
383 U32 u4B0_DQ = DDRPHY_REG_SHU_B0_DQ1 + SHU_GRP_DDRPHY_OFFSET * u1ShuLevel;
384 U32 u4PLL3_ADDR = DDRPHY_REG_SHU_PHYPLL3 + SHU_GRP_DDRPHY_OFFSET * u1ShuLevel;
385 //Darren-U32 u4PLL4 = DDRPHY_SHU_PLL4 + SHU_GRP_DDRPHY_OFFSET * u1ShuLevel; // for DDR4266
386 U32 u4B0_DQ6 = DDRPHY_REG_SHU_B0_DQ6 + SHU_GRP_DDRPHY_OFFSET * u1ShuLevel;
387
388 /* VCOFreq = FREQ_XTAL x (RG_*_RPHYPLL_SDM_PCW / 2^8) / 2^(RG_*_RPHYPLL_PREDIV) / 2^(RG_*_RPHYPLL_POSDIV) x 2^(RG_*_RPHYPLL_FBKSEL); matches the RPHYPLL fields read below */
389 U32 u4SDM_PCW = u4IO32ReadFldAlign(u4PLL5_ADDR, SHU_PHYPLL1_RG_RPHYPLL_SDM_PCW);
390 U32 u4PREDIV = u4IO32ReadFldAlign(u4PLL8_ADDR, SHU_PHYPLL2_RG_RPHYPLL_PREDIV);
391 U32 u4POSDIV = u4IO32ReadFldAlign(u4PLL8_ADDR, SHU_PHYPLL2_RG_RPHYPLL_POSDIV);
392 U32 u4CKDIV4 = u4IO32ReadFldAlign(u4B0_DQ, SHU_B0_DQ1_RG_ARPI_MIDPI_CKDIV4_EN_B0);
393 U8 u1FBKSEL = u4IO32ReadFldAlign(u4PLL3_ADDR, SHU_PHYPLL3_RG_RPHYPLL_FBKSEL);
394 //Darren-U16 u2CKMUL2 = u4IO32ReadFldAlign(u4PLL4, SHU_PLL4_RG_RPHYPLL_RESERVED);
395 U8 u1SopenDQ = u4IO32ReadFldAlign(u4B0_DQ6, SHU_B0_DQ6_RG_ARPI_SOPEN_EN_B0);
396
397 U32 u4VCOFreq = (((52>>u4PREDIV)*(u4SDM_PCW>>8))>>u4POSDIV) << u1FBKSEL;
398 U32 u4DataRate = u4VCOFreq>>u4CKDIV4;
399 if (u1SopenDQ == ENABLE) // for 1:4 mode DDR800 (3.2G/DIV4)
400 u4DataRate >>= 2;
401
402 //msg("PCW=0x%X, u4PREDIV=%d, u4POSDIV=%d, CKDIV4=%d, DataRate=%d\n", u4SDM_PCW, u4PREDIV, u4POSDIV, u4CKDIV4, u4DataRate);
403 msg("[F] DataRate=%d at SHU%d\n", u4DataRate, u1ShuLevel);
404 }
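/*
 * Worked example of the data-rate formula above (hypothetical register values,
 * not read from any real part): with SDM_PCW = 0x7B00, PREDIV = 0, POSDIV = 1,
 * FBKSEL = 0, CKDIV4 = 0 and SOPEN disabled:
 *   VCOFreq  = ((52 >> 0) * (0x7B00 >> 8)) >> 1 = (52 * 123) >> 1 = 3198
 *   DataRate = 3198 >> 0 = 3198
 * i.e. roughly the 3200 value that GetFreqBySel() returns for LP5_DDR6400.
 */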
405 #endif
406
407 DRAM_PLL_FREQ_SEL_T vGet_PLL_FreqSel(DRAMC_CTX_T *p)
408 {
409 return p->pDFSTable->freq_sel;
410 }
411
412 void vSet_PLL_FreqSel(DRAMC_CTX_T *p, DRAM_PLL_FREQ_SEL_T sel)
413 {
414 p->pDFSTable->freq_sel = sel;
415 }
416
417 DDR800_MODE_T vGet_DDR_Loop_Mode(DRAMC_CTX_T *p)
418 {
419 return p->pDFSTable->ddr_loop_mode;
420 }
421
422
423 void vSet_Div_Mode(DRAMC_CTX_T *p, DIV_MODE_T eMode)
424 {
425 p->pDFSTable->divmode = eMode;
426 }
427
428 DIV_MODE_T vGet_Div_Mode(DRAMC_CTX_T *p)
429 {
430 return p->pDFSTable->divmode;
431 }
432
433 void vSet_Current_ShuLevel(DRAMC_CTX_T *p, DRAM_DFS_SRAM_SHU_T u1ShuIndex)
434 {
435 p->pDFSTable->shuffleIdx = u1ShuIndex;
436 }
437
438 DRAM_DFS_SRAM_SHU_T vGet_Current_ShuLevel(DRAMC_CTX_T *p)
439 {
440 return p->pDFSTable->shuffleIdx;
441 }
442
443
444 #if 0
445 void vSet_Duty_Calibration_Mode(DRAMC_CTX_T *p, U8 kMode)
446 {
447 p->pDFSTable->duty_calibration_mode = kMode;
448 }
449 #endif
450
451 DUTY_CALIBRATION_T Get_Duty_Calibration_Mode(DRAMC_CTX_T *p)
452 {
453 return p->pDFSTable->duty_calibration_mode;
454 }
455
456 VREF_CALIBRATION_ENABLE_T Get_Vref_Calibration_OnOff(DRAMC_CTX_T *p)
457 {
458 return p->pDFSTable->vref_calibartion_enable;
459 }
460
461 /* vGet_Dram_CBT_Mode
462 * Due to current HW design (both ranks share the same set of ACTiming regs), mixed
463 * mode LP4 now uses byte mode ACTiming settings. This means most calibration steps
464 * should use byte mode code flow.
465 * Note: The below items must have per-rank settings (Don't use this function)
466 * 1. CBT training 2. TX tracking
467 */
468 DRAM_CBT_MODE_T vGet_Dram_CBT_Mode(DRAMC_CTX_T *p)
469 {
470 if (p->support_rank_num == RANK_DUAL)
471 {
472 if(p->dram_cbt_mode[RANK_0] == CBT_NORMAL_MODE && p->dram_cbt_mode[RANK_1] == CBT_NORMAL_MODE)
473 return CBT_NORMAL_MODE;
474 }
475 else // Single rank
476 {
477 if(p->dram_cbt_mode[RANK_0] == CBT_NORMAL_MODE)
478 return CBT_NORMAL_MODE;
479 }
480
481 return CBT_BYTE_MODE1;
482 }
483
484 void vPrintCalibrationBasicInfo(DRAMC_CTX_T *p)
485 {
486 #if __ETT__
487 msg("===============================================================================\n"
488 "Dram Type= %d, Freq= %u, FreqGroup= %u, CH_%d, rank %d\n"
489 "fsp= %d, odt_onoff= %d, Byte mode= %d, DivMode= %d\n"
490 "===============================================================================\n",
491 p->dram_type, DDRPhyFMeter()?DDRPhyFMeter():p->frequency, p->freqGroup, p->channel, p->rank,
492 p->dram_fsp, p->odt_onoff, p->dram_cbt_mode[p->rank], vGet_Div_Mode(p));
493 #else
494 msg("==\n"
495 "Dram Type= %d, Freq= %u, CH_%d, rank %d\n"
496 "fsp= %d, odt_onoff= %d, Byte mode= %d, DivMode= %d\n"
497 "==\n",
498 p->dram_type,
499 DDRPhyFMeter(),
500 p->channel,
501 p->rank,
502 p->dram_fsp,
503 p->odt_onoff,
504 p->dram_cbt_mode[p->rank],
505 vGet_Div_Mode(p));
506 #endif
507 }
508
509 #if VENDER_JV_LOG
510 void vPrintCalibrationBasicInfo_ForJV(DRAMC_CTX_T *p)
511 {
512 msg5("\n\nDram type:");
513
514 switch (p->dram_type)
515 {
516 case TYPE_LPDDR4:
517 msg5("LPDDR4\t");
518 break;
519
520 case TYPE_LPDDR4X:
521 msg5("LPDDR4X\t");
522 break;
523
524 case TYPE_LPDDR4P:
525 msg5("LPDDR4P\t");
526 break;
527 }
528
529 msg5("Freq: %d, FreqGroup %u, channel %d, rank %d\n"
530 "dram_fsp= %d, odt_onoff= %d, Byte mode= %d, DivMode= %d\n\n",
531 p->frequency, p->freqGroup, p->channel, p->rank,
532 p->dram_fsp, p->odt_onoff, p->dram_cbt_mode[p->rank], vGet_Div_Mode(p));
533
534 return;
535 }
536 #endif
537
538 U16 GetFreqBySel(DRAMC_CTX_T *p, DRAM_PLL_FREQ_SEL_T sel)
539 {
540 U16 u2freq=0;
541
542 switch(sel)
543 {
544 case LP4_DDR4266:
545 u2freq=2133;
546 break;
547 case LP4_DDR3733:
548 u2freq=1866;
549 break;
550 case LP4_DDR3200:
551 u2freq=1600;
552 break;
553 case LP4_DDR2667:
554 u2freq=1333;
555 break;
556 case LP4_DDR2400:
557 u2freq=1200;
558 break;
559 case LP4_DDR1866:
560 u2freq=933;
561 break;
562 case LP4_DDR1600:
563 u2freq=800;
564 break;
565 case LP4_DDR1200:
566 u2freq=600;
567 break;
568 case LP4_DDR800:
569 u2freq=400;
570 break;
571 case LP4_DDR400:
572 u2freq=200;
573 break;
574
575 case LP5_DDR6400:
576 u2freq=3200;
577 break;
578 case LP5_DDR6000:
579 u2freq=3000;
580 break;
581 case LP5_DDR5500:
582 u2freq=2750;
583 break;
584 case LP5_DDR4800:
585 u2freq=2400;
586 break;
587 case LP5_DDR4266:
588 u2freq=2133;
589 break;
590 case LP5_DDR3733:
591 u2freq=1866;
592 break;
593 case LP5_DDR3200:
594 u2freq=1600;
595 break;
596 case LP5_DDR2400:
597 u2freq=1200;
598 break;
599 case LP5_DDR1600:
600 u2freq=800;
601 break;
602 case LP5_DDR1200:
603 u2freq=600;
604 break;
605 case LP5_DDR800:
606 u2freq=400;
607 break;
608
609 default:
610 err("[GetFreqBySel] freq sel is incorrect !!!\n");
611 break;
612 }
613
614 return u2freq;
615 }
616
617 DRAM_PLL_FREQ_SEL_T GetSelByFreq(DRAMC_CTX_T *p, U16 u2freq)
618 {
619 DRAM_PLL_FREQ_SEL_T sel=0;
620
621 switch(u2freq)
622 {
623 case 2133:
624 sel=LP4_DDR4266;
625 break;
626 case 1866:
627 sel=LP4_DDR3733;
628 break;
629 case 1600:
630 sel=LP4_DDR3200;
631 break;
632 case 1333:
633 sel=LP4_DDR2667;
634 break;
635 case 1200:
636 sel=LP4_DDR2400;
637 break;
638 case 933:
639 sel=LP4_DDR1866;
640 break;
641 case 800:
642 sel=LP4_DDR1600;
643 break;
644 case 600:
645 sel=LP4_DDR1200;
646 break;
647 case 400:
648 sel=LP4_DDR800;
649 break;
650 case 200:
651 sel=LP4_DDR400;
652 break;
653 default:
654 err("[GetSelByFreq] sel is incorrect !!!\n");
655 break;
656 }
657
658 return sel;
659 }
660
661 void DDRPhyFreqSel(DRAMC_CTX_T *p, DRAM_PLL_FREQ_SEL_T sel)
662 {
663 p->freq_sel = sel;
664 p->frequency = GetFreqBySel(p, sel);
665
666 if(is_lp5_family(p))
667 {
668 ///TODO: Dennis
669 //p->dram_fsp = (p->frequency < LP5_MRFSP_TERM_FREQ)? FSP_0: FSP_1;
670 p->dram_fsp = FSP_0;
671 #if LP5_DDR4266_RDBI_WORKAROUND
672 if(p->frequency >= 2133)
673 p->DBI_R_onoff[FSP_0] = DBI_ON;
674 #endif
675 p->odt_onoff = (p->frequency < LP5_MRFSP_TERM_FREQ)? ODT_OFF: ODT_ON;
676
677 if(p->frequency >= 2750)
678 vSet_Div_Mode(p, DIV16_MODE);
679 }
680 else
681 {
682 p->dram_fsp = (p->frequency < LP4_MRFSP_TERM_FREQ)? FSP_0: FSP_1;
683 p->odt_onoff = (p->frequency < LP4_MRFSP_TERM_FREQ)? ODT_OFF: ODT_ON;
684 }
685
686 if (p->dram_type == TYPE_LPDDR4P)
687 p->odt_onoff = ODT_OFF;
688
689 p->shu_type = get_shuffleIndex_by_Freq(p);
690 setFreqGroup(p); /* Set p->freqGroup to support freqs not in ACTimingTable */
691
692 ///TODO: add DBI_onoff by condition
693 //p->DBI_onoff = p->odt_onoff;
694 }
695
696
697 U16 u2DFSGetHighestFreq(DRAMC_CTX_T * p)
698 {
699 U8 u1ShuffleIdx = 0;
700 U16 u2Freq=0;
701 static U16 u2FreqMax=0;
702
703 if ((u2FreqMax == 0) || (gUpdateHighestFreq == TRUE))
704 {
705 gUpdateHighestFreq = FALSE;
706 u2FreqMax = 0;
707 for (u1ShuffleIdx = DRAM_DFS_SHUFFLE_1; u1ShuffleIdx < DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
708 {
709 u2Freq = GetFreqBySel(p, gFreqTbl[u1ShuffleIdx].freq_sel);
710 if(u2FreqMax < u2Freq)
711 u2FreqMax = u2Freq;
712 }
713 }
714
715 return u2FreqMax;
716 }
717
718 U8 GetEyeScanEnable(DRAMC_CTX_T * p, U8 get_type)
719 {
720 #if ENABLE_EYESCAN_GRAPH
721 #if (fcFOR_CHIP_ID == fcA60868) //need to check whether the unterm highest freq is saved at SRAM_SHU4
722 //CBT
723 if (get_type == 0)
724 if (ENABLE_EYESCAN_CBT==1) return ENABLE; //TO DO :Temp Force open EYESCAN
725
726 //RX
727 if (get_type == 1)
728 if (ENABLE_EYESCAN_RX==1) return ENABLE; //TO DO :Temp Force open EYESCAN
729
730 //TX
731 if (get_type == 2)
732 if (ENABLE_EYESCAN_TX==1) return ENABLE; //TO DO :Temp Force open EYESCAN
733
734 #else
735 //CBT
736 if (get_type == 0)
737 {
738 if (gCBT_EYE_Scan_flag==0) return DISABLE;
739 if (gCBT_EYE_Scan_only_higheset_freq_flag == 0) return ENABLE; //K All freq
740 if (p->frequency == u2DFSGetHighestFreq(p)) return ENABLE; // K highest freq
741 if (gEye_Scan_unterm_highest_flag==1 && vGet_Current_ShuLevel(p)==SRAM_SHU2) return ENABLE; // K unterm highest freq
742 }
743
744 //RX
745 if (get_type == 1)
746 {
747 if (gRX_EYE_Scan_flag==0) return DISABLE;
748 if (gRX_EYE_Scan_only_higheset_freq_flag == 0) return ENABLE; //K All freq
749 if (p->frequency == u2DFSGetHighestFreq(p)) return ENABLE; // K highest freq
750 if (gEye_Scan_unterm_highest_flag==1 && vGet_Current_ShuLevel(p)==SRAM_SHU2) return ENABLE; // K unterm highest freq
751 }
752
753 //TX
754 if (get_type == 2)
755 {
756 if (gTX_EYE_Scan_flag==0) return DISABLE;
757 if (gTX_EYE_Scan_only_higheset_freq_flag == 0) return ENABLE; //K All freq
758 if (p->frequency == u2DFSGetHighestFreq(p)) return ENABLE; // K highest freq
759 if (gEye_Scan_unterm_highest_flag==1 && vGet_Current_ShuLevel(p)==SRAM_SHU2) return ENABLE; // K unterm highest freq
760 }
761
762 #endif
763 #endif
764
765 return DISABLE;
766 }
767
768 void DramcWriteDBIOnOff(DRAMC_CTX_T *p, U8 onoff)
769 {
770 // DRAMC Write-DBI On/Off
771 vIO32WriteFldAlign_All(DRAMC_REG_SHU_TX_SET0, onoff, SHU_TX_SET0_DBIWR);
772 msg("DramC Write-DBI %s\n", (onoff == DBI_ON) ? "on" : "off");
773 }
774
775 void DramcReadDBIOnOff(DRAMC_CTX_T *p, U8 onoff)
776 {
777 // DRAMC Read-DBI On/Off
778 vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B0_DQ7, onoff, SHU_B0_DQ7_R_DMDQMDBI_SHU_B0);
779 vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B1_DQ7, onoff, SHU_B1_DQ7_R_DMDQMDBI_SHU_B1);
780 msg("DramC Read-DBI %s\n", (onoff == DBI_ON) ? "on" : "off");
781 }
782 #if ENABLE_READ_DBI
783 void SetDramModeRegForReadDBIOnOff(DRAMC_CTX_T *p, U8 u1fsp, U8 onoff)
784 {
785 #if MRW_CHECK_ONLY
786 mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
787 #endif
788 //msg("--Fsp%d --\n", p->dram_fsp);
789
790 //DRAM MR3[6] read-DBI On/Off
791 u1MR03Value[u1fsp] = ((u1MR03Value[u1fsp] & 0xbf) | (onoff << 6));
792 DramcModeRegWriteByRank(p, p->rank, 3, u1MR03Value[u1fsp]);
793 }
794 #endif
795
796 #if ENABLE_WRITE_DBI
797 void SetDramModeRegForWriteDBIOnOff(DRAMC_CTX_T *p, U8 u1fsp, U8 onoff)
798 {
799 #if MRW_CHECK_ONLY
800 mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
801 #endif
802 //DRAM MR3[7] write-DBI On/Off
803 u1MR03Value[u1fsp] = ((u1MR03Value[u1fsp] & 0x7F) | (onoff << 7));
804 DramcModeRegWriteByRank(p, p->rank, 3, u1MR03Value[u1fsp]);
805 }
806 #endif
807
808 void CKEFixOnOff(DRAMC_CTX_T *p, U8 u1RankIdx, CKE_FIX_OPTION option, CKE_FIX_CHANNEL WriteChannelNUM)
809 {
810 U8 u1CKEOn, u1CKEOff;
811
812 if (option == CKE_DYNAMIC) //if CKE is dynamic, set both CKE fix on and fix off to 0
813 { //after a CKE fix on/off sequence, CKE should be returned to dynamic (controlled by HW)
814 u1CKEOn = u1CKEOff = 0;
815 }
816 else //if CKE fix on is set to 1, CKE fix off must be set to 0, and vice versa
817 {
818 u1CKEOn = option;
819 u1CKEOff = (1 - option);
820 }
821
822 if (WriteChannelNUM == CKE_WRITE_TO_ALL_CHANNEL) //write register to all channel
823 {
824 if((u1RankIdx == RANK_0)||(u1RankIdx == CKE_WRITE_TO_ALL_RANK))
825 {
826 vIO32WriteFldMulti_All(DRAMC_REG_CKECTRL, P_Fld(u1CKEOff, CKECTRL_CKEFIXOFF)
827 | P_Fld(u1CKEOn, CKECTRL_CKEFIXON));
828 }
829
830 if(u1RankIdx == RANK_1||((u1RankIdx == CKE_WRITE_TO_ALL_RANK) && (p->support_rank_num == RANK_DUAL)))
831 {
832 vIO32WriteFldMulti_All(DRAMC_REG_CKECTRL, P_Fld(u1CKEOff, CKECTRL_CKE1FIXOFF)
833 | P_Fld(u1CKEOn, CKECTRL_CKE1FIXON));
834 }
835 }
836 else
837 {
838 if((u1RankIdx == RANK_0) || (u1RankIdx == CKE_WRITE_TO_ALL_RANK))
839 {
840 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL), P_Fld(u1CKEOff, CKECTRL_CKEFIXOFF)
841 | P_Fld(u1CKEOn, CKECTRL_CKEFIXON));
842 }
843
844 if((u1RankIdx == RANK_1) ||((u1RankIdx == CKE_WRITE_TO_ALL_RANK) && (p->support_rank_num == RANK_DUAL)))
845 {
846 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL), P_Fld(u1CKEOff, CKECTRL_CKE1FIXOFF)
847 | P_Fld(u1CKEOn, CKECTRL_CKE1FIXON));
848 }
849 }
850 }
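/*
 * Usage sketch (illustrative only; CKE_FIXON is an assumed enumerator name for
 * the fix-on option, i.e. the value 1 handled above - it is not defined in this
 * file): fix CKE high on rank 0 of every channel, do the work that needs CKE
 * held high, then hand CKE back to dynamic HW control.
 *
 *     CKEFixOnOff(p, RANK_0, CKE_FIXON, CKE_WRITE_TO_ALL_CHANNEL);
 *     ... operations that require CKE fixed high ...
 *     CKEFixOnOff(p, RANK_0, CKE_DYNAMIC, CKE_WRITE_TO_ALL_CHANNEL);
 */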
851
852
853 void vAutoRefreshSwitch(DRAMC_CTX_T *p, U8 option)
854 {
855 if (option == ENABLE)
856 {
857 //enable autorefresh
858 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL0), 0, REFCTRL0_REFDIS); //REFDIS=0, enable auto refresh
859 }
860 else // DISABLE
861 {
862 //disable autorefresh
863 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL0), 1, REFCTRL0_REFDIS); //REFDIS=1, disable auto refresh
864
865 //HW only disables auto refresh once the refresh queue is empty, so wait for the queue to drain.
866 mcDELAY_US(u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MISC_STATUSA), MISC_STATUSA_REFRESH_QUEUE_CNT) * 4); //wait refresh_queue_cnt * ~3.9us (rounded up to 4us per entry)
867 }
868 }
869
870
871 //-------------------------------------------------------------------------
872 /** vCKERankCtrl
873 * Control the CKE toggle mode: toggle both ranks at the same time (CKE_RANK_DEPENDENT) or individually (CKE_RANK_INDEPENDENT)
874 * Note: Sets CKE toggle mode for all channels
875 * @param p Pointer of context created by DramcCtxCreate.
876 * @param CKECtrlMode Indicates the CKE control mode (CKE_RANK_DEPENDENT or CKE_RANK_INDEPENDENT)
877 */
878 //-------------------------------------------------------------------------
879 void vCKERankCtrl(DRAMC_CTX_T *p, CKE_CTRL_MODE_T CKECtrlMode)
880 {
881 /* Struct indicating all register fields mentioned in "multi rank CKE control" */
882 typedef struct
883 {
884 U8 u1CKE2Rank: Fld_wid(RKCFG_CKE2RANK);
885 U8 u1CKE2Rank_Opt :Fld_wid(CKECTRL_CKE2RANK_OPT);
886 U8 u1CKE2Rank_Opt2 :Fld_wid(CKECTRL_CKE2RANK_OPT2);
887 U8 u1CKE2Rank_Opt3: Fld_wid(CKECTRL_CKE2RANK_OPT3);
888 U8 u1CKE2Rank_Opt5: Fld_wid(CKECTRL_CKE2RANK_OPT5);
889 U8 u1CKE2Rank_Opt6: Fld_wid(CKECTRL_CKE2RANK_OPT6);
890 U8 u1CKE2Rank_Opt7: Fld_wid(CKECTRL_CKE2RANK_OPT7);
891 U8 u1CKE2Rank_Opt8: Fld_wid(CKECTRL_CKE2RANK_OPT8);
892 U8 u1CKETimer_Sel: Fld_wid(CKECTRL_CKETIMER_SEL);
893 U8 u1FASTWake: Fld_wid(SHU_DCM_CTRL0_FASTWAKE);
894 U8 u1FASTWake2: Fld_wid(SHU_DCM_CTRL0_FASTWAKE2);
895 U8 u1FastWake_Sel: Fld_wid(CKECTRL_FASTWAKE_SEL);
896 U8 u1CKEWake_Sel: Fld_wid(CKECTRL_CKEWAKE_SEL);
897 U8 u1ClkWiTrfc: Fld_wid(ACTIMING_CTRL_CLKWITRFC);
898 } CKE_CTRL_T;
899
900 /* CKE_Rank dependent/independent mode register setting values */
901 CKE_CTRL_T CKE_Mode, CKE_Rank_Independent = { .u1CKE2Rank = 0, .u1CKE2Rank_Opt3 = 0, .u1CKE2Rank_Opt2 = 1,
902 .u1CKE2Rank_Opt5 = 0, .u1CKE2Rank_Opt6 = 0, .u1CKE2Rank_Opt7 = 1, .u1CKE2Rank_Opt8 = 0,
903 .u1CKETimer_Sel = 0, .u1FASTWake = 1, .u1FASTWake2 = 1, .u1FastWake_Sel = 1, .u1CKEWake_Sel = 0, .u1ClkWiTrfc = 0
904 },
905 CKE_Rank_Dependent = { .u1CKE2Rank = 1, .u1CKE2Rank_Opt3 = 0,
906 .u1CKE2Rank_Opt5 = 0, .u1CKE2Rank_Opt6 = 0, .u1CKE2Rank_Opt7 = 0, .u1CKE2Rank_Opt8 = 0, .u1CKETimer_Sel = 1,
907 .u1FASTWake = 1, .u1FASTWake2 = 0, .u1FastWake_Sel = 0, .u1CKEWake_Sel = 0, .u1ClkWiTrfc = 0
908 };
909 //Select CKE control mode
910 CKE_Mode = (CKECtrlMode == CKE_RANK_INDEPENDENT)? CKE_Rank_Independent: CKE_Rank_Dependent;
911
912 //Apply CKE control mode register settings
913 vIO32WriteFldAlign_All(DRAMC_REG_RKCFG, CKE_Mode.u1CKE2Rank, RKCFG_CKE2RANK);
914 vIO32WriteFldMulti_All(DRAMC_REG_CKECTRL, P_Fld(CKE_Mode.u1CKE2Rank_Opt3, CKECTRL_CKE2RANK_OPT3)
915 | P_Fld(CKE_Mode.u1CKE2Rank_Opt, CKECTRL_CKE2RANK_OPT)
916 | P_Fld(CKE_Mode.u1CKE2Rank_Opt2, CKECTRL_CKE2RANK_OPT2)
917 | P_Fld(CKE_Mode.u1CKE2Rank_Opt5, CKECTRL_CKE2RANK_OPT5)
918 | P_Fld(CKE_Mode.u1CKE2Rank_Opt6, CKECTRL_CKE2RANK_OPT6)
919 | P_Fld(CKE_Mode.u1CKE2Rank_Opt7, CKECTRL_CKE2RANK_OPT7)
920 | P_Fld(CKE_Mode.u1CKE2Rank_Opt8, CKECTRL_CKE2RANK_OPT8)
921 | P_Fld(CKE_Mode.u1CKETimer_Sel, CKECTRL_CKETIMER_SEL)
922 | P_Fld(CKE_Mode.u1FastWake_Sel, CKECTRL_FASTWAKE_SEL)
923 | P_Fld(CKE_Mode.u1CKEWake_Sel, CKECTRL_CKEWAKE_SEL));
924
925 vIO32WriteFldMulti_All(DRAMC_REG_SHU_DCM_CTRL0, P_Fld(CKE_Mode.u1FASTWake, SHU_DCM_CTRL0_FASTWAKE) | P_Fld(CKE_Mode.u1FASTWake2, SHU_DCM_CTRL0_FASTWAKE2));
926
927 vIO32WriteFldAlign_All(DRAMC_REG_ACTIMING_CTRL, CKE_Mode.u1ClkWiTrfc, ACTIMING_CTRL_CLKWITRFC);
928 }
929
930
931 #define MAX_CMP_CPT_WAIT_LOOP 100000 // max loop
932 static void DramcSetRWOFOEN(DRAMC_CTX_T *p, U8 u1onoff)
933 {
934 U32 u4loop_count = 0;
935
936 {
937 while(u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MISC_STATUSA), MISC_STATUSA_REQQ_EMPTY) != 1)
938 {
939 mcDELAY_US(1);
940 u4loop_count ++;
941
942 if(u4loop_count > MAX_CMP_CPT_WAIT_LOOP)
943 {
944 err("RWOFOEN timout! queue is not empty\n");
945 #if __ETT__
946 while(1);
947 #else
948 break;
949 #endif
950 }
951 }
952 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SCHEDULER_COM), u1onoff, SCHEDULER_COM_RWOFOEN);
953 }
954 }
955
956
957 //static void DramcEngine2CleanWorstSiPattern(DRAMC_CTX_T *p)
958 //{
959 // vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
960 // P_Fld(0, TEST2_A3_AUTO_GEN_PAT) |
961 // P_Fld(0, TEST2_A3_HFIDPAT) |
962 // P_Fld(0, TEST2_A3_TEST_AID_EN));
963 //}
964
965
966 static void DramcEngine2SetUiShift(DRAMC_CTX_T *p, U8 option)//UI shift function
967 {
968 if(option == ENABLE)
969 {
970 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0),
971 P_Fld(1, TEST2_A0_TA2_LOOP_EN) |
972 P_Fld(3, TEST2_A0_LOOP_CNT_INDEX));
973 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
974 P_Fld(1, TEST2_A3_TEST2_PAT_SHIFT) |
975 P_Fld(0, TEST2_A3_PAT_SHIFT_SW_EN));
976 }
977 else
978 {
979 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0),
980 P_Fld(0, TEST2_A0_TA2_LOOP_EN) |
981 P_Fld(0, TEST2_A0_LOOP_CNT_INDEX));
982 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
983 P_Fld(0, TEST2_A3_TEST2_PAT_SHIFT));
984 }
985 }
986
987
988 void DramcSetRankEngine2(DRAMC_CTX_T *p, U8 u1RankSel)
989 {
990 //LPDDR2_3_ADRDECEN_TARKMODE =0, always rank0
991 /* ADRDECEN_TARKMODE: rank input selection
992 * 1'b1 select CTO_AGENT1_RANK, 1'b0 rank by address decode
993 */
994 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), 1, TEST2_A3_ADRDECEN_TARKMODE);
995
996 // DUMMY_TESTAGENTRKSEL =0, select rank according to CATRAIN_TESTAGENTRK
997 /* TESTAGENTRKSEL: Test agent access rank mode selection
998 * 2'b00: rank selection by TESTAGENTRK, 2'b01: rank selection by CTO_AGENT_1_BK_ADR[0]
999 * 2'b10: rank selection by CTO_AGENT1_COL_ADR[3], 2'b11: rank selection by CTO_AGENT1_COL_ADR[4]
1000 */
1001 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4), 0, TEST2_A4_TESTAGENTRKSEL);
1002
1003 //CATRAIN_TESTAGENTRK = u1RankSel
1004 /* TESTAGENTRK: Specify test agent rank
1005 * 2'b00 rank 0, 2'b01 rank 1, 2'b10 rank 2
1006 */
1007 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4), u1RankSel, TEST2_A4_TESTAGENTRK);
1008 }
1009
1010
1011 void DramcEngine2SetPat(DRAMC_CTX_T *p, U8 u1TestPat, U8 u1LoopCnt, U8 u1Len1Flag, U8 u1EnableUiShift) //u1LoopCnt is related to rank
1012 {
1013
1014 if ((u1TestPat == TEST_XTALK_PATTERN) || (u1TestPat == TEST_SSOXTALK_PATTERN)) //xtalk or SSO+XTALK
1015 {
1016 //TEST_REQ_LEN1=1 is a new feature meant to keep the DQ bus busy continuously,
1017 //but DV simulation hit compare errors with it,
1018 //so fall back to the old way:
1019 //TEST_REQ_LEN1=0, R_DMRWOFOEN=1
1020 if (u1Len1Flag != 0)
1021 {
1022 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1023 P_Fld(1, TEST2_A4_TEST_REQ_LEN1)); //test agent 2 with cmd length = 0, LEN1 of 256bits data
1024 DramcSetRWOFOEN(p, 0); //@IPM will fix for LEN1=1 issue
1025
1026 }
1027 else
1028 {
1029 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1030 P_Fld(0, TEST2_A4_TEST_REQ_LEN1)); //test agent 2 with cmd length = 0
1031 DramcSetRWOFOEN(p, 1);
1032 }
1033
1034 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1035 P_Fld(0, TEST2_A3_AUTO_GEN_PAT) |
1036 P_Fld(0, TEST2_A3_HFIDPAT) |
1037 P_Fld(0, TEST2_A3_TEST_AID_EN) |
1038 P_Fld(0, TEST2_A3_TESTAUDPAT) |
1039 P_Fld(u1LoopCnt, TEST2_A3_TESTCNT)); //dont use audio pattern
1040
1041 if (u1TestPat == TEST_SSOXTALK_PATTERN)
1042 {
1043 //set addr 0x48[16] to 1, TESTXTALKPAT = 1
1044 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1045 P_Fld(1, TEST2_A4_TESTXTALKPAT) |
1046 P_Fld(0, TEST2_A4_TESTAUDMODE) |
1047 P_Fld(0, TEST2_A4_TESTAUDBITINV)); //use XTALK pattern, dont use audio pattern
1048
1049 //R_DMTESTSSOPAT=0, R_DMTESTSSOXTALKPAT=0
1050 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1051 P_Fld(1, TEST2_A4_TESTSSOPAT) |
1052 P_Fld(0, TEST2_A4_TESTSSOXTALKPAT)); //dont use sso, sso+xtalk pattern
1053 }
1054 else //select XTALK pattern
1055 {
1056 //set addr 0x48[16] to 1, TESTXTALKPAT = 1
1057 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1058 P_Fld(1, TEST2_A4_TESTXTALKPAT) |
1059 P_Fld(0, TEST2_A4_TESTAUDMODE) |
1060 P_Fld(0, TEST2_A4_TESTAUDBITINV)); //use XTALK pattern, dont use audio pattern
1061
1062 //R_DMTESTSSOPAT=0, R_DMTESTSSOXTALKPAT=0
1063 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1064 P_Fld(0, TEST2_A4_TESTSSOPAT) |
1065 P_Fld(0, TEST2_A4_TESTSSOXTALKPAT)); //dont use sso, sso+xtalk pattern
1066 }
1067 }
1068 else if (u1TestPat == TEST_AUDIO_PATTERN) //AUDIO
1069 {
1070 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1071 P_Fld(0, TEST2_A4_TEST_REQ_LEN1)); //test agent 2 with cmd length = 0
1072 // set AUDINIT=0x11 AUDINC=0x0d AUDBITINV=1 AUDMODE=1(1:read only(address fix), 0: write/read address change)
1073 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1074 P_Fld(0x00000011, TEST2_A4_TESTAUDINIT) |
1075 P_Fld(0x0000000d, TEST2_A4_TESTAUDINC) |
1076 P_Fld(0, TEST2_A4_TESTXTALKPAT) |
1077 P_Fld(0, TEST2_A4_TESTAUDMODE) |
1078 P_Fld(1, TEST2_A4_TESTAUDBITINV));
1079
1080 // set addr 0x044 [7] to 1 ,select audio pattern
1081 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1082 P_Fld(0, TEST2_A3_AUTO_GEN_PAT) |
1083 P_Fld(0, TEST2_A3_HFIDPAT) |
1084 P_Fld(0, TEST2_A3_TEST_AID_EN) |
1085 P_Fld(1, TEST2_A3_TESTAUDPAT) |
1086 P_Fld(u1LoopCnt, TEST2_A3_TESTCNT));
1087 }
1088 else if (u1TestPat == TEST_WORST_SI_PATTERN) //TEST2_OFF > 'h56
1089 {
1090 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1091 P_Fld(u1Len1Flag, TEST2_A4_TEST_REQ_LEN1)|
1092 P_Fld(0, TEST2_A4_TESTAUDINIT) |
1093 P_Fld(0, TEST2_A4_TESTAUDINC) |
1094 P_Fld(0, TEST2_A4_TESTXTALKPAT) |
1095 P_Fld(0, TEST2_A4_TESTSSOPAT)
1096 );
1097 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1098 P_Fld(0, TEST2_A3_TESTAUDPAT) |
1099 P_Fld(1, TEST2_A3_AUTO_GEN_PAT) |
1100 P_Fld(1, TEST2_A3_HFIDPAT) |
1101 P_Fld(1, TEST2_A3_TEST_AID_EN) |
1102 P_Fld(u1LoopCnt, TEST2_A3_TESTCNT)
1103 );
1104 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A2), 0x56, TEST2_A2_TEST2_OFF);//Set to min value to save time;
1105 }
1106 else //ISI
1107 {
1108 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1109 P_Fld(0, TEST2_A4_TEST_REQ_LEN1)); //test agent 2 with cmd length = 0
1110
1111 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1112 P_Fld(0, TEST2_A3_AUTO_GEN_PAT) |
1113 P_Fld(0, TEST2_A3_HFIDPAT) |
1114 P_Fld(0, TEST2_A3_TEST_AID_EN) |
1115 P_Fld(0, TEST2_A3_TESTAUDPAT) |
1116 P_Fld(u1LoopCnt, TEST2_A3_TESTCNT));
1117 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4), 0, TEST2_A4_TESTXTALKPAT);
1118 }
1119
1120 DramcEngine2SetUiShift(p, u1EnableUiShift); //Enable/Disable UI shift
1121 }
1122
1123 #define CMP_CPT_POLLING_PERIOD 1 // timeout for TE2: (CMP_CPT_POLLING_PERIOD X MAX_CMP_CPT_WAIT_LOOP)
1124 #define MAX_CMP_CPT_WAIT_LOOP 100000 // max loop
1125 static void DramcEngine2CheckComplete(DRAMC_CTX_T *p, U8 u1status)
1126 {
1127 U32 u4loop_count = 0;
1128 U32 u4Ta2_loop_count = 0;
1129 U32 u4ShiftUiFlag = 0;//Use TEST_WORST_SI_PATTERN_UI_SHIFT
1130
1131 while ((u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TESTRPT)) & u1status) != u1status)
1132 {
1133 mcDELAY_US(CMP_CPT_POLLING_PERIOD);
1134 u4loop_count++;
1135 if ((u4loop_count > 3) && (u4loop_count <= MAX_CMP_CPT_WAIT_LOOP))
1136 {
1137 //err("TESTRPT_DM_CMP_CPT: %d\n", u4loop_count);
1138 }
1139 else if (u4loop_count > MAX_CMP_CPT_WAIT_LOOP)
1140 {
1141 /*TINFO="fcWAVEFORM_MEASURE_A %d: time out\n", u4loop_count*/
1142 msg("fcWAVEFORM_MEASURE_A %d :time out, [22:20]=0x%x\n", u4loop_count, u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TESTRPT), TESTRPT_TESTSTAT));
1143
1144 //mcFPRINTF(fp_A60501, "fcWAVEFORM_MEASURE_A %d: time out\n", u4loop_count);
1145
1146 break;
1147 }
1148 }
1149
1150 u4loop_count = 0;
1151 u4ShiftUiFlag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), TEST2_A3_TEST2_PAT_SHIFT);
1152 if(u4ShiftUiFlag)//Use TEST_WORST_SI_PATTERN_UI_SHIFT
1153 {
1154 while ((u4Ta2_loop_count = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TEST_LOOP_CNT))) != 8)
1155 {
1156 u4loop_count++;
1157 if(u4loop_count > MAX_CMP_CPT_WAIT_LOOP)
1158 {
1159 msg("over MAX_CMP_CPT_WAIT_LOOP[%d] TEST_LOOP_CNT[%d]\n", u4loop_count, u4Ta2_loop_count);
1160 break;
1161 }
1162 }
1163 }
1164 }
1165
1166 static U32 DramcEngine2Compare(DRAMC_CTX_T *p, DRAM_TE_OP_T wr)
1167 {
1168 U32 u4result = 0xffffffff;
1169 U32 u4loopcount;
1170 U8 u1status = 1; //RK0
1171 U32 u4ShiftUiFlag = 0;
1172
1173 u4loopcount = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), TEST2_A3_TESTCNT);
1174 if (u4loopcount == 1)
1175 u1status = 3; //RK0/1
1176
1177 u4ShiftUiFlag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), TEST2_A3_TEST2_PAT_SHIFT);
1178
1179 if (wr == TE_OP_WRITE_READ_CHECK)
1180 {
1181 if(!u4ShiftUiFlag)//Could not use while UI shift is open
1182 {
1183 // read data compare ready check
1184 DramcEngine2CheckComplete(p, u1status);
1185
1186 // disable write
1187 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1188 P_Fld(0, TEST2_A3_TEST2W) |
1189 P_Fld(0, TEST2_A3_TEST2R) |
1190 P_Fld(0, TEST2_A3_TEST1));
1191
1192 mcDELAY_US(1);
1193
1194 // enable read
1195 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1196 P_Fld(0, TEST2_A3_TEST2W) |
1197 P_Fld(1, TEST2_A3_TEST2R) |
1198 P_Fld(0, TEST2_A3_TEST1));
1199 }
1200 }
1201
1202 // 5
1203 // read data compare ready check
1204 DramcEngine2CheckComplete(p, u1status);
1205
1206 // delay 10ns after ready check from DE suggestion (1us here)
1207 //mcDELAY_US(1);
1208
1209 u4result = (u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TESTRPT)) >> 4) & u1status; //CMP_ERR_RK0/1
1210
1211 return u4result;
1212 }
1213
1214 //-------------------------------------------------------------------------
1215 /** DramcEngine2
1216 * start the self test engine 2 inside dramc to test dram w/r.
1217 * @param p Pointer of context created by DramcCtxCreate.
1218 * @param wr (DRAM_TE_OP_T): TE operation
1219 * @param test2_1 (U32): 28bits,base address[27:0].
1220 * @param test2_2 (U32): 28bits,offset address[27:0]. (unit is 16-byte, i.e: 0x100 is 0x1000).
1221 * @param loopforever (S16): 0: read/write once, then exit
1222 *                        >0: enable engine2; after "loopforever" seconds, write the log and exit
1223 *                        -1: loop forever reading/writing; check the result every "period" seconds, and write the log and exit only on error
1224 *                        -2: loop forever reading/writing; write the log every "period" seconds, and exit on error
1225 *                        -3: just enable loop-forever mode, then exit
1226 * @param period (U8): valid only when loopforever < 0; period must be greater than 0
1227 * @param u1LoopCnt (U8): test agent2 loop count; loop number = 2^(u1LoopCnt), 0 means one time
1228 * @retval status (U32): return the value of DM_CMP_ERR ,0 is ok ,others mean error
1229 */
1230 //-------------------------------------------------------------------------
1231 static U32 uiReg0D0h;
1232 DRAM_STATUS_T DramcEngine2Init(DRAMC_CTX_T *p, U32 test2_1, U32 test2_2, U8 u1TestPat, U8 u1LoopCnt, U8 u1EnableUiShift)
1233 {
1234 U8 u1Len1Flag;
1235
1236 // error handling
1237 if (!p)
1238 {
1239 err("context is NULL\n");
1240 return DRAM_FAIL;
1241 }
1242
1243 // check loop number validness
1244 // if ((u1LoopCnt > 15) || (u1LoopCnt < 0)) // U8 >=0 always.
1245 if (u1LoopCnt > 15)
1246 {
1247 err("wrong param: u1LoopCnt > 15\n");
1248 return DRAM_FAIL;
1249 }
1250
1251 u1Len1Flag = (u1TestPat & 0x80) >> 7;
1252 u1TestPat = u1TestPat & 0x7f;
1253
1254 DramcSetRankEngine2(p, p->rank);
1255
1256 uiReg0D0h = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD));
1257 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD),
1258 P_Fld(0, DUMMY_RD_DQSG_DMYRD_EN) |
1259 P_Fld(0, DUMMY_RD_DQSG_DMYWR_EN) |
1260 P_Fld(0, DUMMY_RD_DUMMY_RD_EN) |
1261 P_Fld(0, DUMMY_RD_SREF_DMYRD_EN) |
1262 P_Fld(0, DUMMY_RD_DMY_RD_DBG) |
1263 P_Fld(0, DUMMY_RD_DMY_WR_DBG)); //must close dummy read when do test agent
1264
1265 //fixme-zj vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TESTCHIP_DMA1), 0, TESTCHIP_DMA1_DMA_LP4MATAB_OPT);
1266
1267 // disable self test engine1 and self test engine2
1268 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1269 P_Fld(0, TEST2_A3_TEST2W) |
1270 P_Fld(0, TEST2_A3_TEST2R) |
1271 P_Fld(0, TEST2_A3_TEST1));
1272
1273 // 1.set pattern ,base address ,offset address
1274 // 2.select ISI pattern or audio pattern or xtalk pattern
1275 // 3.set loop number
1276 // 4.enable read or write
1277 // 5.loop to check DM_CMP_CPT
1278 // 6.return CMP_ERR
1279 // currently only implement ucengine_status = 1, others are left for future extension
1280
1281 // 1
1282 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0),
1283 P_Fld(test2_1 >> 24, TEST2_A0_TEST2_PAT0) |
1284 P_Fld(test2_2 >> 24, TEST2_A0_TEST2_PAT1));
1285
1286 #if (__LP5_COMBO__ == TRUE)
1287 if (TRUE == is_lp5_family(p))
1288 {
1289 // LP5 TA2 base: 0x0
1290 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_TEST2_A1),
1291 test2_1 & 0x00ffffff, RK_TEST2_A1_TEST2_BASE);
1292 }
1293 else
1294 #endif
1295 {
1296 // LP4 TA2 base: 0x10000. This is only a TBA constraint, not a HW one.
1297 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_TEST2_A1),
1298 (test2_1 + 0x10000) & 0x00ffffff, RK_TEST2_A1_TEST2_BASE);
1299 }
1300
1301 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A2),
1302 test2_2 & 0x00ffffff, TEST2_A2_TEST2_OFF);
1303
1304 // 2 & 3
1305 // (TESTXTALKPAT, TESTAUDPAT) = 00 (ISI), 01 (AUD), 10 (XTALK), 11 (UNKNOWN)
1306 DramcEngine2SetPat(p, u1TestPat, u1LoopCnt, u1Len1Flag, u1EnableUiShift);
1307
1308 return DRAM_OK;
1309 }
1310
1311
1312 U32 DramcEngine2Run(DRAMC_CTX_T *p, DRAM_TE_OP_T wr, U8 u1TestPat)
1313 {
1314 U32 u4result = 0xffffffff;
1315
1316 // 4
1317 if (wr == TE_OP_READ_CHECK)
1318 {
1319 if ((u1TestPat == 1) || (u1TestPat == 2))
1320 {
1321 //if audio pattern, enable read only (disable write after read), AUDMODE=0x48[15]=0
1322 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4), 0, TEST2_A4_TESTAUDMODE);
1323 }
1324
1325 // enable read, 0x008[31:29]
1326 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1327 P_Fld(0, TEST2_A3_TEST2W) |
1328 P_Fld(1, TEST2_A3_TEST2R) |
1329 P_Fld(0, TEST2_A3_TEST1));
1330 }
1331 else if (wr == TE_OP_WRITE_READ_CHECK)
1332 {
1333 // enable write
1334 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1335 P_Fld(1, TEST2_A3_TEST2W) |
1336 P_Fld(0, TEST2_A3_TEST2R) |
1337 P_Fld(0, TEST2_A3_TEST1));
1338 }
1339 DramcEngine2Compare(p, wr);
1340
1341 // delay 10ns after ready check from DE suggestion (1us here)
1342 mcDELAY_US(1);
1343
1344 // 6
1345 // return CMP_ERR: 0 is ok, others mean failure; then disable test2w/test2r
1346 // get result
1347 // or all result
1348 u4result = (u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CMP_ERR)));
1349
1350 // disable read
1351 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3),
1352 P_Fld(0, TEST2_A3_TEST2W) |
1353 P_Fld(0, TEST2_A3_TEST2R) |
1354 P_Fld(0, TEST2_A3_TEST1));
1355
1356 return u4result;
1357 }
1358
1359 void DramcEngine2End(DRAMC_CTX_T *p)
1360 {
1361 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
1362 P_Fld(0, TEST2_A4_TEST_REQ_LEN1)); //test agent 2 with cmd length = 0
1363 DramcSetRWOFOEN(p, 1);
1364 vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD), uiReg0D0h);
1365 }
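/*
 * Usage sketch (illustrative only, not part of the original flow): a minimal
 * write/read/compare pass with test agent 2 using the three functions above.
 * The base/offset values are arbitrary examples; a return of 0 means the
 * compare passed.
 */
#if 0
static U32 DramcEngine2QuickCheck(DRAMC_CTX_T *p)
{
    U32 u4Err;

    /* base 0x0, offset 0x400 (16-byte units), xtalk pattern, loop cnt 0, no UI shift */
    DramcEngine2Init(p, 0x0, 0x400, TEST_XTALK_PATTERN, 0, TE_NO_UI_SHIFT);
    u4Err = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_XTALK_PATTERN);
    DramcEngine2End(p);

    return u4Err;
}
#endif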
1366
1367 U8 u1MaType = 0x2; // for DV sim
1368 void TA2_Test_Run_Time_HW_Set_Column_Num(DRAMC_CTX_T * p)
1369 {
1370 U8 u1ChannelIdx = 0;
1371 U8 u1EmiChIdx = 0;
1372 U32 u4matypeR0 = 0, u4matypeR1 = 0;
1373 U32 u4matype = 0;
1374 U32 u4EmiOffset = 0;
1375 DRAM_CHANNEL_T eOriChannel = p->channel;
1376
1377 for (u1ChannelIdx = 0; u1ChannelIdx < p->support_channel_num; u1ChannelIdx++)
1378 {
1379 vSetPHY2ChannelMapping(p, u1ChannelIdx);
1380
1381 u4EmiOffset = 0;
1382 u1EmiChIdx = u1ChannelIdx;
1383 #if (CHANNEL_NUM > 2)
1384 if (u1ChannelIdx >= CHANNEL_C)
1385 {
1386 u4EmiOffset = 0x4000; // 0x1021D000 for CH2/3
1387 u1EmiChIdx = u1ChannelIdx-2;
1388 }
1389 #endif
1390
1391 u4matype = u4IO32Read4B(EMI_APB_BASE + u4EmiOffset);
1392 u4matypeR0 = ((u4matype >> (4 + u1EmiChIdx * 16)) & 0x3) + 1; //refer to init_ta2_single_channel()
1393 u4matypeR1 = ((u4matype >> (6 + u1EmiChIdx * 16)) & 0x3) + 1; //refer to init_ta2_single_channel()
1394
1395 if(p->support_rank_num==RANK_SINGLE)
1396 {
1397 u4matype = u4matypeR0;
1398 }
1399 else
1400 {
1401 u4matype = (u4matypeR0 > u4matypeR1) ? u4matypeR1 : u4matypeR0; //get min value
1402 }
1403 //vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MATYPE), u4matype, MATYPE_MATYPE);
1404 }
1405 vSetPHY2ChannelMapping(p, eOriChannel);
1406 u1MaType = u4matype;
1407
1408 return;
1409 }
1410
1411 /* ----------------------------------------------------------------------
1412 * LP4 RG Address
1413 * bit: 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1 9 8 7 6 5 4 3 2 1 0
1414 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
1415 * ---------------------------------------------------------------
1416 * RG: - - R R R R R R R R R R R R R R R R R R|B B B|C C C C C C - - -
1417 * 2_BASE 1 1 1 1 1 1 1 1 9 8 7 6 5 4 3 2 1 0|A A A|9 8 7 6 5 4
1418 * 7 6 5 4 3 2 1 0 |2 1 0|
1419 * AXI ---------------------------------------------------------------
1420 * Addr: R R R R R R R R R R R R R R R R|B B B|C C C|C|C C C C C C C -
1421 * 1 1 1 1 1 1 9 8 7 6 5 4 3 2 1 0|A A A|9 8 7|H|6 5 4 3 2 1 0
1422 * 5 4 3 2 1 0 |2 1 0| | |
1423 * ----------------------------------------------------------------------
1424 */
1425 #define TA2_RANK0_ADDRESS (0x40200000)
1426 #define AXI_CHAN_BIT_WIDTH 1//2: 4_channel 1: 2_channel
1427 #define OFFSET_OF_RG_BASE_AND_AXI 2
1428 #define LEN1_INTRINSIC_OFFSET 2
1429 #define TRANSFER_DRAM_ADDR_BY_EMI_API 1 //1: by emi API 0: by above table
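/*
 * Worked example of the table-based path below (TRANSFER_DRAM_ADDR_BY_EMI_API == 0,
 * values per the #defines above): u4Addr = TA2_RANK0_ADDRESS & 0x1fffffff = 0x00200000,
 *   (u4Addr & ~0x1ff) >> AXI_CHAN_BIT_WIDTH = 0x00200000 >> 1 = 0x00100000
 *   | (u4Addr & 0xff)                       = 0x00100000
 *   >> (OFFSET_OF_RG_BASE_AND_AXI + 3)      = 0x00100000 >> 5 = 0x8000
 * so u4BaseR0 programmed into RK_TEST2_A1_TEST2_BASE would be 0x8000 for that address.
 */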
1430 void TA2_Test_Run_Time_HW_Presetting(DRAMC_CTX_T * p, U32 len, TA2_RKSEL_TYPE_T rksel_mode)/* Should call after TA2_Test_Run_Time_Pat_Setting() */
1431 {
1432 DRAM_CHANNEL_T eOriChannel = p->channel;
1433 DRAM_RANK_T eOriRank = p->rank;
1434 U32 u4BaseR0, u4BaseR1, u4Offset, u4Addr;
1435 U32 u4matypeR0, u4matypeR1, u4LEN1;
1436 U8 u1ChannelIdx, uiRWOFOEN, u1RkIdx;
1437
1438 u4Addr = TA2_RANK0_ADDRESS & 0x1fffffff;
1439 if (u1IsLP4Family(p->dram_type))
1440 {
1441 #if TRANSFER_DRAM_ADDR_BY_EMI_API
1442 {
1443 dram_addr_t dram_addr = {0};
1444 unsigned long long ull_axi_addr = TA2_RANK0_ADDRESS;
1445 phy_addr_to_dram_addr(&dram_addr, ull_axi_addr); //Make sure row. bank. column are correct
1446 u4BaseR0 = ((dram_addr.row << 12) | (dram_addr.bk << 9) | (dram_addr.col >> 1)) >> 3;// >>1: RG C4 is at the 3rd bit; >>3: RG starts at bit 3
1447 }
1448 #else
1449 // >>AXI_CHAN_BIT_WIDTH: drop bit8; >>OFFSET_OF_RG_BASE_AND_AXI: align with RG row; >>3: RG start with bit 3
1450 u4BaseR0 = (((u4Addr & ~0x1ff) >> AXI_CHAN_BIT_WIDTH) | (u4Addr & 0xff)) >> (OFFSET_OF_RG_BASE_AND_AXI + 3);
1451 #endif
1452 u4Offset = len >> (AXI_CHAN_BIT_WIDTH + 5);//5: 0x20 bytes (256 bits) of address coverage per pattern (128 bits data + 128 bits bubble); offset should be bigger than 0xFF
1453 }
1454 else
1455 {
1456 u4BaseR0 = u4Addr >> 4;
1457 if (rksel_mode == TA2_RKSEL_XRT)
1458 {
1459 u4Offset = len >> 4;//16B per pattern
1460 }
1461 else
1462 {
1463 u4Offset = (len >> 4) >> 1;//16B per pattern //len should be >>2 or test engine will time out
1464 }
1465 }
1466 u4BaseR1 = u4BaseR0;
1467
1468 u4matypeR0 = ((u4IO32Read4B(EMI_APB_BASE) >> 4) & 0x3) + 1;
1469 u4matypeR1 = ((u4IO32Read4B(EMI_APB_BASE) >> 6) & 0x3) + 1;
1470 if (u4matypeR0 != u4matypeR1)//R0 R1 mix mode
1471 {
1472 (u4matypeR0 > u4matypeR1)? (u4BaseR0 >>= 1): (u4BaseR1 >>= 1);//set the correct start address, refer to mapping table
1473 u4Offset >>= 1;//set min value
1474 }
1475
1476 u4Offset = (u4Offset == 0) ? 1 : u4Offset; //the engine would halt if u4Offset were 0
1477
1478 u4LEN1 = u4IO32ReadFldAlign(DRAMC_REG_TEST2_A4, TEST2_A4_TEST_REQ_LEN1);
1479 if(u4LEN1)
1480 {
1481 u4Offset = u4Offset - LEN1_INTRINSIC_OFFSET;
1482 }
1483
1484 #if ENABLE_EMI_LPBK_TEST && EMI_USE_TA2
1485 if (gEmiLpbkTest)
1486 {
1487 u4matypeR0 = 2;
1488 u4matypeR1 = 2;
1489 u4Offset = 3;
1490 //u4Offset = 6;//3; //6: for emilpbk_dq_dvs_leadlag_toggle_ringcnt
1491 }
1492 #endif
1493
1494 if (TA2_RKSEL_XRT == rksel_mode)
1495 {
1496 // In order to enhance XRT R2R/W2W probability, use TEST2_4_TESTAGENTRKSEL=3, PERFCTL0_RWOFOEN=0 mode
1497 uiRWOFOEN = 0;
1498 msg("=== TA2 XRT R2R/W2W\n");
1499 }
1500 else
1501 {
1502 uiRWOFOEN = 1;
1503 #if !ENABLE_EMI_LPBK_TEST
1504 msg("=== TA2 HW\n");
1505 #endif
1506 }
1507 #if !ENABLE_EMI_LPBK_TEST
1508 msg("=== OFFSET:0x%x\n", u4Offset);
1509 #endif
1510 for (u1ChannelIdx = 0; u1ChannelIdx < p->support_channel_num; u1ChannelIdx++)
1511 {
1512 p->channel = (DRAM_CHANNEL_T)u1ChannelIdx;
1513
1514 for(u1RkIdx = 0; u1RkIdx < p->support_rank_num; u1RkIdx++)
1515 {
1516 p->rank = (DRAM_RANK_T)u1RkIdx;
1517 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_TEST2_A1), u4BaseR0, RK_TEST2_A1_TEST2_BASE);//fill based on RG table for Rank 0
1518 }
1519 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A2), u4Offset, TEST2_A2_TEST2_OFF);//128 bits data length per offest
1520 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4), rksel_mode, TEST2_A4_TESTAGENTRKSEL);
1521 DramcSetRWOFOEN(p, uiRWOFOEN);
1522 }
1523 p->channel = eOriChannel;
1524 p->rank = eOriRank;
1525 //TA2_Test_Run_Time_HW_Set_Column_Num(p);
1526
1527 return;
1528 }
1529 #if ETT_MINI_STRESS_USE_TA2_LOOP_MODE
1530 #define TA2_PAT TEST_WORST_SI_PATTERN
1531 #else
1532 #define TA2_PAT TEST_XTALK_PATTERN
1533 #endif
1534 void TA2_Test_Run_Time_Pat_Setting(DRAMC_CTX_T *p, U8 PatSwitch)
1535 {
1536 static U8 u1Pat = TA2_PAT, u1loop = 1;
1537 U8 u1ChannelIdx = 0;
1538 DRAM_CHANNEL_T eOriChannel = p->channel;
1539
1540 if (u1loop || PatSwitch == TA2_PAT_SWITCH_ON)
1541 {
1542 #if !ENABLE_EMI_LPBK_TEST
1543 msg("TA2 PAT: %d\n", u1Pat);
1544 #endif
1545 for (u1ChannelIdx = CHANNEL_A; u1ChannelIdx < p->support_channel_num; u1ChannelIdx++)
1546 {
1547 p->channel = (DRAM_CHANNEL_T)u1ChannelIdx;
1548 DramcEngine2SetPat(p, u1Pat, p->support_rank_num - 1, 0, TE_NO_UI_SHIFT);
1549 }
1550 p->channel = eOriChannel;
1551
1552 #if !ETT_MINI_STRESS_USE_TA2_LOOP_MODE
1553 {
1554 U32 u4Value = 0;
1555 u4Value = (u1Pat == TEST_WORST_SI_PATTERN) ? 1 : 0; //Worst SI pattern + loop mode + LEN1
1556 vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A0, u4Value, TEST2_A0_TA2_LOOP_EN);
1557 vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A0, u4Value, TEST2_A0_LOOP_NV_END);
1558 vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A0, u4Value, TEST2_A0_ERR_BREAK_EN);
1559 vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A4, u4Value, TEST2_A4_TEST_REQ_LEN1);
1560 }
1561 #endif
1562 if (PatSwitch)
1563 u1Pat = (u1Pat + 1) % 4;
1564 else
1565 u1loop = 0;
1566 }
1567 return;
1568 }
1569
1570 void TA2_Test_Run_Time_HW_Write(DRAMC_CTX_T * p, U8 u1Enable)
1571 {
1572 DRAM_CHANNEL_T eOriChannel = p->channel;
1573 U8 u1ChannelIdx;
1574
1575 #if !ENABLE_EMI_LPBK_TEST
1576 msg("\nTA2 Trigger Write\n");
1577 #endif
1578 for (u1ChannelIdx = 0; u1ChannelIdx < p->support_channel_num; u1ChannelIdx++)
1579 {
1580 p->channel = (DRAM_CHANNEL_T)u1ChannelIdx;
1581 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), u1Enable, TEST2_A3_TEST2W);
1582 }
1583 p->channel = eOriChannel;
1584 return;
1585 }
1586
1587 static void TA2_Show_Cnt(DRAMC_CTX_T * p, U32 u4ErrorValue)
1588 {
1589 static U32 err_count = 0;
1590 static U32 pass_count = 0;
1591 U8 u1RankIdx = 0;
1592 for (u1RankIdx = 0; u1RankIdx < p->support_rank_num; u1RankIdx++)
1593 {
1594 if (u4ErrorValue & (1 << u1RankIdx))
1595 {
1596 err_count++;
1597 msg("HW channel(%d) Rank(%d), TA2 failed, pass_cnt:%d, err_cnt:%d\n", p->channel, u1RankIdx, pass_count, err_count);
1598 }
1599 else
1600 {
1601 pass_count++;
1602 msg("HW channel(%d) Rank(%d), TA2 pass, pass_cnt:%d, err_cnt:%d\n", p->channel, u1RankIdx, pass_count, err_count);
1603 }
1604 }
1605 }
1606 #if ETT_MINI_STRESS_USE_TA2_LOOP_MODE
1607 U32 TA2_Test_Run_Time_HW_Status(DRAMC_CTX_T * p)
1608 {
1609 U8 u1ChannelIdx = 0;
1610 U32 u4ErrorValue = 0;
1611 U32 bit_error = 0;
1612 DRAM_CHANNEL_T eOriChannel = p->channel;
1613 for (u1ChannelIdx = 0; u1ChannelIdx < p->support_channel_num; u1ChannelIdx++)
1614 {
1615 vSetPHY2ChannelMapping(p, u1ChannelIdx);
1616 u4ErrorValue = DramcEngine2Compare(p, TE_OP_WRITE_READ_CHECK);
1617 if (u4ErrorValue & 0x3) //RK0 or RK1 test fail
1618 {
1619 msg("=== HW channel(%d) u4ErrorValue: 0x%x, bit error: 0x%x\n", u1ChannelIdx, u4ErrorValue, u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CMP_ERR)));
1620 #if defined(SLT)
1621 err("[dramc] DRAM_FATAL_ERR_FLAG = 0x80000000, line: %d\n",__LINE__);
1622 while (1);
1623 #endif
1624 }
1625 TA2_Show_Cnt(p, u4ErrorValue);
1626 bit_error |= u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CMP_ERR));
1627 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), P_Fld(0, TEST2_A3_TEST2W) | P_Fld(0, TEST2_A3_TEST2R) | P_Fld(0, TEST2_A3_TEST1));
1628 }
1629 vSetPHY2ChannelMapping(p, eOriChannel);
1630 return bit_error;
1631 }
1632 #else
1633 U32 TA2_Test_Run_Time_HW_Status(DRAMC_CTX_T * p)
1634 {
1635 U8 u1ChannelIdx = 0;
1636 U32 u4ErrorValue = 0;
1637 U32 u4Ta2LoopEn = 0;
1638 U32 u4loopcount = 0;
1639 U8 u1status = 0;
1640 U32 bit_error = 0;
1641 static U32 err_count = 0;
1642 static U32 pass_count = 0;
1643 DRAM_CHANNEL_T eOriChannel = p->channel;
1644
1645 for (u1ChannelIdx = 0; u1ChannelIdx < p->support_channel_num; u1ChannelIdx++)
1646 {
1647 vSetPHY2ChannelMapping(p, u1ChannelIdx);
1648 u4Ta2LoopEn = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0), TEST2_A0_TA2_LOOP_EN);
1649 //msg("### u4Ta2LoopEn:%d ### \n", u4Ta2LoopEn);
1650
1651 if(u4Ta2LoopEn)
1652 {
1653 u4loopcount = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), TEST2_A3_TESTCNT);
1654 if (u4loopcount == 1)
1655 u1status = 3; //RK0/1
1656
1657 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0), 0, TEST2_A0_LOOP_NV_END);//cancel NV_END
1658 DramcEngine2CheckComplete(p, u1status);//Wait for complete
1659 //msg("TESTRPT_TESTSTAT:%x\n", u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TESTRPT), TESTRPT_TESTSTAT));//check TESTRPT_TESTSTAT
1660 u4ErrorValue = (u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TESTRPT)) >> 4) & 0x3; //CMP_ERR_RK0/1
1661 }
1662 else
1663 u4ErrorValue = DramcEngine2Compare(p, TE_OP_WRITE_READ_CHECK);
1664
1665 if (u4ErrorValue & 0x3) //RK0 or RK1 test fail
1666 {
1667 msg("=== HW channel(%d) u4ErrorValue: 0x%x, bit error: 0x%x\n", u1ChannelIdx, u4ErrorValue, u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CMP_ERR)));
1668 #if defined(SLT)
1669 err("[dramc] DRAM_FATAL_ERR_FLAG = 0x80000000, line: %d\n",__LINE__);
1670 while (1);
1671 #endif
1672 }
1673 TA2_Show_Cnt(p, u4ErrorValue);
1674
1675 bit_error |= u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CMP_ERR));
1676 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), P_Fld(0, TEST2_A3_TEST2W) | P_Fld(0, TEST2_A3_TEST2R) | P_Fld(0, TEST2_A3_TEST1));
1677 }
1678 vSetPHY2ChannelMapping(p, eOriChannel);
1679
1680 return bit_error;
1681 }
1682 #endif
1683
1684 void TA2_Test_Run_Time_HW(DRAMC_CTX_T * p)
1685 {
1686 DRAM_CHANNEL_T channel_bak = p->channel;
1687 DRAM_RANK_T rank_bak = p->rank;
1688
1689 TA2_Test_Run_Time_HW_Presetting(p, 0x10000, TA2_RKSEL_HW); //TEST2_2_TEST2_OFF = 0x400
1690 TA2_Test_Run_Time_Pat_Setting(p, TA2_PAT_SWITCH_OFF);
1691 TA2_Test_Run_Time_HW_Write(p, ENABLE);
1692 //mcDELAY_MS(1);
1693 TA2_Test_Run_Time_HW_Status(p);
1694
1695 p->channel = channel_bak;
1696 p->rank = rank_bak;
1697 return;
1698 }
1699
1700 void Temp_TA2_Test_After_K(DRAMC_CTX_T * p)
1701 {
1702 DRAM_CHANNEL_T channel_bak = p->channel;
1703 DRAM_RANK_T rank_bak = p->rank;
1704
1705 do {
1706 TA2_Test_Run_Time_Pat_Setting(p, TA2_PAT_SWITCH_ON);
1707 TA2_Test_Run_Time_HW_Presetting(p, 0x200000, TA2_RKSEL_HW);
1708 TA2_Test_Run_Time_HW_Write(p, ENABLE);//TA2 trigger W
1709 TA2_Test_Run_Time_HW_Status(p);
1710 }while(1);
1711
1712 p->channel = channel_bak;
1713 p->rank = rank_bak;
1714 return;
1715 }
1716
1717 static U8 *DramcFetchGlobalMR(DRAMC_CTX_T *p, U8 mr_idx)
1718 {
1719 U8 *pMRGlobalValue = NULL;
1720
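	/* Only MR13, MR26 and MR30 keep a per-rank SW shadow copy
	 * (u1MR13Value/u1MR26Value/u1MR30Value); any other index is an error here.
	 */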
1721 switch (mr_idx)
1722 {
1723 case 13: pMRGlobalValue = &u1MR13Value[p->rank]; break;
1724 case 26: pMRGlobalValue = &u1MR26Value[p->rank]; break;
1725 case 30: pMRGlobalValue = &u1MR30Value[p->rank]; break;
1726 default:
1727 err("%s NULL\n", __func__);
1728 #if __ETT__
1729 while(1);
1730 #endif
1731 break;
1732 }
1733
1734 return pMRGlobalValue;
1735 }
1736
1737 #if MRW_BACKUP
1738 U8 DramcMRWriteBackup(DRAMC_CTX_T *p, U8 u1MRIdx, U8 u1Rank)
1739 {
1740 U8 u1Value=0xff;
1741 U8 u1Fsp;
1742 U8 u1MRBackup_ERR_Flag=0, u1backupRK=p->rank;
1743 U16 u2Offset=0x0;
1744 REG_TRANSFER_T TransferReg;
1745
1746 u1Fsp = FSP_0;
1747
1748 #if (__LP5_COMBO__ == TRUE)
1749 if (is_lp5_family(p))
1750 {
1751 switch(u1MRIdx)
1752 {
1753 case 1:
1754 case 2:
1755 case 3:
1756 case 10:
1757 case 11:
1758 case 12:
1759 case 14:
1760 case 15:
1761 case 17:
1762 case 18:
1763 case 19:
1764 case 20:
1765 case 24:
1766 case 30:
1767 case 41:
1768 u1Fsp = gFSPWR_Flag[u1Rank];
1769 break;
1770 }
1771 }
1772 else
1773 #endif
1774 {
1775 switch(u1MRIdx)
1776 {
1777 case 1:
1778 case 2:
1779 case 3:
1780 case 11:
1781 case 12:
1782 case 14:
1783 case 22:
1784 u1Fsp = gFSPWR_Flag[u1Rank];
1785 break;
1786 }
1787 }
1788 if (u1Fsp == FSP_0) /* All MR */
1789 {
1790 switch (u1MRIdx)
1791 {
1792 case 1:
1793 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_00_RK0_FSP0;
1794 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP0_MRWBK_RK0_FSP0_MR1;
1795 break;
1796 case 2:
1797 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_00_RK0_FSP0;
1798 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP0_MRWBK_RK0_FSP0_MR2;
1799 break;
1800 case 3:
1801 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_00_RK0_FSP0;
1802 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP0_MRWBK_RK0_FSP0_MR3;
1803 break;
1804 case 4:
1805 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_00_RK0_FSP0;
1806 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP0_MRWBK_RK0_FSP0_MR4;
1807 break;
1808 case 9:
1809 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_01_RK0_FSP0;
1810 TransferReg.u4Fld = MR_BACKUP_01_RK0_FSP0_MRWBK_RK0_FSP0_MR9;
1811 break;
1812 case 10:
1813 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_01_RK0_FSP0;
1814 TransferReg.u4Fld = MR_BACKUP_01_RK0_FSP0_MRWBK_RK0_FSP0_MR10;
1815 break;
1816 case 11:
1817 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_01_RK0_FSP0;
1818 TransferReg.u4Fld = MR_BACKUP_01_RK0_FSP0_MRWBK_RK0_FSP0_MR11;
1819 break;
1820 case 12:
1821 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_01_RK0_FSP0;
1822 TransferReg.u4Fld = MR_BACKUP_01_RK0_FSP0_MRWBK_RK0_FSP0_MR12;
1823 break;
1824 case 13:
1825 //MR13 (LP4) workaround: the two RGs are not synchronized
1826 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_02_RK0_FSP0;
1827 TransferReg.u4Fld = MR_BACKUP_02_RK0_FSP0_MRWBK_RK0_FSP0_MR13;
1828 break;
1829 case 14:
1830 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_02_RK0_FSP0;
1831 TransferReg.u4Fld = MR_BACKUP_02_RK0_FSP0_MRWBK_RK0_FSP0_MR14;
1832 break;
1833 case 15:
1834 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_02_RK0_FSP0;
1835 TransferReg.u4Fld = MR_BACKUP_02_RK0_FSP0_MRWBK_RK0_FSP0_MR15;
1836 break;
1837 case 16:
1838 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_02_RK0_FSP0;
1839 TransferReg.u4Fld = MR_BACKUP_02_RK0_FSP0_MRWBK_RK0_FSP0_MR16;
1840 break;
1841 case 17:
1842 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP0;
1843 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP0_MRWBK_RK0_FSP0_MR17;
1844 break;
1845 case 18:
1846 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP0;
1847 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP0_MRWBK_RK0_FSP0_MR18;
1848 break;
1849 case 19:
1850 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP0;
1851 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP0_MRWBK_RK0_FSP0_MR19;
1852 break;
1853 case 20:
1854 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP0;
1855 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP0_MRWBK_RK0_FSP0_MR20;
1856 break;
1857 case 21:
1858 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_04_RK0_FSP0;
1859 TransferReg.u4Fld = MR_BACKUP_04_RK0_FSP0_MRWBK_RK0_FSP0_MR21;
1860 break;
1861 case 22:
1862 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_04_RK0_FSP0;
1863 TransferReg.u4Fld = MR_BACKUP_04_RK0_FSP0_MRWBK_RK0_FSP0_MR22;
1864 break;
1865 case 23:
1866 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_04_RK0_FSP0;
1867 TransferReg.u4Fld = MR_BACKUP_04_RK0_FSP0_MRWBK_RK0_FSP0_MR23;
1868 break;
1869 case 24:
1870 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_04_RK0_FSP0;
1871 TransferReg.u4Fld = MR_BACKUP_04_RK0_FSP0_MRWBK_RK0_FSP0_MR24;
1872 break;
1873 case 25:
1874 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_05_RK0_FSP0;
1875 TransferReg.u4Fld = MR_BACKUP_05_RK0_FSP0_MRWBK_RK0_FSP0_MR25;
1876 break;
1877 case 26:
1878 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_05_RK0_FSP0;
1879 TransferReg.u4Fld = MR_BACKUP_05_RK0_FSP0_MRWBK_RK0_FSP0_MR26;
1880 break;
1881 case 27:
1882 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_05_RK0_FSP0;
1883 TransferReg.u4Fld = MR_BACKUP_05_RK0_FSP0_MRWBK_RK0_FSP0_MR27;
1884 break;
1885 case 28:
1886 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_05_RK0_FSP0;
1887 TransferReg.u4Fld = MR_BACKUP_05_RK0_FSP0_MRWBK_RK0_FSP0_MR28;
1888 break;
1889 case 30:
1890 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_06_RK0_FSP0;
1891 TransferReg.u4Fld = MR_BACKUP_06_RK0_FSP0_MRWBK_RK0_FSP0_MR30;
1892 break;
1893 case 31:
1894 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_06_RK0_FSP0;
1895 TransferReg.u4Fld = MR_BACKUP_06_RK0_FSP0_MRWBK_RK0_FSP0_MR31;
1896 break;
1897 case 32:
1898 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_06_RK0_FSP0;
1899 TransferReg.u4Fld = MR_BACKUP_06_RK0_FSP0_MRWBK_RK0_FSP0_MR32;
1900 break;
1901 case 33:
1902 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_06_RK0_FSP0;
1903 TransferReg.u4Fld = MR_BACKUP_06_RK0_FSP0_MRWBK_RK0_FSP0_MR33;
1904 break;
1905 case 34:
1906 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_07_RK0_FSP0;
1907 TransferReg.u4Fld = MR_BACKUP_07_RK0_FSP0_MRWBK_RK0_FSP0_MR34;
1908 break;
1909 case 37:
1910 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_07_RK0_FSP0;
1911 TransferReg.u4Fld = MR_BACKUP_07_RK0_FSP0_MRWBK_RK0_FSP0_MR37;
1912 break;
1913 case 39:
1914 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_07_RK0_FSP0;
1915 TransferReg.u4Fld = MR_BACKUP_07_RK0_FSP0_MRWBK_RK0_FSP0_MR39;
1916 break;
1917 case 40:
1918 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_07_RK0_FSP0;
1919 TransferReg.u4Fld = MR_BACKUP_07_RK0_FSP0_MRWBK_RK0_FSP0_MR40;
1920 break;
1921 case 41:
1922 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_08_RK0_FSP0;
1923 TransferReg.u4Fld = MR_BACKUP_08_RK0_FSP0_MRWBK_RK0_FSP0_MR41;
1924 break;
1925 case 42:
1926 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_08_RK0_FSP0;
1927 TransferReg.u4Fld = MR_BACKUP_08_RK0_FSP0_MRWBK_RK0_FSP0_MR42;
1928 break;
1929 case 46:
1930 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_08_RK0_FSP0;
1931 TransferReg.u4Fld = MR_BACKUP_08_RK0_FSP0_MRWBK_RK0_FSP0_MR46;
1932 break;
1933 case 48:
1934 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_08_RK0_FSP0;
1935 TransferReg.u4Fld = MR_BACKUP_08_RK0_FSP0_MRWBK_RK0_FSP0_MR48;
1936 break;
1937 case 51:
1938 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_09_RK0_FSP0;
1939 TransferReg.u4Fld = MR_BACKUP_09_RK0_FSP0_MRWBK_RK0_FSP0_MR51;
1940 break;
1941 case 63:
1942 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_09_RK0_FSP0;
1943 TransferReg.u4Fld = MR_BACKUP_09_RK0_FSP0_MRWBK_RK0_FSP0_MR63;
1944 break;
1945 }
1946 }
1947 else if (u1MRIdx == 21 || u1MRIdx == 22) /* MR only in FSP0/FSP1 */
1948 {
1949 if (u1MRIdx == 21)
1950 {
1951 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_04_RK0_FSP1;
1952 TransferReg.u4Fld = MR_BACKUP_04_RK0_FSP1_MRWBK_RK0_FSP1_MR21;
1953 }
1954 else
1955 {
1956 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP1;
1957 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP1_MRWBK_RK0_FSP1_MR22;
1958 }
1959 }
1960 else /* MR in FSP0/FSP1/FSP2 */
1961 {
1962 if (u1MRIdx <= 20)
1963 {
1964 if (u1MRIdx <= 10)
1965 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_00_RK0_FSP1;
1966 else if (u1MRIdx <= 15)
1967 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_01_RK0_FSP1;
1968 else
1969 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP1;
1970 TransferReg.u4Addr += ((u1Fsp - FSP_1) * 0x30);
1971 if (u1MRIdx == 1 || u1MRIdx == 11 || u1MRIdx == 17)
1972 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP1_MRWBK_RK0_FSP1_MR1;
1973 else if (u1MRIdx == 2 || u1MRIdx == 12 || u1MRIdx == 18)
1974 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP1_MRWBK_RK0_FSP1_MR2;
1975 else if (u1MRIdx == 3 || u1MRIdx == 14 || u1MRIdx == 19)
1976 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP1_MRWBK_RK0_FSP1_MR3;
1977 else
1978 TransferReg.u4Fld = MR_BACKUP_00_RK0_FSP1_MRWBK_RK0_FSP1_MR10;
1979 }
1980 else if (u1Fsp == FSP_2 && u1MRIdx == 24)
1981 {
1982 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP2;
1983 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP2_MRWBK_RK0_FSP2_MR24;
1984 }
1985 else if (u1Fsp == FSP_1 && u1MRIdx == 41)
1986 {
1987 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP1;
1988 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP1_MRWBK_RK0_FSP1_MR41;
1989 }
1990 else
1991 {
1992 TransferReg.u4Addr = DRAMC_REG_MR_BACKUP_03_RK0_FSP1 + ((u1Fsp - FSP_1) * 0x30);
1993 if ((u1Fsp == FSP_1 && u1MRIdx == 24) || (u1Fsp == FSP_2 && u1MRIdx == 30))
1994 {
1995 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP1_MRWBK_RK0_FSP1_MR24;
1996 }
1997 else
1998 {
1999 TransferReg.u4Fld = MR_BACKUP_03_RK0_FSP1_MRWBK_RK0_FSP1_MR30;
2000 }
2001 }
2002 }
2003 vSetRank(p, u1Rank);
2004 if (u1MRBackup_ERR_Flag==0)
2005 {
2006 u1Value=u4IO32ReadFldAlign(DRAMC_REG_ADDR(TransferReg.u4Addr), TransferReg.u4Fld);
2007 mcSHOW_MRW_MSG(" [MRW Backup] Rank%d FSP%d MR%d=0x%x\n",u1Rank, gFSPWR_Flag[u1Rank], u1MRIdx, u1Value);
2008 }
2009 vSetRank(p, u1backupRK);
2010
2011 return u1Value;
2012
2013 }
2014 #endif
2015
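/*
 * The two helpers below update a field of the SW shadow MR value fetched via
 * DramcFetchGlobalMR() (read-modify-write on the cached byte) and, when
 * UpdateMode == TO_MR, also issue the MRW to the DRAM for the current rank.
 * Illustrative call only (the field macro name is a placeholder, not a real RG):
 *   DramcMRWriteFldAlign(p, 13, 1, HYPOTHETICAL_MR13_FIELD, TO_MR);
 */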
2016 void DramcMRWriteFldMsk(DRAMC_CTX_T *p, U8 mr_idx, U8 listVal8, U8 msk8, U8 UpdateMode)
2017 {
2018 U8 *pMRGlobalValue = DramcFetchGlobalMR(p, mr_idx);
2019
2020 // ASSERT (pMRGlobalValue != NULL)
2021
2022 *pMRGlobalValue = ((*pMRGlobalValue & ~msk8) | listVal8);
2023
2024 if (UpdateMode == TO_MR)
2025 DramcModeRegWriteByRank(p, p->rank, mr_idx, *pMRGlobalValue);
2026 }
2027
2028 void DramcMRWriteFldAlign(DRAMC_CTX_T *p, U8 mr_idx, U8 value, U32 mr_fld, U8 UpdateMode)
2029 {
2030 U8 *pMRGlobalValue = DramcFetchGlobalMR(p, mr_idx);
2031
2032 // ASSERT (pMRGlobalValue != NULL)
2033
2034 *pMRGlobalValue &= ~(Fld2Msk32(mr_fld));
2035 *pMRGlobalValue |= (value << Fld_shft(mr_fld));
2036
2037 if (UpdateMode == TO_MR)
2038 DramcModeRegWriteByRank(p, p->rank, mr_idx, *pMRGlobalValue);
2039 }
2040
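/*
 * SW mode-register read flow: program the MR index into SWCMD_CTRL0_MRSMA,
 * fire the MRR by toggling SWCMD_EN_MRREN 0->1, poll SPCMDRESP_MRR_RESPONSE,
 * read the returned value from MRR_STATUS, then clear MRREN for the next MRR.
 * The target rank is expected to have been selected by the caller via
 * SWCMD_CTRL0_MRSRK (see DramcModeRegReadByRank below).
 */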
2041 void DramcModeRegRead(DRAMC_CTX_T *p, U8 u1MRIdx, U16 *u2pValue)
2042 {
2043 U32 u4MRValue;
2044 #ifdef DUMP_INIT_RG_LOG_TO_DE
2045 gDUMP_INIT_RG_LOG_TO_DE_RG_log_flag=0;
2046 #endif
2047 //vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), p->rank, SWCMD_CTRL0_MRRRK);
2048 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1MRIdx, SWCMD_CTRL0_MRSMA);
2049
2050 // MRR command will be fired when MRREN 0->1
2051 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_MRREN);
2052
2053 // Wait until the MRR command has been fired.
2054 while (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_MRR_RESPONSE) == 0)
2055 {
2056 mcDELAY_US(1);
2057 }
2058
2059 // LP3 does not support the CG condition, so it cannot use MRR_STATUS_MRR_SW_REG for SW MRR.
2060 // Once the HW CG condition is fixed, LP3 will use MRR_STATUS_MRR_SW_REG for SW MRR as well.
2061 U32 u4MRRReg;
2062 if (u1IsLP4Family(p->dram_type))
2063 u4MRRReg = MRR_STATUS_MRR_SW_REG;
2064 else
2065 u4MRRReg = MRR_STATUS_MRR_REG;
2066
2067 // Read out mode register value
2068 u4MRValue = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MRR_STATUS), u4MRRReg);
2069 *u2pValue = (U16)u4MRValue;
2070
2071 // Set MRREN = 0 for the next MRR.
2072 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_MRREN);
2073
2074 #ifdef DUMP_INIT_RG_LOG_TO_DE
2075 gDUMP_INIT_RG_LOG_TO_DE_RG_log_flag=1;
2076 #endif
2077
2078 msg3("Read MR%d =0x%x\n", u1MRIdx, u4MRValue);
2079 }
2080
2081
2082 void DramcModeRegReadByRank(DRAMC_CTX_T *p, U8 u1Rank, U8 u1MRIdx, U16 *u2pValue)
2083 {
2084 U16 u2Value = 0;
2085 U8 u1RankBak;
2086
2087 /* The TMRRI design changed: there are two modes, selected by R_DMRK_SCINPUT_OPT
2088 * DE: Jouling, Berson
2089 * To specify the SW MRR rank -> new mode (scinput_opt == 0): MRSRK
2090 *                               old mode (scinput_opt == 1): MRRRK
2091 * Note: MPCRK is no longer used by SW to control the rank
2092 */
2093 //Backup & set rank
2094 u1RankBak = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), SWCMD_CTRL0_MRSRK); //backup rank
2095 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1Rank, SWCMD_CTRL0_MRSRK); //set rank
2096
2097 //Mode reg read
2098 DramcModeRegRead(p, u1MRIdx, &u2Value);
2099 *u2pValue = u2Value;
2100
2101 //Restore rank
2102 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1RankBak, SWCMD_CTRL0_MRSRK);
2103
2104 }
2105
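/*
 * Run-time MRW (RTMRW): queues up to six MRW commands (rank / MR index / opcode
 * per slot) into the RTMRW_CTRL registers, fires them with SWCMD_EN_RTMRWEN and
 * polls SPCMDRESP_RTMRW_RESPONSE with a timeout. Illustrative usage only, e.g.
 * writing the same value to MR13 of both ranks in one shot:
 *   U8 rk[2] = {0, 1}, mr[2] = {13, 13}, op[2] = {value, value}; // 'value' is a placeholder
 *   DramcModeRegWriteByRank_RTMRW(p, rk, mr, op, 2);
 */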
2106 void DramcModeRegWriteByRank_RTMRW(DRAMC_CTX_T *p, U8 *u1Rank, U8 *u1MRIdx, U8 *u1Value, U8 u1Len)
2107 {
2108 U32 u4Response, u4TimeCnt, ii;
2109 U8 u1MRRK[6] = {0}, u1MRMA[6] = {0}, u1MROP[6] = {0};
2110
2111 if (u1Len > 6 || u1Len == 0)
2112 return;
2113
2114 for (ii = 0;ii < u1Len;ii++)
2115 {
2116 u1MRRK[ii] = u1Rank[ii];
2117 u1MRMA[ii] = u1MRIdx[ii];
2118 u1MROP[ii] = u1Value[ii];
2119 }
2120
2121 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL0),
2122 P_Fld(3, RTMRW_CTRL0_RTMRW_LAT) |
2123 P_Fld(0x20, RTMRW_CTRL0_RTMRW_AGE) |
2124 P_Fld(u1Len - 1, RTMRW_CTRL0_RTMRW_LEN) |
2125 P_Fld(u1MRRK[0], RTMRW_CTRL0_RTMRW0_RK) |
2126 P_Fld(u1MRRK[1], RTMRW_CTRL0_RTMRW1_RK) |
2127 P_Fld(u1MRRK[2], RTMRW_CTRL0_RTMRW2_RK) |
2128 P_Fld(u1MRRK[3], RTMRW_CTRL0_RTMRW3_RK) |
2129 P_Fld(u1MRRK[4], RTMRW_CTRL0_RTMRW4_RK) |
2130 P_Fld(u1MRRK[5], RTMRW_CTRL0_RTMRW5_RK));
2131 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL1),
2132 P_Fld(u1MRMA[0], RTMRW_CTRL1_RTMRW0_MA) |
2133 P_Fld(u1MRMA[1], RTMRW_CTRL1_RTMRW1_MA) |
2134 P_Fld(u1MRMA[2], RTMRW_CTRL1_RTMRW2_MA) |
2135 P_Fld(u1MRMA[3], RTMRW_CTRL1_RTMRW3_MA));
2136 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL2),
2137 P_Fld(u1MROP[0], RTMRW_CTRL2_RTMRW0_OP) |
2138 P_Fld(u1MROP[1], RTMRW_CTRL2_RTMRW1_OP) |
2139 P_Fld(u1MROP[2], RTMRW_CTRL2_RTMRW2_OP) |
2140 P_Fld(u1MROP[3], RTMRW_CTRL2_RTMRW3_OP));
2141 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL3),
2142 P_Fld(u1MRMA[4], RTMRW_CTRL3_RTMRW4_MA) |
2143 P_Fld(u1MRMA[5], RTMRW_CTRL3_RTMRW5_MA) |
2144 P_Fld(u1MROP[4], RTMRW_CTRL3_RTMRW4_OP) |
2145 P_Fld(u1MROP[5], RTMRW_CTRL3_RTMRW5_OP));
2146 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL),
2147 1, MPC_CTRL_RTMRW_HPRI_EN);
2148 mcDELAY_US(5);
2149 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN),
2150 1, SWCMD_EN_RTMRWEN);
2151
2152 #if __LP5_COMBO__
2153 #if WORKAROUND_LP5_HEFF
2154 if (is_heff_mode(p))
2155 {
2156 mcDELAY_US(1);
2157 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2158 1, CKECTRL_CKE2RANK_OPT6);
2159 }
2160 #endif
2161 #endif
2162
2163 u4TimeCnt = TIME_OUT_CNT;
2164
2165 do {
2166 u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP),
2167 SPCMDRESP_RTMRW_RESPONSE);
2168 u4TimeCnt--;
2169 mcDELAY_US(5);
2170 } while ((u4Response == 0) && (u4TimeCnt > 0));
2171
2172 if (u4TimeCnt == 0)//time out
2173 {
2174 msg("[LP5 RT MRW ] Resp fail (time out) Rank=%d, MR%d=0x%x\n", u1Rank[0], u1MRIdx[0], u1Value[0]);
2175 }
2176
2177 #if __LP5_COMBO__
2178 #if WORKAROUND_LP5_HEFF
2179 if (is_heff_mode(p))
2180 {
2181 mcDELAY_US(1);
2182 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2183 0, CKECTRL_CKE2RANK_OPT6);
2184 }
2185 #endif
2186 #endif
2187
2188 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN),
2189 0, SWCMD_EN_RTMRWEN);
2190 }
2191
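/*
 * Legacy MRW path through the SW command state machine (SCSM): the current
 * MRSRK rank selection and CKECTRL are backed up, CKE is forced on for the
 * write, the MRW is fired by toggling SWCMD_EN_MRWEN 0->1, and both registers
 * are restored afterwards.
 */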
2192 static void DramcModeRegWriteByRank_SCSM(DRAMC_CTX_T *p, U8 u1Rank, U8 u1MRIdx, U8 u1Value)
2193 {
2194 U32 counter = 0;
2195 U32 u4RankBackup;
2196 U32 u4register_024;
2197
2198 // backup rank
2199 u4RankBackup = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), SWCMD_CTRL0_MRSRK);
2200
2201 //backup register of CKE fix on/off
2202 u4register_024 = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL));
2203
2204 // set rank
2205 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1Rank, SWCMD_CTRL0_MRSRK);
2206
2207 //CKE must be fixed on when doing MRW
2208 CKEFixOnOff(p, u1Rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
2209
2210 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1MRIdx, SWCMD_CTRL0_MRSMA);
2211 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1Value, SWCMD_CTRL0_MRSOP);
2212
2213 // MRW command will be fired when MRWEN 0->1
2214 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_MRWEN);
2215
2216 // Wait until the MRW command has been fired.
2217 while (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_MRW_RESPONSE) == 0)
2218 {
2219 counter++;
2220 msg2("wait MRW command Rank%d MR%d =0x%x fired (%d)\n", u1Rank, u1MRIdx, u1Value, counter);
2221 mcDELAY_US(1);
2222 }
2223
2224 // Set MRWEN = 0 for the next MRW.
2225 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_MRWEN);
2226
2227 // restore CKEFIXON value
2228 vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL), u4register_024);
2229
2230 // restore rank
2231 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u4RankBackup, SWCMD_CTRL0_MRSRK);
2232 }
2233
2234 void DramcModeRegWriteByRank(DRAMC_CTX_T *p, U8 u1Rank, U8 u1MRIdx, U8 u1Value)
2235 {
2236 msg2("MRW RK%d MR#%d = 0x%x\n", u1Rank,u1MRIdx, u1Value);
2237
2238 #ifndef ENABLE_POST_PACKAGE_REPAIR
2239 if ((u1MRIdx == 0x04) && (u1Value & 0x10))
2240 {
2241 err("ERROR MRW RK%d MR#%d = 0x%x\n", u1Rank,u1MRIdx, u1Value);
2242 while(1);
2243 }
2244 #endif
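	/* MRW dispatch: on A60868 the RTMRW/RTSWCMD paths cannot be used once
	 * u1EnterRuntime is set, so fall back to the SCSM path; otherwise LP5 parts
	 * use RTMRW (or RTSWCMD-MRW when ENABLE_RUNTIME_MRW_FOR_LP5 is disabled) and
	 * LP4 parts use the SCSM path.
	 */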
2245 #if (fcFOR_CHIP_ID == fcA60868)
2246 // RTMRW & RTSWCMD-MRW cannot be used at runtime
2247
2248 if (u1EnterRuntime)
2249 {
2250 DramcModeRegWriteByRank_SCSM(p, u1Rank, u1MRIdx, u1Value);
2251 }
2252 else
2253 #endif
2254 {
2255 #if (__LP5_COMBO__ == TRUE)
2256 if (is_lp5_family(p))
2257 {
2258 #if ENABLE_RUNTIME_MRW_FOR_LP5
2259 DramcModeRegWriteByRank_RTMRW(p, &u1Rank, &u1MRIdx, &u1Value, 1);
2260 #else
2261 DramcModeRegWriteByRank_RTSWCMD_MRW(p, u1Rank, u1MRIdx, u1Value);
2262 #endif
2263 }
2264 else
2265 #endif
2266 {
2267 DramcModeRegWriteByRank_SCSM(p, u1Rank, u1MRIdx, u1Value);
2268 }
2269 }
2270
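	/* MRW bookkeeping (MRW_CHECK_ONLY): mirror the written value into
	 * u2MRRecord[] for the affected rank(s) (u1Rank == 3 means both ranks), and
	 * track the FSP write target: the FSP MR is MR16 for LP5 (FSP in OP[1:0]) and
	 * MR13 for LP4 (FSP_WR in OP[6]); gFSPWR_Flag[] is updated accordingly.
	 * Optionally cross-check the value against the HW MRW backup registers.
	 */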
2271 #if MRW_CHECK_ONLY
2272 u1PrintModeRegWrite = 1;
2273 U8 u1Backup_Rank;
2274 U8 u1RankIdx, u1RankNum, u1RankStart;
2275 U8 u1FSPMRIdx;
2276
2277 u1Backup_Rank=p->rank;
2278
2279 if (u1Rank==3)
2280 {
2281 u1RankNum = 2;
2282 u1RankStart = 0;
2283 }
2284 else
2285 {
2286 u1RankNum = 1;
2287 u1RankStart = u1Rank;
2288 }
2289
2290 #if (__LP5_COMBO__ == TRUE)
2291 if (is_lp5_family(p))
2292 u1FSPMRIdx=16;
2293 else
2294 #endif
2295 u1FSPMRIdx=13;
2296
2297 for (u1RankIdx=u1RankStart;u1RankIdx<u1RankStart+u1RankNum;u1RankIdx++)
2298 {
2299 vSetRank(p, u1RankIdx);
2300 if (u1MRIdx==u1FSPMRIdx)
2301 {
2302 u2MRRecord[p->channel][u1RankIdx][FSP_0][u1FSPMRIdx] =u1Value;
2303 u2MRRecord[p->channel][u1RankIdx][FSP_1][u1FSPMRIdx] =u1Value;
2304 }
2305 else
2306 u2MRRecord[p->channel][u1RankIdx][gFSPWR_Flag[u1RankIdx]][u1MRIdx] = u1Value;
2307
2308 if(u1PrintModeRegWrite)
2309 {
2310 #if VENDER_JV_LOG
2311 jv_msg("Write Rank%d MR%d =0x%x\n", u1RankIdx, u1MRIdx, u1Value);
2312 #endif
2313 #if MRW_CHECK_ONLY
2314 mcSHOW_MRW_MSG("MRW CH%d Rank%d FSP%d MR%d =0x%x\n", p->channel, u1RankIdx, gFSPWR_Flag[u1RankIdx], u1MRIdx, u1Value);
2315 #endif
2316 msg2("Write Rank%d MR%d =0x%x\n", u1RankIdx, u1MRIdx, u1Value);
2317 reg_msg("Write Rank%d MR%d =0x%x\n", u1RankIdx, u1MRIdx, u1Value);
2318 }
2319 #if MRW_BACKUP
2320 U8 MR_backup;
2321
2322 MR_backup=DramcMRWriteBackup(p, u1MRIdx, u1RankIdx);
2323 if (MR_backup!=0xff)
2324 mcSHOW_MRW_MSG(" [MRW Check] Rank%d FSP%d Backup_MR%d= 0x%x MR%d= 0x%x ==>%s\n", u1RankIdx, gFSPWR_Flag[u1RankIdx], u1MRIdx, MR_backup, u1MRIdx, u1Value, (u1Value==MR_backup?"PASS":"FAIL"));
2325 #endif
2326
2327 #if (__LP5_COMBO__ == TRUE)
2328 if (is_lp5_family(p))
2329 {
2330 if (u1MRIdx==u1FSPMRIdx)
2331 gFSPWR_Flag[u1RankIdx] = u1Value & 0x3;
2332 }
2333 else
2334 #endif
2335 {
2336 if (u1MRIdx==u1FSPMRIdx)
2337 gFSPWR_Flag[u1RankIdx] = (u1Value>> 6) & 0x1;
2338 }
2339 }
2340 vSetRank(p, u1Backup_Rank);
2341 #endif
2342 }
2343
2344 #ifdef __ETT__
2345 static U8 u1gpRegBackup;
2346 #endif
2347 U32 u4gpRegBackupVlaue[100];
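/*
 * Simple register backup/restore helpers: values are saved into the global
 * u4gpRegBackupVlaue[] array (max 100 entries), so Backup/Restore calls must be
 * paired and must not nest (the ETT build asserts on this). Illustrative usage
 * only, with registers that appear in this file:
 *   U32 regs[] = { DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0), DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3) };
 *   DramcBackupRegisters(p, regs, sizeof(regs) / sizeof(regs[0]));
 *   ... modify TEST2 settings ...
 *   DramcRestoreRegisters(p, regs, sizeof(regs) / sizeof(regs[0]));
 */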
2348 void DramcBackupRegisters(DRAMC_CTX_T *p, U32 *backup_addr, U32 backup_num)
2349 {
2350 U32 u4RegIdx;
2351
2352 #ifdef __ETT__
2353 if (backup_num > 100 || u1gpRegBackup)
2354 {
2355 err("[DramcBackupRegisters] backup number over 100 or backup already in use!!!\n");
2356 while (1);
2357 }
2358
2359 u1gpRegBackup++;
2360 #endif
2361
2362 for (u4RegIdx = 0; u4RegIdx < backup_num; u4RegIdx++)
2363 {
2364 u4gpRegBackupVlaue[u4RegIdx] = u4IO32Read4B(backup_addr[u4RegIdx]);
2365 //msg("Backup Reg(0x%X) = 0x%X\n", backup_addr[u4RegIdx], u4gpRegBackupVlaue[u4RegIdx]);
2366 }
2367 }
2368
2369 void DramcRestoreRegisters(DRAMC_CTX_T *p, U32 *restore_addr, U32 restore_num)
2370 {
2371 U32 u4RegIdx;
2372
2373 #ifdef __ETT__
2374 if (u1gpRegBackup == 0)
2375 {
2376 err("[DramcRestoreRegisters] Need to call backup first\n");
2377 }
2378
2379 u1gpRegBackup--;
2380 #endif
2381
2382 for (u4RegIdx = 0; u4RegIdx < restore_num; u4RegIdx++)
2383 {
2384 vIO32Write4B(restore_addr[u4RegIdx], u4gpRegBackupVlaue[u4RegIdx]);
2385 //msg("Restore Reg(0x%X) = 0x%X\n", restore_addr[u4RegIdx], u4gpRegBackupVlaue[u4RegIdx]);
2386 }
2387 }
2388
2389 #if 0
2390 //#if defined(DDR_INIT_TIME_PROFILING) || (__ETT__ && SUPPORT_SAVE_TIME_FOR_CALIBRATION)
2391 void DramcConfInfraReset(DRAMC_CTX_T *p)
2392 {
2393 #if (FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
2394 vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CLK_CTRL, P_Fld(0, MISC_CLK_CTRL_DVFS_CLK_MEM_SEL)
2395 | P_Fld(0, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_UPDATE_EN));
2396
2397 vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CG_CTRL0), P_Fld(0, MISC_CG_CTRL0_CLK_MEM_SEL)
2398 | P_Fld(1, MISC_CG_CTRL0_W_CHG_MEM));
2399 mcDELAY_XNS(100);//reserve 100ns period for clock mute and latch the rising edge sync condition for BCLK
2400 vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CG_CTRL0), 0, MISC_CG_CTRL0_W_CHG_MEM);
2401
2402 #if (fcFOR_CHIP_ID == fcLafite)
2403 // 26M
2404 vIO32WriteFldMulti_All(DDRPHY_CKMUX_SEL, P_Fld(0x1, CKMUX_SEL_R_PHYCTRLMUX) //move CKMUX_SEL_R_PHYCTRLMUX to here (it was originally between MISC_CG_CTRL0_CLK_MEM_SEL and MISC_CTRL0_R_DMRDSEL_DIV2_OPT)
2405 | P_Fld(0x1, CKMUX_SEL_R_PHYCTRLDCM)); // PHYCTRLDCM 1: follow DDRPHY_conf DCM settings, 0: follow infra DCM settings
2406 vIO32WriteFldMulti_All(DDRPHY_MISC_CG_CTRL0, P_Fld(0, MISC_CG_CTRL0_W_CHG_MEM)
2407 | P_Fld(0, MISC_CG_CTRL0_CLK_MEM_SEL));//[5:4] mem_ck mux: 2'b00: 26MHz, [0]: change memory clock
2408 vIO32WriteFldAlign_All(DDRPHY_MISC_CG_CTRL0, 1, MISC_CG_CTRL0_W_CHG_MEM);//change clock freq
2409 mcDELAY_US(1);
2410 vIO32WriteFldAlign_All(DDRPHY_MISC_CG_CTRL0, 0, MISC_CG_CTRL0_W_CHG_MEM);//disable memory clock change
2411
2412 // dramc conf reset
2413 //time_msg("Before infra reset, 0x10001148:%x\n", *(volatile unsigned *)(0x10001148));
2414 *(volatile unsigned *)(0x10001140) = (0x1 << 15);
2415 //time_msg("After infra reset, 0x10001148:%x\n", *(volatile unsigned *)(0x10001148));
2416 __asm__ __volatile__ ("dsb" : : : "memory");
2417 mcDELAY_US(200);
2418 //time_msg("Before infra clear, 0x10001148:%x\n", *(volatile unsigned *)(0x10001148));
2419 *(volatile unsigned *)(0x10001144) = (0x1 << 15);
2420 //time_msg("After infra clear, 0x10001148:%x\n", *(volatile unsigned *)(0x10001148));
2421
2422 #if 0
2423 mcDELAY_US(200);
2424 *(volatile unsigned *)(0x10007018) = 0x88000040;
2425 mcDELAY_US(200);
2426 *(volatile unsigned *)(0x10007018) = 0x88000000;
2427 mcDELAY_US(200);
2428 #endif
2429
2430 //DDRPHY Reset
2431 vIO32WriteFldAlign_All(DDRPHY_B0_DQ3, 0x0, B0_DQ3_RG_ARDQ_RESETB_B0);
2432 vIO32WriteFldAlign_All(DDRPHY_B0_DLL_ARPI0, 0x0, B0_DLL_ARPI0_RG_ARPI_RESETB_B0);
2433 vIO32WriteFldAlign_All(DDRPHY_B1_DQ3, 0x0, B1_DQ3_RG_ARDQ_RESETB_B1);
2434 vIO32WriteFldAlign_All(DDRPHY_B1_DLL_ARPI0, 0x0, B1_DLL_ARPI0_RG_ARPI_RESETB_B1);
2435 vIO32WriteFldAlign_All(DDRPHY_CA_CMD3, 0x0, CA_CMD3_RG_ARCMD_RESETB);
2436 vIO32WriteFldAlign_All(DDRPHY_CA_DLL_ARPI0, 0x0, CA_DLL_ARPI0_RG_ARPI_RESETB_CA);
2437 vIO32WriteFldAlign(DDRPHY_PLL4, 0x0, PLL4_RG_RPHYPLL_RESETB);//Since there is only 1 PLL, only control CHA
2438 mcDELAY_US(200);
2439 vIO32WriteFldAlign_All(DDRPHY_B0_DQ3, 0x1, B0_DQ3_RG_ARDQ_RESETB_B0);
2440 vIO32WriteFldAlign_All(DDRPHY_B0_DLL_ARPI0, 0x1, B0_DLL_ARPI0_RG_ARPI_RESETB_B0);
2441 vIO32WriteFldAlign_All(DDRPHY_B1_DQ3, 0x1, B1_DQ3_RG_ARDQ_RESETB_B1);
2442 vIO32WriteFldAlign_All(DDRPHY_B1_DLL_ARPI0, 0x1, B1_DLL_ARPI0_RG_ARPI_RESETB_B1);
2443 vIO32WriteFldAlign_All(DDRPHY_CA_CMD3, 0x1, CA_CMD3_RG_ARCMD_RESETB);
2444 vIO32WriteFldAlign_All(DDRPHY_CA_DLL_ARPI0, 0x1, CA_DLL_ARPI0_RG_ARPI_RESETB_CA);
2445 vIO32WriteFldAlign(DDRPHY_PLL4, 0x1, PLL4_RG_RPHYPLL_RESETB);//Since there is only 1 PLL, only control CHA
2446
2447 //Disable SPM control
2448 vIO32WriteFldMulti(SPM_POWERON_CONFIG_EN, P_Fld(0xB16, POWERON_CONFIG_EN_PROJECT_CODE) | P_Fld(0, POWERON_CONFIG_EN_BCLK_CG_EN));
2449
2450 //For FMeter after dcm enable
2451 vIO32WriteFldAlign_All(DDRPHY_MISC_CG_CTRL2, 0x0, MISC_CG_CTRL2_RG_MEM_DCM_DCM_EN);
2452 vIO32WriteFldAlign_All(DDRPHY_MISC_CG_CTRL2, 0x1, MISC_CG_CTRL2_RG_MEM_DCM_FORCE_ON);
2453 #endif
2454 #endif
2455 }
2456 #endif
2457
2458 #define PATTERN1 0x5A5A5A5A
2459 #define PATTERN2 0xA5A5A5A5
2460
2461 #if defined(DDR_INIT_TIME_PROFILING) || ENABLE_APB_MASK_WRITE
2462 U32 l_low_tick0, l_high_tick0, l_low_tick1, l_high_tick1;
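/*
 * Time profiling helpers: Begin() latches a timestamp, End() returns the elapsed
 * time. On ETT builds the GPT tick count is scaled by 76/1000, which appears to
 * assume a ~13 MHz GPT (about 76 ns per tick) and yields microseconds; on
 * non-ETT builds get_timer() returns milliseconds and is multiplied by 1000, so
 * the result would be microseconds as well (assumption based on the scaling
 * used here).
 */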
2463 void TimeProfileBegin(void)
2464 {
2465 #if __ETT__
2466 l_low_tick0 = GPT_GetTickCount(&l_high_tick0);
2467 #else
2468 l_low_tick0 = get_timer(0);
2469 #endif
2470 }
2471
2472 U32 TimeProfileEnd(void)
2473 {
2474 #if __ETT__
2475 l_low_tick1 = GPT_GetTickCount(&l_high_tick1);
2476
2477 //time_msg("Time0 %u %u\n", l_high_tick0, l_low_tick0);
2478 //time_msg("Time1 %u %u\n", l_high_tick1, l_low_tick1);
2479 return ((l_low_tick1 - l_low_tick0) * 76) / 1000;
2480 #else
2481 l_low_tick1 = get_timer(l_low_tick0);
2482 return l_low_tick1 * 1000;
2483 #endif
2484 }
2485 #endif
2486
2487 #if QT_GUI_Tool
2488 void TA2_Test_Run_Time_SW_Presetting(DRAMC_CTX_T *p, U32 test2_1, U32 test2_2, U8 u1TestPat, U8 u1LoopCnt)
2489 {
2490 u1TestPat = u1TestPat & 0x7f;
2491
2492 DramcSetRankEngine2(p, p->rank);
2493
2494 uiReg0D0h=u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD));
2495 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD), P_Fld(0, DUMMY_RD_DQSG_DMYRD_EN) | P_Fld(0, DUMMY_RD_DQSG_DMYWR_EN) | P_Fld(0, DUMMY_RD_DUMMY_RD_EN) | P_Fld(0, DUMMY_RD_SREF_DMYRD_EN) | P_Fld(0, DUMMY_RD_DMY_RD_DBG) | P_Fld(0, DUMMY_RD_DMY_WR_DBG)); //dummy read must be disabled while the test agent is running
2496
2497 //vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TESTCHIP_DMA1), 0, TESTCHIP_DMA1_DMA_LP4MATAB_OPT);//Eddie
2498 // disable self test engine1 and self test engine2
2499 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A3), P_Fld(0, TEST2_A3_TEST2W) | P_Fld(0, TEST2_A3_TEST2R) | P_Fld(0, TEST2_A3_TEST1));
2500
2501 // 1.set pattern ,base address ,offset address
2502 // 2.select ISI pattern or audio pattern or xtalk pattern
2503 // 3.set loop number
2504 // 4.enable read or write
2505 // 5.loop to check DM_CMP_CPT
2506 // 6.return CMP_ERR
2507 // currently only ucengine_status = 1 is implemented; others are left for future extension
2508
2509 // 1
2510 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A0), P_Fld(test2_1>>24,TEST2_A0_TEST2_PAT0)|P_Fld(test2_2>>24,TEST2_A0_TEST2_PAT1));
2511
2512 #if (FOR_DV_SIMULATION_USED==1 || SW_CHANGE_FOR_SIMULATION==1)
2513 //DV sim memory 0~0x100 already has values, so it can't be used
2514 //vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A1), (test2_1+0x100) & 0x00ffffff, TEST2_A1_TEST2_BASE);
2515 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_TEST2_A1), 0x10000, RK_TEST2_A1_TEST2_BASE); //LPDDR4 Setting
2516 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_TEST2_A1), 0x0, RK_TEST2_A1_TEST2_BASE); //Eddie Change to 0 for LP5
2517 #else
2518 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_TEST2_A1), 0, RK_TEST2_A1_TEST2_BASE);
2519 #endif
2520 vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A2), 0x2, TEST2_A2_TEST2_OFF);//Eddie
2521
2522 return;
2523 }
2524 #endif
2525