xref: /aosp_15_r20/external/coreboot/src/vendorcode/mediatek/mt8192/dramc/dramc_pi_calibration_api.c (revision b9411a12aaaa7e1e6a6fb7c5e057f44ee179a49c)
1 /* SPDX-License-Identifier: BSD-3-Clause */
2 
3 //-----------------------------------------------------------------------------
4 // Include files
5 //-----------------------------------------------------------------------------
6 #include "dramc_common.h"
7 #include "dramc_int_global.h"
8 #include "x_hal_io.h"
9 #include "sv_c_data_traffic.h"
10 
11 #define BITMAP_BITS_MAX 		128
12 
13 #if CBT_MOVE_CA_INSTEAD_OF_CLK
14 #define MAX_CA_PI_DELAY 		95
15 #else
16 #define MAX_CA_PI_DELAY 		63
17 #endif
18 #define MAX_CS_PI_DELAY 		63
19 #define MAX_CLK_PI_DELAY		31
20 
21 #define PASS_RANGE_NA	0x7fff
22 
23 #define DIE_NUM_MAX 		1 //LP4 only
24 static U8 fgwrlevel_done = 0;
25 
26 
27 #if __ETT__
28 U8 gETT_WHILE_1_flag = 1;
29 #endif
30 
31 U8 u1MR01Value[FSP_MAX];
32 U8 u1MR02Value[FSP_MAX];
33 U8 u1MR03Value[FSP_MAX];
34 U8 u1MR11Value[FSP_MAX];
35 U8 u1MR18Value[FSP_MAX];
36 U8 u1MR19Value[FSP_MAX];
37 U8 u1MR20Value[FSP_MAX];
38 U8 u1MR21Value[FSP_MAX];
39 U8 u1MR22Value[FSP_MAX];
40 U8 u1MR51Value[FSP_MAX];
41 
42 U8 u1MR04Value[RANK_MAX];
43 U8 u1MR13Value[RANK_MAX];
44 U8 u1MR26Value[RANK_MAX];
45 U8 u1MR30Value[RANK_MAX];
46 
47 U8 u1MR12Value[CHANNEL_NUM][RANK_MAX][FSP_MAX];
48 U8 u1MR14Value[CHANNEL_NUM][RANK_MAX][FSP_MAX];
49 U16 gu2MR0_Value[RANK_MAX] = {0xffff, 0xffff};
50 
51 #if PINMUX_AUTO_TEST_PER_BIT_RX
52 S16 gFinalRXPerbitFirstPass[CHANNEL_NUM][DQ_DATA_WIDTH];
53 #endif
54 #if PINMUX_AUTO_TEST_PER_BIT_TX
55 S16 gFinalTXPerbitFirstPass[CHANNEL_NUM][DQ_DATA_WIDTH];
56 #endif
57 #if PINMUX_AUTO_TEST_PER_BIT_CA
58 S16 gFinalCAPerbitFirstPass[CHANNEL_NUM][RANK_MAX][CATRAINING_NUM_LP4];
59 #endif
60 
61 #ifdef FOR_HQA_TEST_USED
62 U16 gFinalCBTVrefCA[CHANNEL_NUM][RANK_MAX];
63 U16 gFinalCBTCA[CHANNEL_NUM][RANK_MAX][10];
64 U16 gFinalRXPerbitWin[CHANNEL_NUM][RANK_MAX][DQ_DATA_WIDTH];
65 U16 gFinalTXPerbitWin[CHANNEL_NUM][RANK_MAX][DQ_DATA_WIDTH];
66 U16 gFinalTXPerbitWin_min_max[CHANNEL_NUM][RANK_MAX];
67 U16 gFinalTXPerbitWin_min_margin[CHANNEL_NUM][RANK_MAX];
68 U16 gFinalTXPerbitWin_min_margin_bit[CHANNEL_NUM][RANK_MAX];
69 S8 gFinalClkDuty[CHANNEL_NUM];
70 U32 gFinalClkDutyMinMax[CHANNEL_NUM][2];
71 S8 gFinalDQSDuty[CHANNEL_NUM][DQS_NUMBER];
72 U32 gFinalDQSDutyMinMax[CHANNEL_NUM][DQS_NUMBER][2];
73 #endif
74 
75 U8 gFinalCBTVrefDQ[CHANNEL_NUM][RANK_MAX];
76 U8 gFinalRXVrefDQ[CHANNEL_NUM][RANK_MAX][2];
77 U8 gFinalTXVrefDQ[CHANNEL_NUM][RANK_MAX];
78 
79 #if defined(RELEASE)
80 U8 gEye_Scan_color_flag = 0;
81 U8 gCBT_EYE_Scan_flag = 0;
82 U8 gCBT_EYE_Scan_only_higheset_freq_flag = 1;
83 U8 gRX_EYE_Scan_flag = 0;
84 U8 gRX_EYE_Scan_only_higheset_freq_flag = 1;
85 U8 gTX_EYE_Scan_flag = 1;
86 U8 gTX_EYE_Scan_only_higheset_freq_flag = 1;
87 U8 gEye_Scan_unterm_highest_flag = 0;
88 #elif  (CFG_DRAM_LOG_TO_STORAGE)
89 U8 gEye_Scan_color_flag = 0;
90 U8 gCBT_EYE_Scan_flag = 0;
91 U8 gCBT_EYE_Scan_only_higheset_freq_flag = 1;
92 U8 gRX_EYE_Scan_flag = 1;
93 U8 gRX_EYE_Scan_only_higheset_freq_flag = 1;
94 U8 gTX_EYE_Scan_flag = 1;
95 U8 gTX_EYE_Scan_only_higheset_freq_flag = 1;
96 U8 gEye_Scan_unterm_highest_flag = 0;
97 #else
98 U8 gEye_Scan_color_flag = 1;
99 U8 gCBT_EYE_Scan_flag = 0;
100 U8 gCBT_EYE_Scan_only_higheset_freq_flag = 1;
101 U8 gRX_EYE_Scan_flag = 0;
102 U8 gRX_EYE_Scan_only_higheset_freq_flag = 1;
103 U8 gTX_EYE_Scan_flag = 0;
104 U8 gTX_EYE_Scan_only_higheset_freq_flag = 1;
105 U8 gEye_Scan_unterm_highest_flag = 0;
106 #endif
107 
108 #ifdef FOR_HQA_REPORT_USED
109 #if CFG_DRAM_LOG_TO_STORAGE
110 U8 gHQALog_flag = 1;
111 #else
112 U8 gHQALog_flag = 0;
113 #endif
114 U16 gHQALOG_RX_delay_cell_ps_075V = 0;
115 #endif
116 
117 #if (TX_AUTO_K_ENABLE && TX_AUTO_K_WORKAROUND)
118 U32 u4DQM_MCK_RK1_backup;
119 U32 u4DQM_UI_RK1_backup;
120 U32 u4DQM_PI_RK1_backup[2];
121 U32 u4DQ_MCK_RK1_backup;
122 U32 u4DQ_UI_RK1_backup;
123 U32 u4DQ_PI_RK1_backup[2];
124 #endif
125 
126 #if SIMULATION_RX_DVS
127 U8 u1DVS_increase[RANK_MAX][DQS_NUMBER_LP4];
128 #endif
129 
130 static S32 CATrain_CmdDelay[CHANNEL_NUM][RANK_MAX];
131 static U32 CATrain_CsDelay[CHANNEL_NUM][RANK_MAX];
132 
133 static S32 wrlevel_dqs_final_delay[RANK_MAX][DQS_NUMBER]; // final write-leveling DQS delay, per rank and DQS byte
134 static U16 u2g_num_dlycell_perT = 49;
135 U16 u2gdelay_cell_ps;
136 U16 u2g_num_dlycell_perT_all[DRAM_DFS_SHUFFLE_MAX][CHANNEL_NUM];///TODO: to be removed by Francis
137 U16 u2gdelay_cell_ps_all[DRAM_DFS_SHUFFLE_MAX][CHANNEL_NUM];///TODO: to be removed by Francis
138 U32 u4gVcore[DRAM_DFS_SHUFFLE_MAX];
139 
140 U8 gFinalRXVrefDQForSpeedUp[CHANNEL_NUM][RANK_MAX][2/*ODT_onoff*/][2/*2bytes*/] = {0};
141 U32 gDramcSwImpedanceResult[IMP_VREF_MAX][IMP_DRV_MAX] = {{0,0,0,0},{0,0,0,0},{0,0,0,0}};//ODT_ON/OFF x DRVP/DRVN/ODTP/ODTN
142 
143 S16 gu2RX_DQS_Duty_Offset[DQS_NUMBER][2];
144 
145 #define RX_DELAY_PRE_CAL 1
146 #if RX_DELAY_PRE_CAL
147 S16 s2RxDelayPreCal=PASS_RANGE_NA;
148 #endif
149 
150 #if MRW_CHECK_ONLY
151 U16 u2MRRecord[CHANNEL_NUM][RANK_MAX][FSP_MAX][MR_NUM];
152 #endif
153 #if MRW_CHECK_ONLY || MRW_BACKUP
154 U8 gFSPWR_Flag[RANK_MAX]={FSP_0};
155 #endif
156 
157 #define IN_CBT	(0)
158 #define OUT_CBT (1)
159 
160 #if PRINT_CALIBRATION_SUMMARY
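/*
 * Calibration summary bookkeeping: each calibration type owns one bit in a
 * pair of bitmaps (SW impedance keeps its own pair).  The (execute, result)
 * bit pair set below encodes:
 *   DRAM_FAIL   -> (1, 1)  executed and failed
 *   DRAM_OK     -> (1, 0)  executed and passed
 *   DRAM_FAST_K -> (0, 0)  skipped, result restored from fast calibration
 *   others      -> (0, 1)  not executed, no result
 */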
161 static void vSetCalibrationResult(DRAMC_CTX_T *p, U8 ucCalType, U8 ucResult)
162 {
163 	U32 *Pointer_CalExecute,*Pointer_CalResult;
164 	if (ucCalType == DRAM_CALIBRATION_SW_IMPEDANCE)
165 	{
166 		Pointer_CalExecute = &p->SWImpCalExecute;
167 		Pointer_CalResult = &p->SWImpCalResult;
168 	}
169 	else
170 	{
171 		Pointer_CalExecute = &p->aru4CalExecuteFlag[p->channel][p->rank];
172 		Pointer_CalResult = &p->aru4CalResultFlag[p->channel][p->rank];
173 	}
174 
175 	if (ucResult == DRAM_FAIL)	// Calibration FAIL
176 	{
177 		*Pointer_CalExecute |= (1<<ucCalType); // execution done
178 		*Pointer_CalResult |= (1<<ucCalType); // no result found
179 	}
180 	else if(ucResult == DRAM_OK)  // Calibration OK
181 	{
182 		*Pointer_CalExecute |= (1<<ucCalType); // execution done
183 		*Pointer_CalResult &= (~(1<<ucCalType)); // result found
184 	}
185 	else if(ucResult == DRAM_FAST_K)  // FAST K
186 	{
187 		*Pointer_CalExecute &= (~(1<<ucCalType)); // no execution
188 		*Pointer_CalResult &= (~(1<<ucCalType)); // result found
189 	}
190 	else  // NO K
191 	{
192 		*Pointer_CalExecute &= (~(1<<ucCalType)); // no execution
193 		*Pointer_CalResult |= (1<<ucCalType); // no result found
194 	}
195 }
196 
197 #if PRINT_CALIBRATION_SUMMARY_FASTK_CHECK
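/*
 * Fast-K sanity check: after calibration results are restored without a full
 * re-calibration, re-verify them with a quick test per calibration type --
 * TX/DATLAT/RX use TestEngine2 with the xtalk pattern, RDDQC re-runs the
 * RDDQC loop, and gating replays an audio-pattern read and expects both
 * byte gating counters to read back 0x4646.
 */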
198 void Fast_K_CheckResult(DRAMC_CTX_T *p, U8 ucCalType)
199 {
200 	U32 CheckResult=0xFFFFFFFF;
201 	U32 debug_cnt[2], u4all_result_R, u4all_result_F;
202 	BOOL FastK_Check_flag=0;
203 	U32 *Pointer_FastKExecute,*Pointer_FastKResult;
204 
205 	Pointer_FastKExecute = &p->FastKExecuteFlag[p->channel][p->rank];
206 	Pointer_FastKResult = &p->FastKResultFlag[p->channel][p->rank];
207 
208 	if ((ucCalType==DRAM_CALIBRATION_TX_PERBIT)||(ucCalType==DRAM_CALIBRATION_DATLAT)||(ucCalType==DRAM_CALIBRATION_RX_PERBIT))
209 	{
210 		DramcEngine2Init(p, p->test2_1, p->test2_2, TEST_XTALK_PATTERN, 0, TE_NO_UI_SHIFT);
211 		CheckResult = DramcEngine2Run(p,TE_OP_WRITE_READ_CHECK , TEST_XTALK_PATTERN);
212 		DramcEngine2End(p);
213 		FastK_Check_flag=1;
214 	}
215 	else if (ucCalType==DRAM_CALIBRATION_RX_RDDQC)
216 	{
217 		DramcRxWinRDDQCInit(p);
218 		CheckResult = DramcRxWinRDDQCRun(p);
219 		DramcRxWinRDDQCEnd(p);
220 		FastK_Check_flag=1;
221 	}
222 	else if (ucCalType==DRAM_CALIBRATION_GATING)
223 	{
224 		DramcEngine2Init(p, 0x55000000, 0xaa000000 |0x23, TEST_AUDIO_PATTERN, 0, TE_NO_UI_SHIFT);
225 
226 		//Gating Counter Reset
227 		DramPhyReset(p);
228 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 1,
229 				MISC_STBCAL2_DQSG_CNT_RST);
230 		mcDELAY_US(1);
231 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 0,
232 				MISC_STBCAL2_DQSG_CNT_RST);
233 
234 		DramcEngine2Run(p, TE_OP_READ_CHECK, TEST_AUDIO_PATTERN);
235 
236 		debug_cnt[0] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_CAL_DQSG_CNT_B0));
237 		debug_cnt[1] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_CAL_DQSG_CNT_B1));
238 
239 		//msg(" 0x%X  ",u4DebugCnt)
240 		if (debug_cnt[0]==0x4646 && debug_cnt[1]==0x4646)
241 			CheckResult=0;
242 
243 		DramcEngine2End(p);
244 		FastK_Check_flag=1;
245 	}
246 
247 
248 	if ((FastK_Check_flag==1)&&(CheckResult==0))
249 	{
250 		//msg(" [FAST K CHECK]->PASS\n")
251 		*Pointer_FastKResult &= (~(1<<ucCalType)); // result PASS
252 		*Pointer_FastKExecute |= (1<<ucCalType); // Executed
253 	}
254 	else if ((FastK_Check_flag==1)&&(CheckResult !=0))
255 	{
256 		//msg(" [FAST K CHECK]->FAIL\n")
257 		*Pointer_FastKResult |= (1<<ucCalType); // result FAIL
258 		*Pointer_FastKExecute |= (1<<ucCalType); // Executed
259 	}
260 }
261 #endif
262 
263 const char *szCalibStatusName[DRAM_CALIBRATION_MAX]=
264 {
265 	"SW Impedance	  ",
266 	"DUTY Scan		  ",
267 	"ZQ Calibration   ",
268 	"Jitter Meter	  ",
269 	"CBT Training	  ",
270 	"Write leveling   ",
271 	"RX DQS gating	  ",
272 	"RX DQ/DQS(RDDQC) ",
273 	"TX DQ/DQS		  ",
274 	"RX DATLAT		  ",
275 	"RX DQ/DQS(Engine)",
276 	"TX OE			  ",
277 };
278 
279 void vPrintCalibrationResult(DRAMC_CTX_T *p)
280 {
281 	U8 ucCHIdx, ucRankIdx, ucCalIdx;
282 	U32 ucCalResult_All, ucCalExecute_All;
283 	U8 ucCalResult, ucCalExecute;
284 	U8 u1CalibrationFail;
285 
286 	msg("\n\n[Calibration Summary] %d Mbps\n", p->frequency * 2);
287 
288 	//for(ucFreqIdx=0; ucFreqIdx<DRAM_DFS_SHUFFLE_MAX; ucFreqIdx++)
289 	{
290 		//msg("==Freqency = %d==\n", get_FreqTbl_by_shuffleIndex(p,ucFreqIdx)->frequency);
291 		for(ucCHIdx=0; ucCHIdx<p->support_channel_num; ucCHIdx++)
292 		{
293 			for(ucRankIdx=0; ucRankIdx<p->support_rank_num; ucRankIdx++)
294 			{
295 				u1CalibrationFail =0;
296 				ucCalExecute_All = p->aru4CalExecuteFlag[ucCHIdx][ucRankIdx];
297 				ucCalResult_All = p->aru4CalResultFlag[ucCHIdx][ucRankIdx];
298 				msg("CH %d, Rank %d\n", ucCHIdx, ucRankIdx);
299 				//msg("[vPrintCalibrationResult] Channel = %d, Rank= %d, Freq.= %d, (ucCalExecute_All 0x%x, ucCalResult_All 0x%x)\n", ucCHIdx, ucRankIdx, ucFreqIdx, ucCalExecute_All, ucCalResult_All);
300 
301 				for(ucCalIdx =0; ucCalIdx<DRAM_CALIBRATION_MAX; ucCalIdx++)
302 				{
303 					if(ucCalIdx==0)
304 						{
305 							ucCalExecute = (U8)p->SWImpCalExecute; //for SW Impedence
306 							ucCalResult = (U8)p->SWImpCalResult; //for SW Impedence
307 						}
308 					else
309 						{
310 							ucCalExecute = (U8)((ucCalExecute_All >>ucCalIdx) & 0x1);
311 							ucCalResult =  (U8)((ucCalResult_All >>ucCalIdx) & 0x1);
312 						}
313 
314 					#if PRINT_CALIBRATION_SUMMARY_DETAIL
315 					msg("%s: ", szCalibStatusName[ucCalIdx]);
316 					if(ucCalExecute==1 && ucCalResult ==1) // excuted and fail
317 					{
318 						u1CalibrationFail =1;
319 						msg("%s\n", "@_@FAIL@_@");
320 					}
321 					else if (ucCalExecute==1 && ucCalResult ==0) // DRAM_OK
322 					{
323 						msg("%s\n", "PASS");
324 					}
325 					else if (ucCalExecute==0 && ucCalResult ==0) // DRAM_FAST K
326 					{
327 						msg("%s\n", "FAST K");
328 					}
329 					else //DRAM_NO K
330 					{
331 						msg("%s\n", "NO K");
332 					}
333 
334 					#else
335 					if(ucCalExecute==1 && ucCalResult ==1) // excuted and fail
336 					{
337 						u1CalibrationFail =1;
338 						msg("%s: %s\n", szCalibStatusName[ucCalIdx], "@_@FAIL@_@");
339 					}
340 					#endif
341 				}
342 
343 				if(u1CalibrationFail ==0)
344 				{
345 					msg("All Pass.\n");
346 				}
347 				msg("\n");
348 			}
349 		}
350 	}
351 
352 }
353 #endif
354 
355 #if __FLASH_TOOL_DA__
356 #define CA_THRESHOLD 20
357 #define RX_THRESHOLD 150
358 #define TX_THRESHOLD 20
359 #define PERCENTAGE_THRESHOLD 50
360 #define PRINT_WIN_SIZE 0
361 U8* print_Impedence_LOG_type(U8 print_type)
362 {
363 	switch (print_type)
364 	{
365 		case 0: return "DRVP";
366 		case 1: return "DRVN";
367 		case 2: return "ODTP";
368 		case 3: return "ODTN";
369 		default: return "ERROR";
370 	}
371 }
372 void vPrintPinInfoResult(DRAMC_CTX_T *p)
373 {
374 	U8 u1CHIdx, u1RankIdx, u1CAIdx, u1ByteIdx, u1ByteIdx_DQ, u1BitIdx, u1BitIdx_DQ, u1FreqRegionIdx, u1ImpIdx;
375 	U8 u1PinError=0;
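	/*
	 * The raw per-pin window sizes are converted to a rough percentage of
	 * one UI before being compared against PERCENTAGE_THRESHOLD:
	 *  - CA windows are counted in PI steps (64 per UI): (win*100 + 63)/64,
	 *    e.g. a 32-step window reports ~50%.
	 *  - TX windows use 64 PI/UI in DDR800 closed-loop mode, 32 PI/UI otherwise.
	 *  - RX windows are counted in delay cells and are scaled by the measured
	 *    cell delay (ps) and the data rate to relate them to one UI.
	 */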
376 	msg("\n\n[Pin Info Summary] Frequency %d\n", p->frequency);
377 	for (u1FreqRegionIdx=0;u1FreqRegionIdx<2/*IMP_VREF_MAX*/;u1FreqRegionIdx++)
378 	{
379 		for (u1ImpIdx=0;u1ImpIdx<IMP_DRV_MAX;u1ImpIdx++)
380 		{
381 			msg("IMP %s type:%s %s\n", u1FreqRegionIdx?"Region1":"Region0", print_Impedence_LOG_type(u1ImpIdx), (PINInfo_flashtool.IMP_ERR_FLAG>>(u1FreqRegionIdx*4+u1ImpIdx)&0x1)?"ERROR":"PASS");
382 		}
383 	}
384 	{
385 		for(u1CHIdx=0; u1CHIdx<p->support_channel_num; u1CHIdx++)
386 		{
387 			for(u1RankIdx=0; u1RankIdx<p->support_rank_num; u1RankIdx++)
388 			{
389 				msg("CH %d, Rank %d\n", u1CHIdx, u1RankIdx);
390 				for (u1CAIdx =0; u1CAIdx <CATRAINING_NUM_LP4; u1CAIdx++)
391 				{
392 					#if 1//Transfer to Percentage
393 					PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]= (PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]* 100 + 63) /64;
394 					if ((PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]==0)||(PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]<=PERCENTAGE_THRESHOLD))
395 					#else
396 					if ((PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]==0)||(PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]<=CA_THRESHOLD))
397 					#endif
398 					{
399 						PINInfo_flashtool.CA_ERR_FLAG[u1CHIdx][u1RankIdx] |= (1<<u1CAIdx);
400 						PINInfo_flashtool.TOTAL_ERR |= (0x1<<(u1CHIdx*4+u1RankIdx*2));
401 					}
402 					msg("CA %d: %s ", u1CAIdx, (PINInfo_flashtool.CA_ERR_FLAG[u1CHIdx][u1RankIdx]>>u1CAIdx) & 0x1 ? "ERROR" : "PASS");
403 					#if PRINT_WIN_SIZE
404 					msg("(WIN_SIZE: %d %% )", (PINInfo_flashtool.CA_WIN_SIZE[u1CHIdx][u1RankIdx][u1CAIdx]));
405 					#endif
406 					msg("\n");
407 				}
408 				for (u1BitIdx =0; u1BitIdx <DQ_DATA_WIDTH_LP4; u1BitIdx++)
409 				{
410 					u1ByteIdx = (u1BitIdx>=8?1:0);
411 					u1BitIdx_DQ = uiLPDDR4_O1_Mapping_POP[p->channel][u1BitIdx];
412 					u1ByteIdx_DQ = (u1BitIdx_DQ>=8?1:0);
413 				#if 1//Transfer to Percentage
414 					PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx] = ((PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]* gHQALOG_RX_delay_cell_ps_075V * p->frequency * 2)+ (1000000 - 1)) / 1000000;
415 					if (PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]<=PERCENTAGE_THRESHOLD)
416 				#else
417 					if ((PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]==0)||(PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]<=RX_THRESHOLD)\
418 						 ||(PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]==0)||(PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]<=TX_THRESHOLD))
419 				#endif
420 					{
421 						PINInfo_flashtool.DQ_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx] |= (1<<(u1BitIdx-(u1ByteIdx==1?8:0)));
422 						PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx_DQ] |= (1<<(u1BitIdx_DQ-(u1ByteIdx_DQ==1?8:0)));
423 						PINInfo_flashtool.TOTAL_ERR |= (0x1<<(u1CHIdx*4+u1RankIdx*2+1));
424 					}
425 				#if 1//Transfer to Percentage
426 					PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx] = (PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]* 100+ (vGet_DDR_Loop_Mode(p) == DDR800_CLOSE_LOOP? 63: 31)) / (vGet_DDR_Loop_Mode(p) == DDR800_CLOSE_LOOP? 64: 32);
427 					if (PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]<=PERCENTAGE_THRESHOLD)
428 				#else
429 					if ((PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]==0)||(PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]<=RX_THRESHOLD)\
430 						 ||(PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]==0)||(PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][u1BitIdx]<=TX_THRESHOLD))
431 				#endif
432 					{
433 						PINInfo_flashtool.DQ_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx] |= (1<<(u1BitIdx-(u1ByteIdx==1?8:0)));
434 						PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx_DQ] |= (1<<(u1BitIdx_DQ-(u1ByteIdx_DQ==1?8:0)));
435 						PINInfo_flashtool.TOTAL_ERR |= (0x1<<(u1CHIdx*4+u1RankIdx*2+1));
436 					}
437 				}
438 				for (u1BitIdx_DQ=0; u1BitIdx_DQ<DQ_DATA_WIDTH_LP4; u1BitIdx_DQ++)
439 				{
440 					u1ByteIdx_DQ = (u1BitIdx_DQ>=8?1:0);
441 					msg("DRAM DQ %d: RX %s, TX %s ", u1BitIdx_DQ, ((PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx_DQ]>>(u1BitIdx_DQ-(u1ByteIdx_DQ==1?8:0))&0x1)?"ERROR":"PASS"),\
442 																														(((PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx_DQ]>>(u1BitIdx_DQ-(u1ByteIdx_DQ==1?8:0)))&0x1)?"ERROR":"PASS"));
443 					#if PRINT_WIN_SIZE
444 					msg("(RX WIN SIZE: %d %%, TX WIN SIZE: %d %% )", PINInfo_flashtool.DQ_RX_WIN_SIZE[u1CHIdx][u1RankIdx][uiLPDDR4_O1_Mapping_POP[u1CHIdx][u1BitIdx_DQ]], PINInfo_flashtool.DQ_TX_WIN_SIZE[u1CHIdx][u1RankIdx][uiLPDDR4_O1_Mapping_POP[u1CHIdx][u1BitIdx_DQ]]);
445 					#endif
446 					msg("\n");
447 				}
448 			}
449 		}
450 	}
451 }
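/*
 * PINInfo_flashtool.TOTAL_ERR bit layout (as set in vPrintPinInfoResult above):
 * bit (ch*4 + rank*2) flags a CA window failure for that channel/rank and
 * bit (ch*4 + rank*2 + 1) flags a DQ RX/TX window failure; IMP_ERR_FLAG uses
 * bit (vref_region*4 + drv_index).  The checks below walk these flags from the
 * widest scope (all channels) down to individual DQ pins to suggest the most
 * likely contact issue.
 */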
452 void vGetErrorTypeResult(DRAMC_CTX_T *p)
453 {
454 	U8 u1CHIdx, u1CHIdx_EMI, u1RankIdx, u1CAIdx, u1ByteIdx, u1BitIdx, u1FreqRegionIdx, u1ImpIdx;
455 	msg("\n[Get Pin Error Type Result]\n");
456 	if (PINInfo_flashtool.TOTAL_ERR==0 && PINInfo_flashtool.IMP_ERR_FLAG==0)//ALL PASS
457 	{
458 		msg("ALL PASS\n");
459 	}
460 	if (PINInfo_flashtool.IMP_ERR_FLAG)
461 	{
462 		msg("[CHECK RESULT] FAIL: Impedance calibration fail\n");
463 		msg("Suspect EXTR contact issue\n");
464 		msg("Suspect EXTR related resistor contact issue\n");
465 	}
466 	if ((PINInfo_flashtool.TOTAL_ERR == 0xffff) && (PINInfo_flashtool.WL_ERR_FLAG== 0xff))
467 	{
468 		msg("[CHECK RESULT] FAIL: ALL calibration fail\n");
469 		msg("Suspect RESET_N contact issue\n");
470 		msg("Suspect DRAM Power (VDD1/VDD2/VDDQ) contact issue\n");
471 	}
472 	else
473 	{
474 		for (u1CHIdx = 0; u1CHIdx < p->support_channel_num; u1CHIdx++)
475 		{
476 			#if (CHANNEL_NUM > 2)
477 			if(u1CHIdx == CHANNEL_B)
478 				u1CHIdx_EMI = CHANNEL_C;
479 			else if(u1CHIdx == CHANNEL_C)
480 				u1CHIdx_EMI = CHANNEL_B;
481 			else //CHANNEL_A,CHANNEL_D
482 			#endif
483 				u1CHIdx_EMI = u1CHIdx;
484 			if ((PINInfo_flashtool.TOTAL_ERR>>(u1CHIdx*4) & 0xf) == 0xf)
485 			{
486 				msg("[CHECK RESULT] FAIL: CH%d all calibration fail\n",u1CHIdx);
487 				msg("Suspect EMI%d_CK_T contact issue\n",u1CHIdx_EMI);
488 				msg("Suspect EMI%d_CK_C contact issue\n",u1CHIdx_EMI);
489 				for (u1CAIdx =0; u1CAIdx <CATRAINING_NUM_LP4; u1CAIdx++)
490 				{
491 					msg("Suspect EMI%d_CA%d contact issue\n",u1CHIdx_EMI,u1CAIdx);
492 				}
493 			}
494 			else
495 			{
496 				for(u1RankIdx = 0; u1RankIdx < p->support_rank_num; u1RankIdx++)
497 				{
498 					if ((((PINInfo_flashtool.TOTAL_ERR>>(u1CHIdx*4+u1RankIdx*2)) & 0x3)==0x3) && \
499 						 (PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][BYTE_0] == 0xff) && \
500 						 (PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][BYTE_1] == 0xff)&& \
501 						 (PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][BYTE_0] == 0xff) && \
502 						 (PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][BYTE_1] == 0xff))
503 					{
504 						msg("[CHECK RESULT] FAIL: CH%d RK%d all calibration fail\n",u1CHIdx,u1RankIdx);
505 						msg("Suspect EMI%d_CKE_%d contact issue\n",u1CHIdx_EMI,u1RankIdx);
506 						msg("Suspect EMI%d_CS_%d contact issue\n",u1CHIdx_EMI,u1RankIdx);
507 					}
508 					else
509 					{
510 						for (u1ByteIdx = 0; u1ByteIdx < DQS_NUMBER_LP4; u1ByteIdx++)
511 						{
512 							if((PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx] == 0xff) &&\
513 									  (PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx] == 0xff))
514 							{
515 								msg("[CHECK RESULT] FAIL: CH%d RK%d Byte%d WL/Read/Write calibration fail\n",u1CHIdx,u1RankIdx,u1ByteIdx);
516 								msg("Suspect EMI%d_DQS%d_T contact issue\n",u1CHIdx_EMI,u1ByteIdx);
517 								msg("Suspect EMI%d_DQS%d_C contact issue\n",u1CHIdx_EMI,u1ByteIdx);
518 							}
519 							else if (PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx]&&\
520 									   PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx])
521 							{
522 								for (u1BitIdx = 0; u1BitIdx < DQS_BIT_NUMBER; u1BitIdx++)
523 								{
524 									if (((PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx]>>u1BitIdx)&0x1)&&\
525 										 ((PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx]>>u1BitIdx)&0x1))
526 									{
527 											msg("[CHECK RESULT] FAIL: CH%d RK%d DRAM DQ%d Read/Write fail\n",u1CHIdx,u1RankIdx,u1ByteIdx*8+u1BitIdx);
528 											msg("Suspect EMI%d_DQ%d contact issue\n",u1CHIdx_EMI,u1ByteIdx*8+u1BitIdx);
529 									}
530 								}
531 							}
532 							else if((PINInfo_flashtool.DRAM_PIN_RX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx] == 0xff) ||\
533 									  (PINInfo_flashtool.DRAM_PIN_TX_ERR_FLAG[u1CHIdx][u1RankIdx][u1ByteIdx] == 0xff))
534 							{
535 								msg("[CHECK RESULT] FAIL: CH%d RK%d Byte%d Suspect other special contact or calibration issue\n",u1CHIdx_EMI,u1RankIdx,u1ByteIdx);
536 							}
537 						}
538 					}
539 				}
540 			}
541 		}
542 	}
543 	msg("\n");
544 	return;
545 }
546 #endif
547 void vInitGlobalVariablesByCondition(DRAMC_CTX_T *p)
548 {
549 	U8 u1CHIdx, u1RankIdx, u1FSPIdx;
550 
551 	u1MR01Value[FSP_0] = 0x26;
552 	u1MR01Value[FSP_1] = 0x56;
553 
554 	u1MR03Value[FSP_0] = 0x31; //Set write post-amble as 0.5 tck
555 	u1MR03Value[FSP_1] = 0x31; //Set write post-amble as 0.5 tck
556 #ifndef ENABLE_POST_PACKAGE_REPAIR
557 	u1MR03Value[FSP_0] |= 0x4; //MR3 OP[2]=1 for PPR protection enabled
558 	u1MR03Value[FSP_1] |= 0x4; //MR3 OP[2]=1 for PPR protection enabled
559 #endif
560 #if ENABLE_WRITE_POST_AMBLE_1_POINT_5_TCK
561 	u1MR03Value[FSP_1] |= 0x2; //MR3 OP[1]=1 for Set write post-amble as 1.5 tck, support after Eig_er E2
562 #endif
563 	u1MR04Value[RANK_0] = 0x3;
564 	u1MR04Value[RANK_1] = 0x3;
565 
566 	// @Darren, for LP4Y single-end mode
567 	u1MR21Value[FSP_0] = 0x0;
568 	u1MR21Value[FSP_1] = 0x0;
569 	u1MR51Value[FSP_0] = 0x0;
570 	u1MR51Value[FSP_1] = 0x0;
571 
572 	for (u1FSPIdx = 0; u1FSPIdx < p->support_fsp_num; u1FSPIdx++)
573 	{
574 		u1MR02Value[u1FSPIdx] = 0x1a;
575 	}
576 
577 	for (u1CHIdx = 0; u1CHIdx < CHANNEL_NUM; u1CHIdx++)
578 		for (u1RankIdx = 0; u1RankIdx < RANK_MAX; u1RankIdx++)
579 			for (u1FSPIdx = 0; u1FSPIdx < p->support_fsp_num; u1FSPIdx++)
580 			{
581 				// MR14 default value, LP4 default 0x4d, LP4X 0x5d
582 				u1MR14Value[u1CHIdx][u1RankIdx][u1FSPIdx] = (u1FSPIdx == FSP_0)? 0x5d: 0x18;  //0x18: customize for Eig_er
583 				#if FSP1_CLKCA_TERM
584 				u1MR12Value[u1CHIdx][u1RankIdx][u1FSPIdx] = (u1FSPIdx == FSP_0)? 0x5d: 0x1b;
585 				#else
586 				u1MR12Value[u1CHIdx][u1RankIdx][u1FSPIdx] = 0x5d;
587 				#endif
588 				#if MRW_CHECK_ONLY
589 				for (u1MRIdx = 0; u1MRIdx < MR_NUM; u1MRIdx++)
590 					u2MRRecord[u1CHIdx][u1RankIdx][u1FSPIdx][u1MRIdx] = 0xffff;
591 				#endif
592 			}
593 
594 	memset(gu2RX_DQS_Duty_Offset, 0, sizeof(gu2RX_DQS_Duty_Offset));
595 }
596 
597 const U8 uiLPDDR4_CA_DRAM_Pinmux[PINMUX_MAX][CHANNEL_NUM][6] =
598 {
599 	{
600 	// for DSC
601 		//CH-A
602 		{
603 			1, 4, 5, 3, 2, 0
604 		},
605 
606 	#if (CHANNEL_NUM>1)
607 		//CH-B
608 		{
609 			3, 5, 0, 2, 4, 1
610 		},
611 	#endif
612 	#if (CHANNEL_NUM>2)
613 		//CH-C
614 		{
615 			5, 0, 4, 3, 1, 2
616 		},
617 		//CH-D
618 		{
619 			2, 5, 3, 0, 4, 1
620 		},
621 	#endif
622 	},
623 	{
624 	// for LPBK
625 		// TODO: need porting
626 	},
627 	{
628 	// for EMCP
629 		//CH-A
630 		{
631 			2, 4, 3, 5, 1, 0
632 		},
633 
634 	#if (CHANNEL_NUM>1)
635 		//CH-B
636 		{
637 			4, 5, 2, 0, 3, 1
638 		},
639 	#endif
640 	#if (CHANNEL_NUM>2)
641 		//CH-C
642 		{
643 			5, 4, 0, 2, 1, 3
644 		},
645 		//CH-D
646 		{
647 			3, 5, 2, 4, 0, 1
648 		},
649 	#endif
650 	}
651 };
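/*
 * uiLPDDR4_CA_DRAM_Pinmux[pinmux][channel][ca]: per-package CA bit remapping
 * between the APHY CA pins and the DRAM ball-out.  The first index selects the
 * package option (DSC / LPBK / EMCP, matching the inline comments above); the
 * LPBK entry is still empty ("need porting").
 */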
652 
653 //O1 DRAM->APHY
654 const U8 uiLPDDR4_O1_DRAM_Pinmux[PINMUX_MAX][CHANNEL_NUM][16] =
655 {
656 	{
657 	// for DSC
658 		//CH-A
659 		{
660 			0, 1, 7, 6, 4, 5, 2, 3,
661 			9, 8, 11, 10, 14, 15, 13, 12
662 		},
663 		#if (CHANNEL_NUM>1)
664 		//CH-B
665 		{
666 			1, 0, 5, 6, 3, 2, 7, 4,
667 			8, 9, 11, 10, 12, 14, 13, 15
668 		},
669 		#endif
670 		#if (CHANNEL_NUM>2)
671 		//CH-C
672 		{
673 			0, 1, 7, 6, 4, 5, 2, 3,
674 			9, 8, 11, 10, 14, 15, 13, 12
675 		},
676 		//CH-D
677 		{
678 			1, 0, 5, 6, 3, 2, 7, 4,
679 			8, 9, 11, 10, 12, 14, 13, 15
680 		},
681 		#endif
682 	},
683 	{
684 	// for LPBK
685 		// TODO: need porting
686 	},
687 	{
688 	// for EMCP
689 		//CH-A
690 		{
691 			1, 0, 3, 2, 4, 7, 6, 5,
692 			8, 9, 10, 14, 11, 15, 13, 12
693 		},
694 		#if (CHANNEL_NUM>1)
695 		//CH-B
696 		{
697 			0, 1, 4, 7, 3, 5, 6, 2,
698 			9, 8, 10, 12, 11, 14, 13, 15
699 		},
700 		#endif
701 		#if (CHANNEL_NUM>2)
702 		//CH-C
703 		{
704 			1, 0, 3, 2, 4, 7, 6, 5,
705 			8, 9, 10, 14, 11, 15, 13, 12
706 		},
707 		//CH-D
708 		{
709 			0, 1, 4, 7, 3, 5, 6, 2,
710 			9, 8, 10, 12, 11, 14, 13, 15
711 		},
712 		#endif
713 	}
714 };
715 
716 //CA APHY->DRAM
717 #if (CA_PER_BIT_DELAY_CELL || PINMUX_AUTO_TEST_PER_BIT_CA)
718 const U8 uiLPDDR5_CA_Mapping_POP[CHANNEL_NUM][7] =
719 {
720 	//CH-A
721 	{
722 		0, 1, 2, 3, 4, 5, 6
723 	},
724 
725 #if (CHANNEL_NUM>1)
726 	//CH-B
727 	{
728 		0, 4, 2, 3, 1, 5, 6
729 	}
730 #endif
731 };
732 
733 U8 uiLPDDR4_CA_Mapping_POP[CHANNEL_NUM][6] =
734 {
735 	//CH-A
736 	{
737 		5, 4, 0, 2, 1, 3
738 	},
739 
740 #if (CHANNEL_NUM>1)
741 	//CH-B
742 	{
743 		3, 5, 2, 4, 0, 1
744 	},
745 #endif
746 #if (CHANNEL_NUM>2)
747 	//CH-C
748 	{
749 		5, 4, 0, 2, 1, 3
750 	},
751 	//CH-D
752 	{
753 		3, 5, 2, 4, 0, 1
754 	},
755 #endif
756 };
757 #endif
758 
759 #if (__LP5_COMBO__)
760 const U8 uiLPDDR5_O1_Mapping_POP[CHANNEL_NUM][16] =
761 {
762 	{
763 		8, 9, 10, 11, 12, 15, 14, 13,
764 		0, 1, 2, 3, 4, 7, 6, 5,
765 	},
766 
767 	#if (CHANNEL_NUM>1)
768 	{
769 		8, 9, 10, 11, 12, 15, 14, 13,
770 		0, 1, 2, 3, 4, 7, 6, 5,
771 	},
772 	#endif
773 };
774 #endif
775 
776 //O1 DRAM->APHY
777 U8 uiLPDDR4_O1_Mapping_POP[CHANNEL_NUM][16] =
778 {
779 	//CH-A
780 	{
781 		1, 0, 3, 2, 4, 7, 6, 5,
782 		8, 9, 10, 14, 11, 15, 13, 12
783 	},
784 	#if (CHANNEL_NUM>1)
785 	//CH-B
786 	{
787 		0, 1, 4, 7, 3, 5, 6, 2,
788 		9, 8, 10, 12, 11, 14, 13, 15
789 	},
790 	#endif
791 	#if (CHANNEL_NUM>2)
792 	//CH-C
793 	{
794 		1, 0, 3, 2, 4, 7, 6, 5,
795 		8, 9, 10, 14, 11, 15, 13, 12
796 	},
797 	//CH-D
798 	{
799 		0, 1, 4, 7, 3, 5, 6, 2,
800 		9, 8, 10, 12, 11, 14, 13, 15
801 	},
802 	#endif
803 };
804 
805 void vBeforeCalibration(DRAMC_CTX_T *p)
806 {
807 #if (__LP5_COMBO__ == TRUE)
808 	if (TRUE == is_lp5_family(p))
809 	{
810 		DramcMRInit_LP5(p);
811 	}
812 	else
813 #endif
814 	{
815 		//DramcMRInit_LP4(p);
816 	}
817 
818 #if SIMULATION_RX_DVS || ENABLE_RX_TRACKING
819 	DramcRxInputDelayTrackingInit_byFreq(p);
820 #endif
821 
822 	DramcHWGatingOnOff(p, 0); //disable gating tracking
823 
824 	CKEFixOnOff(p, CKE_WRITE_TO_ALL_RANK, CKE_FIXON, CKE_WRITE_TO_ALL_CHANNEL); //Let CLK always on during calibration
825 
826 #if ENABLE_TMRRI_NEW_MODE
827 	SetCKE2RankIndependent(p); //CKE should be controlled independently
828 #endif
829 
830 	//WDBI-OFF
831 	vIO32WriteFldAlign_All(DRAMC_REG_SHU_TX_SET0, 0x0, SHU_TX_SET0_DBIWR);
832 
833 #ifdef IMPEDANCE_TRACKING_ENABLE
834 	// set correct setting to control IMPCAL HW Tracking in shuffle RG
835 	// if p->freq >= 1333, enable IMP HW tracking(SHU_DRVING1_DIS_IMPCAL_HW=0), else SHU_DRVING1_DIS_IMPCAL_HW = 1
836 	U8 u1DisImpHw;
837 	U32 u4TermFreq;
838 
839 #if (__LP5_COMBO__ == TRUE)
840 	if (TRUE == is_lp5_family(p))
841 		u4TermFreq = LP5_MRFSP_TERM_FREQ;
842 	else
843 #endif
844 		u4TermFreq = LP4_MRFSP_TERM_FREQ;
845 
846 	u1DisImpHw = (p->frequency >= u4TermFreq)? 0: 1;
847 
848 	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_IMPEDAMCE_UPD_DIS1, P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_ODTN_UPD_DIS)
849 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DRVN_UPD_DIS)
850 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DRVP_UPD_DIS)
851 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_ODTN_UPD_DIS)
852 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVN_UPD_DIS)
853 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVP_UPD_DIS)
854 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DQ_ODTN_UPD_DIS)
855 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DQ_DRVN_UPD_DIS)
856 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DQ_DRVP_UPD_DIS)
857 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DQS_ODTN_UPD_DIS)
858 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DQS_DRVN_UPD_DIS)
859 																	| P_Fld(u1DisImpHw, MISC_SHU_IMPEDAMCE_UPD_DIS1_DQS_DRVP_UPD_DIS)
860 																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVP_UPD_DIS)
861 																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_DRVN_UPD_DIS)
862 																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_WCK_ODTN_UPD_DIS));
863 
864 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_IMPCAL1, (u1DisImpHw? 0x0:0x40), SHU_MISC_IMPCAL1_IMPCALCNT);
865 
866 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING1, u1DisImpHw, SHU_MISC_DRVING1_DIS_IMPCAL_HW);
867 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING1, u1DisImpHw, SHU_MISC_DRVING1_DIS_IMP_ODTN_TRACK);
868 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING2, u1DisImpHw, SHU_MISC_DRVING2_DIS_IMPCAL_ODT_EN);
869 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD12, u1DisImpHw, SHU_CA_CMD12_RG_RIMP_UNTERM_EN);
870 #endif
871 
872 	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CLK_CTRL, P_Fld(0, MISC_CLK_CTRL_DVFS_CLK_MEM_SEL)
873 													| P_Fld(0, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_UPDATE_EN));
874 
875 
876 	vIO32WriteFldMulti_All(DRAMC_REG_SHU_ZQ_SET0,
877 			P_Fld(0x1ff, SHU_ZQ_SET0_ZQCSCNT) | //Every refresh number to issue ZQCS commands, only for DDR3/LPDDR2/LPDDR3/LPDDR4
878 			P_Fld(0x1b, SHU_ZQ_SET0_TZQLAT));
879 
880 	if (p->support_channel_num == CHANNEL_SINGLE)
881 	{
882 		//single channel, ZQCSDUAL=0, ZQCSMASK=0
883 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_ZQ_SET0), P_Fld(0, ZQ_SET0_ZQCSDUAL) | P_Fld(0x0, ZQ_SET0_ZQCSMASK));
884 	}
885 	else if (p->support_channel_num == CHANNEL_DUAL)
886 	{
887 		// HW ZQ commands are channel-interleaved since the two channels share the same ZQ pin.
888 		#ifdef ZQCS_ENABLE_LP4
889 		// dual channel: ZQCSDUAL=1; CHA ZQCSMASK=1, CHB ZQCSMASK=0 (set below)
890 
891 		vIO32WriteFldMulti_All(DRAMC_REG_ZQ_SET0, P_Fld(1, ZQ_SET0_ZQCSDUAL) |
892 											   P_Fld(0, ZQ_SET0_ZQCSMASK_OPT) |
893 											   P_Fld(0, ZQ_SET0_ZQMASK_CGAR) |
894 											   P_Fld(0, ZQ_SET0_ZQCS_MASK_SEL_CGAR));
895 
896 		// DRAMC CHA(CHN0):ZQCSMASK=1, DRAMC CHB(CHN1):ZQCSMASK=0.
897 		// ZQCSMASK setting: (Ch A, Ch B) = (1,0) or (0,1)
898 		// if CHA.ZQCSMASK=1, and then set CHA.ZQCALDISB=1 first, else set CHB.ZQCALDISB=1 first
899 		vIO32WriteFldAlign(DRAMC_REG_ZQ_SET0 + (CHANNEL_A << POS_BANK_NUM), 1, ZQ_SET0_ZQCSMASK);
900 		vIO32WriteFldAlign(DRAMC_REG_ZQ_SET0 + SHIFT_TO_CHB_ADDR, 0, ZQ_SET0_ZQCSMASK);
901 
902 		// DRAMC CHA(CHN0):ZQ_SET0_ZQCS_MASK_SEL=0, DRAMC CHB(CHN1):ZQ_SET0_ZQCS_MASK_SEL=0.
903 		vIO32WriteFldAlign_All(DRAMC_REG_ZQ_SET0, 0, ZQ_SET0_ZQCS_MASK_SEL);
904 		#endif
905 	}
906 #if (CHANNEL_NUM > 2)
907 	else if (p->support_channel_num == CHANNEL_FOURTH)
908 	{
909 		// HW ZQ commands are channel-interleaved since the channels share the same ZQ pin.
910 		#ifdef ZQCS_ENABLE_LP4
911 		// four channel: ZQCSDUAL=1; ZQCSMASK is set per (CHA,CHC)/(CHB,CHD) pair below
912 
913 		vIO32WriteFldMulti_All(DRAMC_REG_ZQ_SET0, P_Fld(1, ZQ_SET0_ZQCSDUAL) |
914 											   P_Fld(0, ZQ_SET0_ZQCALL) |
915 											   P_Fld(0, ZQ_SET0_ZQ_SRF_OPT) |
916 											   P_Fld(0, ZQ_SET0_ZQCSMASK_OPT) |
917 											   P_Fld(0, ZQ_SET0_ZQMASK_CGAR) |
918 											   P_Fld(0, ZQ_SET0_ZQCS_MASK_SEL_CGAR));
919 
920 		// DRAMC CHA(CHN0):ZQCSMASK=1, DRAMC CHB(CHN1):ZQCSMASK=0.
921 		// ZQCSMASK setting: (Ch A, Ch C) = (1,0) or (0,1), (Ch B, Ch D) = (1,0) or (0,1)
922 		// if CHA.ZQCSMASK=1, and then set CHA.ZQCALDISB=1 first, else set CHB.ZQCALDISB=1 first
923 	#if fcFOR_CHIP_ID == fcPetrus
924 		vIO32WriteFldAlign(DRAMC_REG_ZQ_SET0 + (CHANNEL_A << POS_BANK_NUM), 1, ZQ_SET0_ZQCSMASK);
925 		vIO32WriteFldAlign(DRAMC_REG_ZQ_SET0 + (CHANNEL_B << POS_BANK_NUM), 0, ZQ_SET0_ZQCSMASK);
926 		vIO32WriteFldAlign(DRAMC_REG_ZQ_SET0 + (CHANNEL_C << POS_BANK_NUM), 0, ZQ_SET0_ZQCSMASK);
927 		vIO32WriteFldAlign(DRAMC_REG_ZQ_SET0 + (CHANNEL_D << POS_BANK_NUM), 1, ZQ_SET0_ZQCSMASK);
928 	#endif
929 
930 		// DRAMC CHA(CHN0):ZQ_SET0_ZQCS_MASK_SEL=0, DRAMC CHB(CHN1):ZQ_SET0_ZQCS_MASK_SEL=0.
931 		vIO32WriteFldAlign_All(DRAMC_REG_ZQ_SET0, 0, ZQ_SET0_ZQCS_MASK_SEL);
932 		#endif
933 	}
934 #endif
935 
936 	// Set 0 to be able to adjust TX DQS/DQ/DQM PI during calibration, for new cross rank mode.
937 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B0_DQ2, 0, SHU_B0_DQ2_RG_ARPI_OFFSET_LAT_EN_B0);
938 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B1_DQ2, 0, SHU_B1_DQ2_RG_ARPI_OFFSET_LAT_EN_B1);
939 
940 #if ENABLE_PA_IMPRO_FOR_TX_AUTOK
941 	vIO32WriteFldAlign_All(DRAMC_REG_DCM_SUB_CTRL, 0x0, DCM_SUB_CTRL_SUBCLK_CTRL_TX_AUTOK);
942 #endif
943 	// ARPI_DQ SW mode mux, TX DQ use 1: PHY Reg 0: DRAMC Reg
944 	#if ENABLE_PA_IMPRO_FOR_TX_TRACKING
945 	vIO32WriteFldAlign_All(DRAMC_REG_DCM_SUB_CTRL, 0, DCM_SUB_CTRL_SUBCLK_CTRL_TX_TRACKING);
946 	#endif
947 	//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CTRL1, 1, MISC_CTRL1_R_DMARPIDQ_SW); @Darren, remove to LP4_UpdateInitialSettings
948 	//Disable HW MR18/19 to prevent fail case when doing SW MR18/19 in DQSOSCAuto
949 	vIO32WriteFldAlign_All(DRAMC_REG_DQSOSCR, 0x1, DQSOSCR_DQSOSCRDIS);
950 
951 	vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL0, 0x1, REFCTRL0_REFDIS); //disable refresh
952 
953 	vIO32WriteFldAlign_All(DRAMC_REG_SHU_MATYPE, u1MaType, SHU_MATYPE_MATYPE);
954 
955 	TX_Path_Algorithm(p);
956 }
957 
958 void vAfterCalibration(DRAMC_CTX_T *p)
959 {
960 #if ENABLE_READ_DBI
961 	EnableDRAMModeRegReadDBIAfterCalibration(p);
962 #endif
963 
964 #if ENABLE_WRITE_DBI
965 	EnableDRAMModeRegWriteDBIAfterCalibration(p);
966 #endif
967 
968 	SetMr13VrcgToNormalOperation(p);// Set VRCG{MR13[3]} to 0
969 	CKEFixOnOff(p, CKE_WRITE_TO_ALL_RANK, CKE_DYNAMIC, CKE_WRITE_TO_ALL_CHANNEL); //After CKE FIX on/off, CKE should be returned to dynamic (control by HW)
970 
971 	vIO32WriteFldAlign_All(DRAMC_REG_DUMMY_RD, p->support_rank_num, DUMMY_RD_RANK_NUM);
972 
973 #if FOR_DV_SIMULATION_USED == 1
974 	cal_sv_rand_args_t *psra = get_psra();
975 
976 	if (psra) {
977 		u1MR03Value[p->dram_fsp] = psra->mr3_value;
978 	}
979 #endif
980 
981 	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CG_CTRL7, 0, MISC_CG_CTRL7_CK_BFE_DCM_EN);
982 	vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A4, 4, TEST2_A4_TESTAGENTRKSEL); // Rank selection is controlled by Test Agent
983 	vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A2, 0x20, TEST2_A2_TEST2_OFF); //@Chris, MP setting for runtime TA2 Length
984 	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DUTYSCAN1, 0, MISC_DUTYSCAN1_DQSERRCNT_DIS);
985 	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CTRL1, 0, MISC_CTRL1_R_DMSTBENCMP_RK_OPT);
986 }
987 
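/*
 * O1 path: routes the DQ pad levels through the RX input buffers so the
 * controller can observe DRAM feedback directly (used during CBT, where the
 * DRAM returns the sampled CA pattern on DQ, and during write leveling).
 * O1PathOnOff() below enables/releases that observation path and, when turning
 * it on, programs an unterminated VREF for the DQ receivers.
 */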
988 static void O1PathOnOff(DRAMC_CTX_T *p, U8 u1OnOff)
989 {
990 	#if 0//O1_SETTING_RESTORE
991 	const U32 u4O1RegBackupAddress[] =
992 	{
993 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_VREF)),
994 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_VREF)),
995 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL)),
996 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_PHY_VREF_SEL)),
997 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5)),
998 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5))
999 	};
1000 	#endif
1001 
1002 	U8 u1VrefSel;
1003 
1004 	if (u1OnOff == ON)
1005 	{
1006 		// These RG will be restored when leaving each calibration flow
1007 		// -------------------------------------------------------
1008 		// VREF_UNTERM_EN
1009 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_VREF), 1, SHU_B0_VREF_RG_RX_ARDQ_VREF_UNTERM_EN_B0);
1010 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_VREF), 1, SHU_B1_VREF_RG_RX_ARDQ_VREF_UNTERM_EN_B1);
1011 
1012 		#if (__LP5_COMBO__ == TRUE)
1013 		if (p->dram_type==TYPE_LPDDR5)
1014 			u1VrefSel = 0x37;//unterm LP5
1015 		else
1016 		#endif
1017 			u1VrefSel = 0x37;//unterm LP4
1018 
1019 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL),
1020 					P_Fld(u1VrefSel, SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B0) |
1021 					P_Fld(u1VrefSel, SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B0));
1022 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_PHY_VREF_SEL),
1023 					P_Fld(u1VrefSel, SHU_B1_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B1) |
1024 					P_Fld(u1VrefSel, SHU_B1_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B1));
1025 	}
1026 
1027 	// DQ_O1 enable/release
1028 	// -------------------------------------------------------
1029 	// Actually this RG naming is O1_EN in APHY
1030 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6), u1OnOff, B0_DQ6_RG_RX_ARDQ_O1_SEL_B0);
1031 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6), u1OnOff, B1_DQ6_RG_RX_ARDQ_O1_SEL_B1);
1032 
1033 	// DQ_IN_BUFF_EN
1034 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ3),
1035 						P_Fld(u1OnOff, B0_DQ3_RG_RX_ARDQ_IN_BUFF_EN_B0) |
1036 						P_Fld(u1OnOff, B0_DQ3_RG_RX_ARDQS0_IN_BUFF_EN_B0));
1037 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ3),
1038 						P_Fld(u1OnOff, B1_DQ3_RG_RX_ARDQ_IN_BUFF_EN_B1) |
1039 						P_Fld(u1OnOff, B1_DQ3_RG_RX_ARDQS0_IN_BUFF_EN_B1));
1040 
1041 	// DQ_BUFF_EN_SEL
1042 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY3), u1OnOff, B0_PHY3_RG_RX_ARDQ_BUFF_EN_SEL_B0);
1043 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY3), u1OnOff, B1_PHY3_RG_RX_ARDQ_BUFF_EN_SEL_B1);
1044 
1045 	// Gating always ON
1046 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_RX_IN_GATE_EN_CTRL),(u1OnOff << 1) | u1OnOff, MISC_RX_IN_GATE_EN_CTRL_FIX_IN_GATE_EN);
1047 
1048 	mcDELAY_US(1);
1049 }
1050 
1051 /*
1052  * set_cbt_intv -- set interval related rg according to speed.
1053  *
1054  * TODO, move these to ACTimingTable ????!!!
1055  */
1056 
1057 struct cbt_intv {
1058 	DRAM_PLL_FREQ_SEL_T freq_sel;
1059 	DIV_MODE_T divmode;
1060 	u8 tcmdo1lat;
1061 	u8 catrain_intv;
1062 	u8 new_cbt_pat_intv;
1063 	u8 wlev_dqspat_lat;
1064 };
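/*
 * These are latency/interval counts programmed into CBT_WLEV_CTRL0/1/5 by
 * set_cbt_intv_rg(); the per-speed values come from the lookup tables in
 * set_cbt_wlev_intv_lp4()/_lp5() below and scale with data rate and clock
 * divider mode.
 */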
1065 
1066 static void set_cbt_intv_rg(DRAMC_CTX_T *p, struct cbt_intv *pintv)
1067 {
1068 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL1),
1069 		P_Fld(pintv->tcmdo1lat, CBT_WLEV_CTRL1_TCMDO1LAT) |
1070 		P_Fld(pintv->catrain_intv, CBT_WLEV_CTRL1_CATRAIN_INTV));
1071 
1072 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL5),
1073 		P_Fld(pintv->new_cbt_pat_intv, CBT_WLEV_CTRL5_NEW_CBT_PAT_INTV));
1074 
1075 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
1076 		P_Fld(pintv->wlev_dqspat_lat, CBT_WLEV_CTRL0_WLEV_DQSPAT_LAT));
1077 }
1078 
1079 static struct cbt_intv *lookup_cbt_intv(struct cbt_intv *intv, int cnt,
1080 		DRAM_PLL_FREQ_SEL_T fsel, DIV_MODE_T dmode)
1081 {
1082 	struct cbt_intv *pintv = NULL;
1083 	int i;
1084 
1085 	for (i = 0; i < cnt; i++) {
1086 		if (intv[i].freq_sel == fsel && intv[i].divmode == dmode) {
1087 			pintv = &intv[i];
1088 			break;
1089 		}
1090 	}
1091 
1092 	return pintv;
1093 }
1094 
1095 static void set_cbt_wlev_intv_lp4(DRAMC_CTX_T *p)
1096 {
1097 	struct cbt_intv intv[] = {
1098 		{
1099 			LP4_DDR4266,
1100 			DIV8_MODE,
1101 			17, /*tcmdo1lat*/
1102 			14, /* catrain_intv */
1103 			19, /* new_cbt_pat_intv */
1104 			19, /* wlev_dqspat_lat */
1105 		}, {
1106 			LP4_DDR3733,
1107 			DIV8_MODE,
1108 			16, /*tcmdo1lat*/
1109 			13, /* catrain_intv */
1110 			18, /* new_cbt_pat_intv */
1111 			18, /* wlev_dqspat_lat */
1112 		}, {
1113 			LP4_DDR3200,
1114 			DIV8_MODE,
1115 			14, /*tcmdo1lat*/
1116 			11, /* catrain_intv */
1117 			16, /* new_cbt_pat_intv */
1118 			16, /* wlev_dqspat_lat */
1119 		}, {
1120 			LP4_DDR2667,
1121 			DIV8_MODE,
1122 			13, /*tcmdo1lat*/
1123 			10, /* catrain_intv */
1124 			15, /* new_cbt_pat_intv */
1125 			15, /* wlev_dqspat_lat */
1126 		}, {
1127 			LP4_DDR2400,
1128 			DIV8_MODE,
1129 			12, /*tcmdo1lat*/
1130 			9, /* catrain_intv */
1131 			14, /* new_cbt_pat_intv */
1132 			14, /* wlev_dqspat_lat */
1133 		}, {
1134 			LP4_DDR1866,
1135 			DIV8_MODE,
1136 			11, /*tcmdo1lat*/
1137 			9, /* catrain_intv */
1138 			13, /* new_cbt_pat_intv */
1139 			13, /* wlev_dqspat_lat */
1140 		}, {
1141 			LP4_DDR1600,
1142 			DIV8_MODE,
1143 			10, /*tcmdo1lat*/
1144 			8, /* catrain_intv */
1145 			12, /* new_cbt_pat_intv */
1146 			12, /* wlev_dqspat_lat */
1147 		}, {
1148 			LP4_DDR1200,
1149 			DIV8_MODE,
1150 			9, /*tcmdo1lat*/
1151 			8, /* catrain_intv */
1152 			11, /* new_cbt_pat_intv */
1153 			11, /* wlev_dqspat_lat */
1154 		}, {
1155 			LP4_DDR800,
1156 			DIV8_MODE,
1157 			8, /*tcmdo1lat*/
1158 			8, /* catrain_intv */
1159 			10, /* new_cbt_pat_intv */
1160 			10, /* wlev_dqspat_lat */
1161 		}, {
1162 			LP4_DDR1600,
1163 			DIV4_MODE,
1164 			16, /*tcmdo1lat*/
1165 			13, /* catrain_intv */
1166 			16, /* new_cbt_pat_intv */
1167 			16, /* wlev_dqspat_lat */
1168 		}, {
1169 			LP4_DDR1200,
1170 			DIV4_MODE,
1171 			14, /*tcmdo1lat*/
1172 			13, /* catrain_intv */
1173 			14, /* new_cbt_pat_intv */
1174 			14, /* wlev_dqspat_lat */
1175 		}, {
1176 			LP4_DDR800,
1177 			DIV4_MODE,
1178 			12, /*tcmdo1lat*/
1179 			13, /* catrain_intv */
1180 			12, /* new_cbt_pat_intv */
1181 			12, /* wlev_dqspat_lat */
1182 		}, {
1183 			LP4_DDR400,
1184 			DIV4_MODE,
1185 			12, /*tcmdo1lat*/
1186 			13, /* catrain_intv */
1187 			12, /* new_cbt_pat_intv */
1188 			12, /* wlev_dqspat_lat */
1189 		},
1190 	};
1191 
1192 	struct cbt_intv *pintv;
1193 
1194 	pintv = lookup_cbt_intv(intv, ARRAY_SIZE(intv),
1195 			p->freq_sel, vGet_Div_Mode(p));
1196 	if (!pintv) {
1197 		msg("not found entry!\n");
1198 		return;
1199 	}
1200 
1201 	set_cbt_intv_rg(p, pintv);
1202 }
1203 
1204 #if __LP5_COMBO__
1205 static void set_cbt_wlev_intv_lp5(DRAMC_CTX_T *p)
1206 {
1207 	struct cbt_intv intv[] = {
1208 		{
1209 			LP5_DDR6400,
1210 			UNKNOWN_MODE,
1211 			15, /*tcmdo1lat*/
1212 			15, /* catrain_intv */
1213 			17, /* new_cbt_pat_intv */
1214 			17, /* wlev_dqspat_lat */
1215 		}, {
1216 			LP5_DDR6000,
1217 			UNKNOWN_MODE,
1218 			15, /*tcmdo1lat*/
1219 			15, /* catrain_intv */
1220 			17, /* new_cbt_pat_intv */
1221 			17, /* wlev_dqspat_lat */
1222 		}, {
1223 			LP5_DDR5500,
1224 			UNKNOWN_MODE,
1225 			14, /*tcmdo1lat*/
1226 			14, /* catrain_intv */
1227 			16, /* new_cbt_pat_intv */
1228 			16, /* wlev_dqspat_lat */
1229 		}, {
1230 			LP5_DDR4800,
1231 			UNKNOWN_MODE,
1232 			13, /*tcmdo1lat*/
1233 			13, /* catrain_intv */
1234 			15, /* new_cbt_pat_intv */
1235 			15, /* wlev_dqspat_lat */
1236 		}, {
1237 			LP5_DDR4266,
1238 			UNKNOWN_MODE,
1239 			20, /*tcmdo1lat*/
1240 			20, /* catrain_intv */
1241 			22, /* new_cbt_pat_intv */
1242 			20, /* wlev_dqspat_lat */
1243 		}, {
1244 			LP5_DDR3733,
1245 			UNKNOWN_MODE,
1246 			19, /*tcmdo1lat*/
1247 			19, /* catrain_intv */
1248 			21, /* new_cbt_pat_intv */
1249 			19, /* wlev_dqspat_lat */
1250 		}, {
1251 			LP5_DDR3200,
1252 			UNKNOWN_MODE,
1253 			15, /*tcmdo1lat*/
1254 			15, /* catrain_intv */
1255 			17, /* new_cbt_pat_intv */
1256 			17, /* wlev_dqspat_lat */
1257 		}, {
1258 			LP5_DDR2400,
1259 			UNKNOWN_MODE,
1260 			13, /*tcmdo1lat*/
1261 			13, /* catrain_intv */
1262 			15, /* new_cbt_pat_intv */
1263 			15, /* wlev_dqspat_lat */
1264 		}, {
1265 			LP5_DDR1600,
1266 			UNKNOWN_MODE,
1267 			17, /*tcmdo1lat*/
1268 			17, /* catrain_intv */
1269 			19, /* new_cbt_pat_intv */
1270 			17, /* wlev_dqspat_lat */
1271 		}, {
1272 			LP5_DDR1200,
1273 			UNKNOWN_MODE,
1274 			15, /*tcmdo1lat*/
1275 			15, /* catrain_intv */
1276 			17, /* new_cbt_pat_intv */
1277 			15, /* wlev_dqspat_lat */
1278 		}, {
1279 			LP5_DDR800,
1280 			UNKNOWN_MODE,
1281 			13, /*tcmdo1lat*/
1282 			13, /* catrain_intv */
1283 			15, /* new_cbt_pat_intv */
1284 			13, /* wlev_dqspat_lat */
1285 		},
1286 	};
1287 
1288 	struct cbt_intv *pintv;
1289 
1290 	pintv = lookup_cbt_intv(intv, ARRAY_SIZE(intv), p->freq_sel, UNKNOWN_MODE);
1291 	if (!pintv) {
1292 		msg("not found entry!\n");
1293 		return;
1294 	}
1295 
1296 	set_cbt_intv_rg(p, pintv);
1297 }
1298 #endif /* __LP5_COMBO__ */
1299 
1300 static void set_cbt_wlev_intv(DRAMC_CTX_T *p)
1301 {
1302 #if __LP5_COMBO__
1303 	if (is_lp5_family(p))
1304 		set_cbt_wlev_intv_lp5(p);
1305 	else
1306 #endif
1307 		set_cbt_wlev_intv_lp4(p);
1308 }
1309 
1310 #if SIMUILATION_CBT == 1
1311 /* To process LPDDR5 Pinmux */
1312 struct cbt_pinmux {
1313 	u8 dram_dq_b0; /* EMI_B0 is mapped to which DRAMC byte ?? */
1314 	u8 dram_dq_b1;
1315 	u8 dram_dmi_b0; /* EMI_DMI0 is mapped to which DRAMC DMI ?? */
1316 	u8 dram_dmi_b1;
1317 
1318 	u8 dram_dq7_b0; /* EMI_DQ7 is mapped to which DRAMC DQ ?? */
1319 	u8 dram_dq7_b1; /* EMI_DQ15 is mapped to which DRAMC DQ ?? */
1320 };
1321 
1322 /* Per-project definition */
1323 static struct cbt_pinmux lp4_cp[CHANNEL_NUM] = {
1324 	{
1325 		/* CHA */
1326 		.dram_dq_b0 = 0,
1327 		.dram_dq_b1 = 1,
1328 
1329 		.dram_dmi_b0 = 0,
1330 		.dram_dmi_b1 = 1,
1331 	},
1332 	#if (CHANNEL_NUM>1)
1333 	{
1334 		/* CHB */
1335 		.dram_dq_b0 = 0,
1336 		.dram_dq_b1 = 1,
1337 
1338 		.dram_dmi_b0 = 0,
1339 		.dram_dmi_b1 = 1,
1340 	},
1341 	#endif
1342 	#if (CHANNEL_NUM>2)
1343 	{
1344 		/* CHC */
1345 		.dram_dq_b0 = 0,
1346 		.dram_dq_b1 = 1,
1347 
1348 		.dram_dmi_b0 = 0,
1349 		.dram_dmi_b1 = 1,
1350 	},
1351 	{
1352 		/* CHD */
1353 		.dram_dq_b0 = 0,
1354 		.dram_dq_b1 = 1,
1355 
1356 		.dram_dmi_b0 = 0,
1357 		.dram_dmi_b1 = 1,
1358 	},
1359 	#endif
1360 };
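/*
 * For this project every channel uses the identity mapping (EMI byte0/DMI0 ->
 * DRAMC byte0, EMI byte1/DMI1 -> DRAMC byte1); dram_dq7_b0/b1 are left at 0.
 */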
1361 
1362 static inline u8 is_byte_mode(DRAMC_CTX_T *p)
1363 {
1364 	return p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1? 1: 0;
1365 }
1366 
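/*
 * CBT entry/exit is driven through LPDDR4 MR13: OP[0] (CBT) enables command
 * bus training, OP[6] (FSP-WR) selects which FSP the MR writes land in, and
 * OP[7] (FSP-OP) selects the operating FSP, so the DRAM switches FSP
 * automatically on CBT entry.  In byte mode (CBT_BYTE_MODE1) the per-byte CBT
 * compare logic is also enabled in CBT_WLEV_CTRL0.
 */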
1367 static void vSetDramMRCBTOnOff(DRAMC_CTX_T *p, U8 u1OnOff, U8 operating_fsp)
1368 {
1369 	if (u1OnOff)
1370 	{
1371 		// op[7] = !(p->dram_fsp), dram will switch to another FSP_OP automatically
1372 		if (operating_fsp)
1373 		{
1374 			MRWriteFldMulti(p, 13, P_Fld(0, MR13_FSP_OP) |
1375 								   P_Fld(1, MR13_FSP_WR) |
1376 								   P_Fld(1, MR13_CBT),
1377 								   TO_MR);
1378 		}
1379 		else
1380 		{
1381 			MRWriteFldMulti(p, 13, P_Fld(1, MR13_FSP_OP) |
1382 								   P_Fld(0, MR13_FSP_WR) |
1383 								   P_Fld(1, MR13_CBT),
1384 								   TO_MR);
1385 		}
1386 
1387 		if (p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
1388 		{
1389 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), P_Fld(1, CBT_WLEV_CTRL0_BYTEMODECBTEN) |
1390 				P_Fld(1, CBT_WLEV_CTRL0_CBT_CMP_BYTEMODE));    //BYTEMODECBTEN=1
1391 		}
1392 	}
1393 	else
1394 	{
1395 		if (operating_fsp)
1396 		{
1397 			// !! Keep MR13_FSP_OP = 0, because the system is running at a low frequency now.
1398 			MRWriteFldMulti(p, 13, P_Fld(0, MR13_FSP_OP) |
1399 								   P_Fld(1, MR13_FSP_WR) |
1400 								   P_Fld(0, MR13_CBT),
1401 								   TO_MR);
1402 		}
1403 		else
1404 		{
1405 			MRWriteFldMulti(p, 13, P_Fld(0, MR13_FSP_OP) |
1406 								   P_Fld(0, MR13_FSP_WR) |
1407 								   P_Fld(0, MR13_CBT),
1408 								   TO_MR);
1409 		}
1410 	}
1411 
1412 }
1413 
1414 static void CBTEntryLP4(DRAMC_CTX_T *p, U8 operating_fsp, U16 operation_frequency)
1415 {
1416 	struct cbt_pinmux *cp = &lp4_cp[p->channel];
1417 
1418 	#if MR_CBT_SWITCH_FREQ
1419 	if (p->dram_fsp == FSP_1)
1420 		DramcModeRegInit_CATerm(p, 1);
1421 	#endif
1422 
1423 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
1424 			0, MISC_STBCAL_DQSIENCG_NORMAL_EN);
1425 
1426 	CKEFixOnOff(p, p->rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
1427 
1428 	// yr: CA train old mode and CS traing need to check MRSRK at this point
1429 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1GetRank(p), SWCMD_CTRL0_MRSRK);
1430 
1431 	//Step 0: MRW MR13 OP[0]=1 to enable CBT
1432 	vSetDramMRCBTOnOff(p, ENABLE, operating_fsp);
1433 
1434 	//Step 0.1: before CKE low, Let DQS=0 by R_DMwrite_level_en=1, spec: DQS_t has to retain a low level during tDQSCKE period
1435 	if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
1436 	{
1437 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
1438 				1, CBT_WLEV_CTRL0_WRITE_LEVEL_EN);
1439 
1440 		//TODO, pinmux
1441 		//force byte0 tx
1442 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
1443 			0x1, CBT_WLEV_CTRL0_DQSOEAOEN);
1444 
1445 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
1446 			(1 << cp->dram_dq_b0), CBT_WLEV_CTRL0_CBT_DQBYTE_OEAO_EN);
1447 	}
1448 
1449 	mcDELAY_US(1);
1450 
1451 	//Step 1.0: let CKE go low
1452 	CKEFixOnOff(p, p->rank, CKE_FIXOFF, CKE_WRITE_TO_ONE_CHANNEL);
1453 
1454 	// Adjust u1MR13Value
1455 	(operating_fsp == FSP_1)?
1456 			DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_OP, JUST_TO_GLOBAL_VALUE):
1457 			DramcMRWriteFldAlign(p, 13, 0, MR13_FSP_OP, JUST_TO_GLOBAL_VALUE);
1458 
1459 	// Step 1.1 : let IO to O1 path valid
1460 	if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
1461 	{
1462 		// Let R_DMFIXDQIEN1=1 (byte1), 0xd8[13]  ==> Note: Do not enable again.
1463 		//Currently set in O1PathOnOff
1464 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_PADCTRL), 0x3, PADCTRL_FIXDQIEN);
1465 
1466 		// Let DDRPHY RG_RX_ARDQ_SMT_EN_B1=1 (byte1)
1467 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_B1_DQ3), 1, B1_DQ3_RG_RX_ARDQ_SMT_EN_B1);
1468 		O1PathOnOff(p, ON);
1469 	}
1470 
1471 	if (p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
1472 	{
1473 		// let IO to O1 path valid by DDRPHY RG_RX_ARDQ_SMT_EN_B0=1
1474 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_B0_DQ3), 1, B0_DQ3_RG_RX_ARDQ_SMT_EN_B0);
1475 		O1PathOnOff(p, ON);
1476 	}
1477 
1478 	// Wait tCAENT
1479 	mcDELAY_US(1);
1480 }
1481 
1482 static void CBTExitLP4(DRAMC_CTX_T *p, U8 operating_fsp, U8 operation_frequency)
1483 {
1484 	if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE || p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
1485 	{
1486 		//Step 1: CKE go high (Release R_DMCKEFIXOFF, R_DMCKEFIXON=1)
1487 		CKEFixOnOff(p, p->rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
1488 
1489 		//Step 2:wait tCATX, wait tFC
1490 		mcDELAY_US(1);
1491 
1492 		//Step 3: MRW to command bus training exit (MR13 OP[0]=0 to disable CBT)
1493 		vSetDramMRCBTOnOff(p, DISABLE, operating_fsp);
1494 
1495 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
1496 				 0, CBT_WLEV_CTRL0_WRITE_LEVEL_EN);
1497 	}
1498 
1499 	//Step 4:
1500 	//Disable O1 path output
1501 	if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
1502 	{
1503 		//Let DDRPHY RG_RX_ARDQ_SMT_EN_B1=0
1504 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_B1_DQ3), 0, B1_DQ3_RG_RX_ARDQ_SMT_EN_B1);
1505 		O1PathOnOff(p, OFF);
1506 		//Let FIXDQIEN1=0 ==> Note: Do not enable again.
1507 		//Moved into O1PathOnOff
1508 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_PADCTRL), 0, PADCTRL_FIXDQIEN);
1509 	}
1510 
1511 	if (p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
1512 	{
1513 		//Let DDRPHY RG_RX_ARDQ_SMT_EN_B0=0
1514 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_B0_DQ3), 0, B0_DQ3_RG_RX_ARDQ_SMT_EN_B0);
1515 		O1PathOnOff(p, OFF);
1516 
1517 		//Disable Byte mode CBT enable bit
1518 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), P_Fld(0, CBT_WLEV_CTRL0_BYTEMODECBTEN) |
1519 			P_Fld(0, CBT_WLEV_CTRL0_CBT_CMP_BYTEMODE));    //BYTEMODECBTEN=0
1520 	}
1521 
1522 	// Wait tCAENT
1523 	mcDELAY_US(1);
1524 }
1525 
1526 /*
1527  * get_mck_ck_ratio -- get ratio of mck:ck
1528  *
1529  * TODO, remove later, get the ratio from dram ctx dfs table!!!!
1530  *
1531  *
1532  * return 1 means 1:1
1533  * return 0 means 1:2
1534  */
1535 static u8 get_mck_ck_ratio(DRAMC_CTX_T *p)
1536 {
1537 	/*
1538 	* as per DE's comments, LP5 mck:ck has only 1:1 and 1:2.
1539 	* read SHU_LP5_CMD.LP5_CMD1TO2EN to decide which one.
1540 	*/
1541 	u32 ratio;
1542 
1543 	ratio = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_LP5_CMD),
1544 		SHU_LP5_CMD_LP5_CMD1TO2EN);
1545 
1546 	msg5("LP5 MCK:CK=%s\n", ratio == 1 ? "1:1" : "1:2");
1547 
1548 	return ratio;
1549 }
1550 
1551 static u8 get_cbtui_adjustable_maxvalue(DRAMC_CTX_T *p)
1552 {
1553 	u8 ratio;
1554 
1555 	/*
1556 	* MCK:CK=1:1,
1557 	* there are only 0~1 for UI adjust; if the UI value is larger than 1, adjust MCK.
1558 	*
1559 	* MCK:CK=1:2,
1560 	* there are only 0~3 for UI adjust; if the UI value is larger than 3, adjust MCK.
1561 	*
1562 	* MCK:CK=1:4, (for LP4)
1563 	* there are only 0~7 for UI adjust; if the UI value is larger than 7, adjust MCK.
1564 	*
1565 	*/
1566 	ratio = get_mck_ck_ratio(p);
1567 
1568 	/* here just for LP5 */
1569 	return ratio == 1? 1: 3;
1570 }
1571 
1572 static inline u32 get_ca_mck(DRAMC_CTX_T *p)
1573 {
1574 	u32 dly;
1575 
1576 	dly = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA3));
1577 	return dly & 0x0FFFFFFFU;
1578 }
1579 
1580 static inline void put_ca_mck(DRAMC_CTX_T *p, u32 ca_mck)
1581 {
1582 	u32 dly;
1583 
1584 	dly = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA3));
1585 	dly &= 0xF0000000U;
1586 	ca_mck &= 0x0FFFFFFFU;
1587 	dly |= ca_mck;
1588 
1589 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA3), dly);
1590 }
1591 
1592 static inline u32 get_ca_ui(DRAMC_CTX_T *p)
1593 {
1594 	u32 dly;
1595 
1596 	dly = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA7));
1597 	return dly & 0x0FFFFFFFU;
1598 }
1599 
1600 static inline void put_ca_ui(DRAMC_CTX_T *p, u32 ca_ui)
1601 {
1602 	u32 dly;
1603 
1604 	dly = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA7));
1605 	dly &= 0xF0000000U;
1606 	ca_ui &= 0x0FFFFFFFU;
1607 	dly |= ca_ui;
1608 
1609 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA7), dly);
1610 
1611 	// Note: CKE UI must sync CA UI (CA and CKE delay circuit are same) @Lin-Yi
1612 	// To avoid tXP timing margin issue
1613 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5), ca_ui & 0xF, SHU_SELPH_CA5_DLY_CKE);
1614 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA6), ca_ui & 0xF, SHU_SELPH_CA6_DLY_CKE1);
1615 }
1616 
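/*
 * xlate_ca_mck_ui(): each CA bit's UI delay occupies a 4-bit nibble in the
 * SELPH UI register; adding ui_delta may overflow the per-nibble maximum
 * (get_cbtui_adjustable_maxvalue), in which case the overflow is carried into
 * the corresponding MCK nibble.  Example with max=3 (MCK:CK=1:2): old UI
 * nibble 2 + delta 3 = 5 -> MCK nibble += 1, UI nibble = 1.
 */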
1617 static void xlate_ca_mck_ui(DRAMC_CTX_T *p, u32 ui_delta,
1618 		u32 mck_old, u32 ui_old, u32 *mck_new, u32 *ui_new)
1619 {
1620 	u8 i;
1621 	u32 mask, max;
1622 	u32 bit_ui, bit_mck;
1623 	u32 ui_tmp = 0, mck_tmp = 0;
1624 
1625 	max = get_cbtui_adjustable_maxvalue(p);
1626 	mask = max;
1627 
1628 	for (i = 0; i < CATRAINING_NUM_LP5; i++) {
1629 		bit_mck = 0;
1630 		bit_ui = ((ui_old >> (i * 4)) & mask) + ui_delta;
1631 		if (bit_ui > max) {
1632 			bit_mck = bit_ui / (max + 1);
1633 			bit_ui = bit_ui % (max + 1);
1634 		}
1635 
1636 		mck_tmp += (bit_mck << (i * 4));
1637 		ui_tmp += (bit_ui << (i * 4));
1638 	}
1639 
1640 	if (ui_new)
1641 		*ui_new = ui_tmp;
1642 
1643 	if (mck_new)
1644 		*mck_new = mck_old + mck_tmp;
1645 }
1646 
1647 static inline u8 get_ca_pi_per_ui(DRAMC_CTX_T *p)
1648 {
1649 #if __LP5_COMBO__
1650 	if (p->freq_sel == LP5_DDR4266)
1651 		return 64;
1652 	else
1653 #endif
1654 		return 32;
1655 }
1656 
1657 static int get_capi_max(DRAMC_CTX_T *p)
1658 {
1659 	if (u1IsPhaseMode(p) == TRUE)
1660 	{
1661 		return 32;
1662 	}
1663 
1664 	return 64;
1665 }
1666 
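/*
 * adjust_ca_ui()/adjust_cs_ui(): when a CA/CS PI delay reaches the PI limit
 * (get_capi_max), the whole-UI part is folded into the SELPH UI/MCK fields and
 * only the PI remainder is returned.  E.g. with 32 PI per UI, pi_dly = 70
 * becomes 2 UI plus a returned remainder of 6 PI.
 */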
1667 static S16 adjust_ca_ui(DRAMC_CTX_T *p, U32 ca_mck,
1668 		U32 ca_ui, S16 pi_dly)
1669 {
1670 	S16 p2u;
1671 	S16 ui, pi;
1672 	U32 ui_new = 0, mck_new = 0;
1673 
1674 	if (pi_dly < get_capi_max(p))
1675 	{
1676 		return pi_dly;
1677 	}
1678 
1679 	p2u = get_ca_pi_per_ui(p);
1680 
1681 	ui = pi_dly / p2u;
1682 	pi = pi_dly % p2u;
1683 
1684 	xlate_ca_mck_ui(p, ui, ca_mck, ca_ui, &mck_new, &ui_new);
1685 
1686 	put_ca_ui(p, ui_new);
1687 	put_ca_mck(p, mck_new);
1688 	msg5("mck_new: 0x%x, ui_new: 0x%x, pi:%d\n",
1689 		mck_new, ui_new, pi);
1690 
1691 	return pi;
1692 }
1693 
1694 static inline u32 get_cs_mck(DRAMC_CTX_T *p)
1695 {
1696 	if (p->rank == RANK_1)
1697 		return u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA1),
1698 			SHU_SELPH_CA1_TXDLY_CS1);
1699 	else
1700 		return u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA1),
1701 			SHU_SELPH_CA1_TXDLY_CS);
1702 }
1703 
1704 static inline void put_cs_mck(DRAMC_CTX_T *p, u32 cs_ui)
1705 {
1706 	if (p->rank == RANK_1)
1707 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA1),
1708 			cs_ui, SHU_SELPH_CA1_TXDLY_CS1);
1709 	else
1710 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA1),
1711 			cs_ui, SHU_SELPH_CA1_TXDLY_CS);
1712 }
1713 
1714 static inline u32 get_cs_ui(DRAMC_CTX_T *p)
1715 {
1716 	if (p->rank == RANK_1)
1717 		return u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5),
1718 			SHU_SELPH_CA5_DLY_CS1);
1719 	else
1720 		return u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5),
1721 			SHU_SELPH_CA5_DLY_CS);
1722 }
1723 
1724 static inline void put_cs_ui(DRAMC_CTX_T *p, u32 cs_ui)
1725 {
1726 	if (p->rank == RANK_1)
1727 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5),
1728 			cs_ui, SHU_SELPH_CA5_DLY_CS1);
1729 	else
1730 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5),
1731 			cs_ui, SHU_SELPH_CA5_DLY_CS);
1732 }
1733 
1734 //void LP5_ShiftCSUI(DRAMC_CTX_T *p, S8 iShiftUI)
1735 //{
1736 //	REG_TRANSFER_T TransferUIRegs  = {DRAMC_REG_SHU_SELPH_CA5, SHU_SELPH_CA5_DLY_CS};
1737 //	REG_TRANSFER_T TransferMCKRegs = {DRAMC_REG_SHU_SELPH_CA1, SHU_SELPH_CA1_TXDLY_CS};
1738 //
1739 //	ExecuteMoveDramCDelay(p, TransferUIRegs[i], TransferMCKRegs[i], iShiftUI);
1740 //}
1741 
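/*
 * Same idea as adjust_ca_ui() but for CS: the over-range PI delay is folded
 * into the CS UI field (masked to 1 bit for MCK:CK=1:1, 2 bits for 1:2),
 * any further overflow goes into the CS MCK field, and the leftover PI is
 * returned to the caller.
 */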
1742 static S16 adjust_cs_ui(DRAMC_CTX_T *p, u32 cs_mck, u32 cs_ui, S16 pi_dly)
1743 {
1744 	S16 p2u;
1745 	S16 ui = 0, pi = 0;
1746 	u8 ratio;
1747 	u32 ui_max;
1748 	u32 cs_bit_mask, cs_ui_tmp, cs_mck_tmp;
1749 
1750 	if (pi_dly < get_capi_max(p))
1751 	{
1752 		return pi_dly;
1753 	}
1754 
1755 	p2u = get_ca_pi_per_ui(p);
1756 
1757 	ui = pi_dly / p2u;
1758 	pi = pi_dly % p2u;
1759 
1760 	ratio = get_mck_ck_ratio(p);
1761 	if (ratio) {
1762 		/* 1:1 */
1763 		cs_bit_mask = 1;
1764 	} else {
1765 		/* 1:2 */
1766 		cs_bit_mask = 3;
1767 	}
1768 
1769 	ui_max = get_cbtui_adjustable_maxvalue(p);
1770 	cs_ui_tmp = (cs_ui & cs_bit_mask) + ui;
1771 	cs_mck_tmp = 0;
1772 	if (cs_ui_tmp > ui_max) {
1773 		cs_mck_tmp = cs_ui_tmp / (ui_max + 1);
1774 		cs_ui_tmp = cs_ui_tmp % (ui_max + 1);
1775 	}
1776 
1777 	cs_mck_tmp += cs_mck;
1778 	put_cs_ui(p, cs_ui_tmp);
1779 	put_cs_mck(p, cs_mck_tmp);
1780 
1781 	msg5("csmck:%d, csui: %d, pi:%d before\n",
1782 			cs_mck, cs_ui, 0);
1783 	msg5("csmck:%d, csui: %d, pi:%d after\n",
1784 			cs_mck_tmp, cs_ui_tmp, pi);
1785 
1786 	return pi;
1787 }
1788 
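/*
 * PI sweep step used by CBT: the listed low-speed LP5 shuffles step by 8;
 * otherwise the step follows the clock loop mode (8 in semi-open loop,
 * 16 in open loop, 1 in normal closed loop). DV simulation always uses 8
 * to shorten run time.
 */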
1789 static u32 get_capi_step(DRAMC_CTX_T *p)
1790 {
1791 	u32 step;
1792 
1793 	switch (p->freq_sel) {
1794 	case LP5_DDR800:
1795 	case LP5_DDR1200:
1796 	case LP5_DDR1600:
1797 	case LP5_DDR3733:
1798 		step = 8;
1799 		break;
1800 	default:
1801 		if (vGet_DDR_Loop_Mode(p) == SEMI_OPEN_LOOP_MODE)
1802 		{
1803 			step = 8;
1804 		}
1805 		else if (vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE)
1806 		{
1807 			step = 16;
1808 		}
1809 		else
1810 		{
1811 			step = 1;
1812 		}
1813 		break;
1814 	}
1815 
1816 #if FOR_DV_SIMULATION_USED
1817 	return 8;
1818 #else
1819 	return step;
1820 #endif
1821 }
1822 
1823 #if CBT_O1_PINMUX_WORKAROUND
1824 static u32 CBTCompareWordaroundDecodeO1Pinmux(DRAMC_CTX_T *p, u32 o1_value, U8 *uiLPDDR_O1_Mapping)
1825 {
1826 	U8 u1Idx;
1827 	U32 u4Result;
1828 
1829 	u4Result = 0;
1830 
1831 	for (u1Idx = 0;u1Idx < p->data_width;u1Idx++)
1832 		u4Result |= ((o1_value >> uiLPDDR_O1_Mapping[u1Idx]) & 0x1) << u1Idx;
1833 
1834 	return u4Result;
1835 }
1836 
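/*
 * O1-pinmux workaround for the CBT pattern compare: for every CA position it
 * drives a walking-one / walking-zero pattern, triggers the HW compare, reads
 * the DQ feedback from MISC_DQO1, undoes the board DQ pinmux with
 * CBTCompareWordaroundDecodeO1Pinmux(), and XORs against the expected
 * pattern. A set bit in the returned value marks a feedback bit that
 * mismatched; the pattern loop stops early once every bit has already failed.
 */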
1837 static u32 CBTDelayCACLKCompareWorkaround(DRAMC_CTX_T *p)
1838 {
1839 	u8 u1pattern_index, u1ca_index, u1dq_index, u1dq_start, u1dq_end, u1ca_number_per_bit, u1bit_num_per_byte, u1pattern_choose;
1840 	U8 *uiLPDDR_O1_Mapping = NULL;
1841 	u32 u4TimeCnt, rdy, u4dq_o1, u4data_receive, u4ca_pattern, u4Result, u4Ready;
1842 	u8 u1pattern_num;
1843 
1844 	const U8 u1LP5CBT_Pattern_Mapping[2][7] =
1845 	{
1846 		{
1847 			1, 2, 4, 8, 16, 32, 64
1848 		},
1849 
1850 		{
1851 			126, 125, 123, 119, 111, 95, 63
1852 		},
1853 	};
1854 	const U8 u1LP4CBT_Pattern_Mapping[2][6] =
1855 	{
1856 		{
1857 			1, 2, 4, 8, 16, 32
1858 		},
1859 
1860 		{
1861 			62, 61, 59, 55, 47, 31
1862 		},
1863 	};
1864 
1865 	u4Result = 0;
1866 	u1bit_num_per_byte = 8;
1867 
1868 #if (__LP5_COMBO__)
1869 	if (is_lp5_family(p))
1870 	{
1871 		uiLPDDR_O1_Mapping = (U8 *)uiLPDDR5_O1_Mapping_POP[p->channel];
1872 		u1pattern_num = 8;
1873 		u1ca_number_per_bit = CATRAINING_NUM_LP5;
1874 		if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
1875 		{
1876 			u1dq_start = 0;
1877 			u1dq_end = 6;
1878 		}
1879 		else
1880 		{
1881 			u1dq_start = 0;
1882 			u1dq_end = 14;
1883 		}
1884 	}
1885 	else
1886 #endif
1887 	{
1888 		uiLPDDR_O1_Mapping = (U8 *)uiLPDDR4_O1_Mapping_POP[p->channel];
1889 		u1pattern_num = 4;
1890 		u1ca_number_per_bit = CATRAINING_NUM_LP4;
1891 		if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
1892 		{
1893 			u1dq_start = 8;
1894 			u1dq_end = 13;
1895 		}
1896 		else
1897 		{
1898 			u1dq_start = 0;
1899 			u1dq_end = 13;
1900 		}
1901 	}
1902 
1903 	vIO32WriteFldMulti(DRAMC_REG_CBT_WLEV_CTRL3, P_Fld(0x1, CBT_WLEV_CTRL3_CATRAIN_PAT_STOP0)
1904 		| P_Fld(0x1, CBT_WLEV_CTRL3_CATRAIN_PAT_STOP1));
1905 
1906 	for (u1pattern_index = 0; u1pattern_index < u1pattern_num; u1pattern_index++)
1907 	{
1908 		u1pattern_choose = (u1pattern_index > 3) ? (u1pattern_index % 2) : /* LP5 mapping */
1909 			((u1pattern_index > 1)? (3 - u1pattern_index) : u1pattern_index); /* LP5 & LP4 mapping */
1910 		for (u1ca_index = 0; u1ca_index < u1ca_number_per_bit; u1ca_index++)
1911 		{
1912 		#if (__LP5_COMBO__)
1913 			if (is_lp5_family(p))
1914 			{
1915 				u4ca_pattern = u1LP5CBT_Pattern_Mapping[u1pattern_choose][u1ca_index];
1916 			}
1917 			else
1918 		#endif
1919 			{
1920 				u4ca_pattern = u1LP4CBT_Pattern_Mapping[u1pattern_choose][u1ca_index];
1921 			}
1922 
1923 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL3), P_Fld((u1pattern_index+1), CBT_WLEV_CTRL3_CATRAIN_1PAT_SEL0)
1924 													   | P_Fld((u1ca_index+1), CBT_WLEV_CTRL3_CATRAIN_1PAT_SEL1));
1925 
1926 			u4TimeCnt = TIME_OUT_CNT;
1927 
1928 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 1, CBT_WLEV_CTRL0_CBT_CAPATEN);
1929 
1930 			//Check CA training compare ready (dramc_conf_nao 0x3fc , CATRAIN_CMP_CPT)
1931 			do
1932 			{
1933 				u4Ready = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_STATUS1), CBT_WLEV_STATUS1_CATRAIN_CMP_CPT);
1934 				u4TimeCnt --;
1935 				mcDELAY_US(1);
1936 			}while ((u4Ready == 0) && (u4TimeCnt > 0));
1937 
1938 			if (u4TimeCnt == 0)//time out
1939 			{
1940 				msg("[CBTDelayCACLKCompare] Resp fail (time out)\n");
1941 				mcFPRINTF(fp_A60868, "[CBTDelayCACLKCompare] Resp fail (time out)\n");//Eddie Test
1942 				//return DRAM_FAIL;
1943 			}
1944 
1945 			u4dq_o1 = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQO1), MISC_DQO1_DQO1_RO);
1946 
1947 			u4dq_o1 = CBTCompareWordaroundDecodeO1Pinmux(p, u4dq_o1, uiLPDDR_O1_Mapping);
1948 
1949 			if (u1dq_end >= u1ca_number_per_bit)
1950 				u4ca_pattern |= u4ca_pattern << u1bit_num_per_byte;
1951 
1952 			u4dq_o1 ^= u4ca_pattern;
1953 
1954 			for(u1dq_index=u1dq_start; u1dq_index<=u1dq_end; u1dq_index++)
1955 			{
1956 				if ((p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1) && (u1dq_index == u1ca_number_per_bit))
1957 					u1dq_index = u1bit_num_per_byte;
1958 
1959 				u4data_receive = (u4dq_o1 >> u1dq_index) & 0x1;
1960 
1961 				if (u1dq_index < u1bit_num_per_byte)
1962 					u4Result |= u4data_receive << u1dq_index;
1963 				else
1964 					u4Result |= u4data_receive << (u1dq_index - u1bit_num_per_byte);
1965 			}
1966 
1967 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 0, CBT_WLEV_CTRL0_CBT_CAPATEN);
1968 
1969 		}
1970 		if (u4Result == ((0x1 << u1ca_number_per_bit) - 1))
1971 			break;
1972 	}
1973 	return u4Result;
1974 }
1975 
1976 static u32 new_cbt_pat_compare_workaround(DRAMC_CTX_T *p, new_cbt_pat_cfg_t *ncm)
1977 {
1978 	u8 u1pattern_index, u1ca_index, u1dq_index, u1dq_start, u1dq_end, u1ca_number_per_bit, u1bit_num_per_byte;
1979 	U8 *uiLPDDR_O1_Mapping = NULL;
1980 	u32 u4TimeCnt, rdy, u4dq_o1, u4data_receive, u4ca_pattern_a, u4ca_pattern, u4Result, u4Ready;
1981 	u8 u1pattern_num;
1982 
1983 	u4Result = 0;
1984 	u1bit_num_per_byte = 8;
1985 
1986 #if (__LP5_COMBO__)
1987 	if (is_lp5_family(p))
1988 	{
1989 		uiLPDDR_O1_Mapping = (U8 *)uiLPDDR5_O1_Mapping_POP[p->channel];
1990 		u1pattern_num = 8;
1991 		u1ca_number_per_bit = 7;
1992 		if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
1993 		{
1994 			u1dq_start = 0;
1995 			u1dq_end = 6;
1996 		}
1997 		else
1998 		{
1999 			u1dq_start = 0;
2000 			u1dq_end = 14;
2001 		}
2002 	}
2003 	else
2004 #endif
2005 	{
2006 		uiLPDDR_O1_Mapping = (U8 *)uiLPDDR4_O1_Mapping_POP[p->channel];
2007 		u1pattern_num = 4;
2008 		u1ca_number_per_bit = 6;
2009 		if (p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE)
2010 		{
2011 			u1dq_start = 8;
2012 			u1dq_end = 13;
2013 		}
2014 		else
2015 		{
2016 			u1dq_start = 0;
2017 			u1dq_end = 13;
2018 		}
2019 	}
2020 
2021 	for (u1pattern_index = 0; u1pattern_index < u1pattern_num; u1pattern_index++)
2022 	{
2023 		u4ca_pattern_a = ((ncm->pat_a[u1pattern_index] >> ncm->ca_golden_sel) & 0x1) ? ((0x1 << u1ca_number_per_bit) - 1) : 0x0;
2024 
2025 		for (u1ca_index = 0; u1ca_index < u1ca_number_per_bit; u1ca_index++)
2026 		{
2027 			u4ca_pattern = u4ca_pattern_a & ~(0x1 << u1ca_index);
2028 
2029 			if ((ncm->pat_v[u1pattern_index] >> ncm->ca_golden_sel) & 0x1)
2030 				u4ca_pattern |= 0x1 << u1ca_index;
2031 
2032 			if (ncm->invert_num)
2033 				u4ca_pattern ^= (0x1 << u1ca_number_per_bit) - 1;
2034 
2035 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL5), P_Fld(u1pattern_index, CBT_WLEV_CTRL5_NEW_CBT_PAT_NUM)
2036 													   | P_Fld(u1ca_index, CBT_WLEV_CTRL5_NEW_CBT_CA_NUM));
2037 
			u4TimeCnt = TIME_OUT_CNT;	/* reset the time-out counter; the polling loop below decrements it (it was used uninitialized) */
2038 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL5), 1, CBT_WLEV_CTRL5_NEW_CBT_CAPATEN);
2039 
2040 			//Check CA training compare ready (dramc_conf_nao 0x3fc , CATRAIN_CMP_CPT)
2041 			do
2042 			{
2043 				u4Ready = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_STATUS2), CBT_WLEV_STATUS2_CBT_PAT_CMP_CPT);
2044 				u4TimeCnt --;
2045 				mcDELAY_US(1);
2046 			}while ((u4Ready == 0) && (u4TimeCnt > 0));
2047 
2048 			if (u4TimeCnt == 0)//time out
2049 			{
2050 				msg("[CBTDelayCACLKCompare] Resp fail (time out)\n");
2051 				mcFPRINTF(fp_A60868, "[CBTDelayCACLKCompare] Resp fail (time out)\n");//Eddie Test
2052 				//return DRAM_FAIL;
2053 			}
2054 
2055 			u4dq_o1 = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQO1), MISC_DQO1_DQO1_RO);
2056 
2057 			u4dq_o1 = CBTCompareWordaroundDecodeO1Pinmux(p, u4dq_o1, uiLPDDR_O1_Mapping);
2058 
2059 			if (u1dq_end >= u1ca_number_per_bit)
2060 				u4ca_pattern |= u4ca_pattern << u1bit_num_per_byte;
2061 
2062 			u4dq_o1 ^= u4ca_pattern;
2063 
2064 			for(u1dq_index=u1dq_start; u1dq_index<=u1dq_end; u1dq_index++)
2065 			{
2066 				if ((p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1) && (u1dq_index == u1ca_number_per_bit))
2067 					u1dq_index = u1bit_num_per_byte;
2068 
2069 				u4data_receive = (u4dq_o1 >> u1dq_index) & 0x1;
2070 
2071 				if (u1dq_index < u1bit_num_per_byte)
2072 					u4Result |= u4data_receive << u1dq_index;
2073 				else
2074 					u4Result |= u4data_receive << (u1dq_index - u1bit_num_per_byte);
2075 			}
2076 
2077 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL5), 0, CBT_WLEV_CTRL5_NEW_CBT_CAPATEN);
2078 		}
2079 		if (u4Result == ((0x1 << u1ca_number_per_bit) - 1))
2080 			break;
2081 	}
2082 	return u4Result;
2083 }
2084 #endif
2085 
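/*
 * Applies the CBT result to the PHY PI fields: a negative delay moves the
 * CLK and CS PIs instead (keeping CA at 0), while a non-negative delay is
 * written to the CA command PI with CLK/CS left at 0.
 */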
2086 void CBTDelayCACLK(DRAMC_CTX_T *p, S32 iDelay)
2087 {
2088 	if (iDelay < 0)
2089 	{	/* Set CLK delay */
2090 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_CMD0),
2091 			P_Fld(0, SHU_R0_CA_CMD0_RG_ARPI_CMD) |
2092 			P_Fld(-iDelay, SHU_R0_CA_CMD0_RG_ARPI_CLK) |
2093 			P_Fld(-iDelay, SHU_R0_CA_CMD0_RG_ARPI_CS));
2094 	}
2095 /*
2096 	else if (iDelay >= 64)
2097 	{
2098 		DramcCmdUIDelaySetting(p, 2);
2099 
2100 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_CMD0),
2101 			P_Fld(iDelay - 64, SHU_R0_CA_CMD0_RG_ARPI_CMD) |
2102 			P_Fld(0, SHU_R0_CA_CMD0_RG_ARPI_CLK) |
2103 			P_Fld(0, SHU_R0_CA_CMD0_RG_ARPI_CS));
2104 	}
2105 */
2106 	else
2107 	{	/* Set CA output delay */
2108 //		DramcCmdUIDelaySetting(p, 0);
2109 
2110 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_CMD0),
2111 			P_Fld(iDelay, SHU_R0_CA_CMD0_RG_ARPI_CMD) |
2112 			P_Fld(0, SHU_R0_CA_CMD0_RG_ARPI_CLK) |
2113 			P_Fld(0, SHU_R0_CA_CMD0_RG_ARPI_CS));
2114 	}
2115 }
2116 
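/*
 * Programs the final CS delay found by CA training. With two ranks, the
 * average of both ranks' CS centers is used; the value is split into UI/MCK
 * by adjust_cs_ui() and the residual PI is written to RG_ARPI_CS for every
 * rank up to the current one.
 */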
2117 static void CBTAdjustCS(DRAMC_CTX_T *p, int autok)
2118 {
2119 	S32 iFirstCSPass = 0, iLastCSPass = 0, iCSFinalDelay;//iCSCenter
2120 
2121 	U8 backup_rank, ii;
2122 	u32 pi_dly;
2123 	u32 cs_ui, cs_mck;
2124 
2125 	backup_rank = u1GetRank(p);
2126 
2127 	cs_ui = get_cs_ui(p);
2128 	cs_mck = get_cs_mck(p);
2129 
2130 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_CBT)
2131 	if (p->femmc_Ready == 1)
2132 	{
2133 		CATrain_CsDelay[p->channel][p->rank] = p->pSavetimeData->u1CBTCsDelay_Save[p->channel][p->rank];
2134 	}
2135 #endif
2136 
2137 	// if dual rank, use average position of both rank
2138 	if(backup_rank == RANK_1)
2139 	{
2140 		iCSFinalDelay = (CATrain_CsDelay[p->channel][RANK_0] + CATrain_CsDelay[p->channel][RANK_1]) >> 1;
2141 	}
2142 	else
2143 	{
2144 		iCSFinalDelay = CATrain_CsDelay[p->channel][p->rank];
2145 	}
2146 
2147 	//Set CS output delay after training
2148 	/* p->rank = RANK_0, save to Reg Rank0 and Rank1, p->rank = RANK_1, save to Reg Rank1 */
2149 	for (ii = RANK_0; ii <= backup_rank; ii++)
2150 	{
2151 		vSetRank(p, ii);
2152 
2153 		pi_dly = adjust_cs_ui(p, cs_mck, cs_ui, iCSFinalDelay);
2154 
2155 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_CMD0), pi_dly, SHU_R0_CA_CMD0_RG_ARPI_CS);
2156 	}
2157 
2158 	vSetRank(p, backup_rank);
2159 
2160 	msg("CS Dly: %d (%d~%d)\n", iCSFinalDelay, iFirstCSPass, iLastCSPass);
2161 }
2162 
2163 #if CA_PER_BIT_DELAY_CELL
2164 static void CATrainingSetPerBitDelayCell(DRAMC_CTX_T *p, S16 *iCAFinalCenter, U8 ca_pin_num)
2165 {
2166 	U8 *uiLPDDR_CA_Mapping = NULL;
2167 	U8 u1CA;
2168 	S8 iCA_PerBit_DelayLine[8] = {0};
2169 
2170 #if __LP5_COMBO__
2171 	if (is_lp5_family(p))
2172 	{
2173 		uiLPDDR_CA_Mapping = (U8 *)uiLPDDR5_CA_Mapping_POP[p->channel];
2174 	}
2175 	else
2176 #endif
2177 	{
2178 		uiLPDDR_CA_Mapping = (U8 *)uiLPDDR4_CA_Mapping_POP[p->channel];
2179 	}
2180 
2181 	for (u1CA = 0;u1CA < ca_pin_num;u1CA++)
2182 	{
2183 		iCA_PerBit_DelayLine[uiLPDDR_CA_Mapping[u1CA]] = iCAFinalCenter[u1CA];
2184 	}
2185 
2186 	// Set CA perbit delay line calibration results
2187 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_TXDLY0),
2188 			P_Fld(iCA_PerBit_DelayLine[0], SHU_R0_CA_TXDLY0_TX_ARCA0_DLY) |
2189 			P_Fld(iCA_PerBit_DelayLine[1], SHU_R0_CA_TXDLY0_TX_ARCA1_DLY) |
2190 			P_Fld(iCA_PerBit_DelayLine[2], SHU_R0_CA_TXDLY0_TX_ARCA2_DLY) |
2191 			P_Fld(iCA_PerBit_DelayLine[3], SHU_R0_CA_TXDLY0_TX_ARCA3_DLY));
2192 
2193 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_TXDLY1),
2194 			P_Fld(iCA_PerBit_DelayLine[4], SHU_R0_CA_TXDLY1_TX_ARCA4_DLY) |
2195 			P_Fld(iCA_PerBit_DelayLine[5], SHU_R0_CA_TXDLY1_TX_ARCA5_DLY) |
2196 			P_Fld(iCA_PerBit_DelayLine[6], SHU_R0_CA_TXDLY1_TX_ARCA6_DLY) |
2197 			P_Fld(iCA_PerBit_DelayLine[7], SHU_R0_CA_TXDLY1_TX_ARCA7_DLY));
2198 }
2199 #endif// end of CA_PER_BIT_DELAY_CELL
2200 
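/*
 * Applies the final CA delay from CA training: any PI amount beyond the PI
 * range is first folded into the CA UI/MCK registers via adjust_ca_ui(),
 * then the remaining PI (and, if enabled, the per-bit CA delay cells) is
 * written for rank 0 up to the current rank.
 */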
2201 static void CBTSetCACLKResult(DRAMC_CTX_T *p, U32 u4MCK, U32 u4UI, S8 iFinalCACLK, U8 ca_pin_num)
2202 {
2203 	U8 backup_rank, rank_i, uiCA;
2204 	S16 iCAFinalCenter[CATRAINING_NUM]={0}; //for CA_PER_BIT
2205 
2206 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_CBT)
2207 	if (p->femmc_Ready == 1)
2208 	{
2209 		CATrain_CmdDelay[p->channel][p->rank] = p->pSavetimeData->s1CBTCmdDelay_Save[p->channel][p->rank];
2210 		vSetCalibrationResult(p, DRAM_CALIBRATION_CA_TRAIN, DRAM_FAST_K);
2211 		#if CA_PER_BIT_DELAY_CELL
2212 			for (uiCA = 0; uiCA < ca_pin_num; uiCA++)
2213 				iCAFinalCenter[uiCA] = p->pSavetimeData->u1CBTCA_PerBit_DelayLine_Save[p->channel][p->rank][uiCA];
2214 		#endif
2215 	}
2216 #endif
2217 
2218 	iFinalCACLK = CATrain_CmdDelay[p->channel][p->rank];
2219 
2220 	msg("\n[CBTSetCACLKResult] CA Dly = %d\n", iFinalCACLK);
2221 
2222 	iFinalCACLK = adjust_ca_ui(p, u4MCK, u4UI, iFinalCACLK);
2223 
2224 	backup_rank = u1GetRank(p);
2225 
2226 	for (rank_i = RANK_0; rank_i <= backup_rank;rank_i++)
2227 	{
2228 		vSetRank(p, rank_i);
2229 
2230 		CBTDelayCACLK(p, iFinalCACLK);
2231 
2232 #if CA_PER_BIT_DELAY_CELL
2233 		CATrainingSetPerBitDelayCell(p, iCAFinalCenter, ca_pin_num);
2234 #endif
2235 	}
2236 
2237 	vSetRank(p, backup_rank);
2238 }
2239 
2240 #if (__LP5_COMBO__)
2241 /* Return (Vref_B0 | (Vref_B1 << 8)) to support Byte mode */
2242 static U8 GetCBTVrefPinMuxValue_lp5(DRAMC_CTX_T *p, U8 u1VrefLevel)
2243 {
2244 	U8 u2VrefBit, u2Vref_org;
2245 	U16 u2Vref_new;
2246 
2247 	u2Vref_org = u1VrefLevel & 0x7f;
2248 
2249 	u2Vref_new = 0;
2250 
2251 	for (u2VrefBit = 0; u2VrefBit < 8; u2VrefBit++)
2252 	{
2253 		//msg("=== u2VrefBit: %d, %d\n",u2VrefBit,uiLPDDR4_O1_Mapping_POP[p->channel][u2VrefBit]);
2254 		if (u2Vref_org & (1 << u2VrefBit))
2255 		{
2256 			u2Vref_new |= (1 << uiLPDDR5_O1_Mapping_POP[p->channel][u2VrefBit]);
2257 		}
2258 	}
2259 
2260 	msg3("=== u2Vref_new: 0x%x --> 0x%x\n", u2Vref_org, u2Vref_new);
2261 
2262 	if (lp5_cp[p->channel].dram_dq_b0)
2263 		u2Vref_new >>= 8;
2264 
2265 	return u2Vref_new;
2266 }
2267 
2268 #endif
2269 
2270 static U8 GetCBTVrefPinMuxValue(DRAMC_CTX_T *p, U8 u1VrefRange, U8 u1VrefLevel)
2271 {
2272 	U8 u2VrefBit, u2Vref_org;
2273 	U16 u2Vref_new;
2274 
2275 	if (p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
2276 		return ((u1VrefRange & 0x1) << 6) | (u1VrefLevel & 0x3f);
2277 
2278 	u2Vref_org = ((u1VrefRange & 0x1) << 6) | (u1VrefLevel & 0x3f);
2279 
2280 	u2Vref_new = 0;
2281 	for (u2VrefBit = 0; u2VrefBit < 8; u2VrefBit++)
2282 	{
2283 		//msg("=== u2VrefBit: %d, %d\n",u2VrefBit,uiLPDDR4_O1_Mapping_POP[p->channel][u2VrefBit]);
2284 		if (u2Vref_org & (1 << u2VrefBit))
2285 		{
2286 			u2Vref_new |= (1 << uiLPDDR4_O1_Mapping_POP[p->channel][u2VrefBit]);
2287 		}
2288 	}
2289 
2290 	msg3("=== u2Vref_new: 0x%x --> 0x%x\n", u2Vref_org, u2Vref_new);
2291 
2292 	if (lp4_cp[p->channel].dram_dq_b0)
2293 		u2Vref_new >>= 8;
2294 
2295 	return u2Vref_new;
2296 }
2297 
2298 static void CBTSetVrefLP4(DRAMC_CTX_T *p, U8 u1VrefRange, U8 u1VrefLevel, U8 operating_fsp, U8 stateFlag)
2299 {
2300 	U32 fld;
2301 	U8 u4DbgValue;
2302 	U8 u1VrefValue_pinmux;
2303 	struct cbt_pinmux *cp = &lp4_cp[p->channel];
2304 
2305 	if ((p->dram_cbt_mode[p->rank] == CBT_NORMAL_MODE) &&
2306 		(stateFlag == IN_CBT))
2307 	{
2308 		u1VrefValue_pinmux = GetCBTVrefPinMuxValue(p, u1VrefRange, u1VrefLevel);
2309 
2310 #if !REDUCE_LOG_FOR_PRELOADER
2311 		msg("\nCH_%d, RK_%d, Range=%d, VrefValue_pinmux = 0x%x\n", p->channel, p->rank, u1VrefRange, u1VrefValue_pinmux);
2312 #endif
2313 		u1MR12Value[p->channel][p->rank][operating_fsp] = ((u1VrefRange & 0x1) << 6) | u1VrefLevel;
2314 
2315 		fld = (cp->dram_dq_b0) ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0;
2316 
2317 		//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_WRITE_LEV), ((u1VrefRange&0x1) <<6) | (u1VrefLevel & 0x3f), WRITE_LEV_DMVREFCA);	//MR12, bit[25:20]=OP[5:0]	bit 26=OP[6]
2318 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2319 			u1VrefValue_pinmux, fld);  //MR12, bit[25:20]=OP[5:0]  bit 26=OP[6]
2320 
2321 		 //DQS_SEL=1, DQS_B1_G=1, Toggle R_DMDQS_WLEV (1 to 0)
2322 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), (0x1 << cp->dram_dq_b0), CBT_WLEV_CTRL0_CBT_WLEV_DQS_SEL);
2323 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL3), 0xa, CBT_WLEV_CTRL3_DQSBX_G);
2324 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 1, CBT_WLEV_CTRL0_CBT_WLEV_DQS_TRIG);
2325 		mcDELAY_US(1);
2326 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 0, CBT_WLEV_CTRL0_CBT_WLEV_DQS_TRIG);
2327 
2328 	}
2329 	else
2330 	{
2331 		if (operating_fsp == FSP_1)
2332 		{
2333 			DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_WR, TO_MR);
2334 		}
2335 
2336 		u4DbgValue = (((u1VrefRange & 0x1) << 6) | (u1VrefLevel & 0x3f));
2337 		u1MR12Value[p->channel][p->rank][operating_fsp] = u4DbgValue;
2338 		msg3("u4DbgValue = 0x%x\n", u4DbgValue);
2339 
2340 		DramcModeRegWriteByRank(p, p->rank, 12, u4DbgValue);
2341 	}
2342 
2343 	//wait tVREF_LONG
2344 	mcDELAY_US(1);
2345 }
2346 
2347 
2348 #if __LP5_COMBO__
2349 static inline u8 is_training_mode1(DRAMC_CTX_T *p)
2350 {
2351 	return is_lp5_family(p) && p->lp5_training_mode == TRAINING_MODE1? 1: 0;
2352 }
2353 
2354 static inline u8 is_training_mode2(DRAMC_CTX_T *p)
2355 {
2356 	return is_lp5_family(p) && p->lp5_training_mode == TRAINING_MODE2? 1: 0;
2357 }
2358 
2359 static inline u8 is_phase_falling(DRAMC_CTX_T *p)
2360 {
2361 	return is_lp5_family(p) && p->lp5_cbt_phase == CBT_PHASE_FALLING? 1: 0;
2362 }
2363 
2364 static void force_dq7(DRAMC_CTX_T *p, u8 level)
2365 {
2366 	u32 fld_b0, fld_b1;
2367 	u8 dq;
2368 	u8 dramc_byte;
2369 	struct cbt_pinmux *cp = &lp5_cp[p->channel];
2370 	/*
2371 	 * TODO
2372 	 *
2373 	 * pinmux to select dq7
2374 	 *
2375 	 */
2376 
2377 	fld_b0 = (cp->dram_dq_b0) ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0;
2378 	fld_b1 = (cp->dram_dq_b1) ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0;
2379 
2380 	dq = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2381 		fld_b0);
2382 	dq &= ~(1 << (cp->dram_dq7_b0 % 8));
2383 	dq |= ((level & 1) << (cp->dram_dq7_b0 % 8));
2384 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2385 		P_Fld(dq, fld_b0));
2386 
2387 	if (is_byte_mode(p)) {
2388 		dq = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2389 			fld_b1);
2390 		dq &= ~(1 << (cp->dram_dq7_b1 % 8));
2391 		dq |= ((level & 1) << (cp->dram_dq7_b1 % 8));
2392 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2393 			P_Fld(dq, fld_b1));
2394 	}
2395 }
2396 
2397 static inline void force_dmi(DRAMC_CTX_T *p, u8 level)
2398 {
2399 	struct cbt_pinmux *cp = &lp5_cp[p->channel];
2400 
2401 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2402 		P_Fld(level, (cp->dram_dmi_b0) ? CBT_WLEV_CTRL0_CBT_SW_DQM_B1_LP5 : CBT_WLEV_CTRL0_CBT_SW_DQM_B0_LP5));
2403 
2404 	if (is_byte_mode(p)) {
2405 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2406 		P_Fld(level, (cp->dram_dmi_b1 ? CBT_WLEV_CTRL0_CBT_SW_DQM_B1_LP5 : CBT_WLEV_CTRL0_CBT_SW_DQM_B0_LP5)));
2407 	}
2408 }
2409 
2410 static void toggle_wck(DRAMC_CTX_T *p, u8 toggle)
2411 {
2412 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2413 		P_Fld(toggle, CBT_WLEV_CTRL0_CBT_WLEV_WCKAO));
2414 }
2415 
2416 static void set_vref_by_mrw(DRAMC_CTX_T *p, u8 vref)
2417 {
2418 	DramcModeRegWriteByRank(p, p->rank, 12, vref);
2419 }
2420 
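/*
 * LP5 CBT training mode 2: the VREF-CA code is conveyed on the DQ bus
 * instead of an MRW. The sequence below drops DMI, turns the DRAMC byte(s)
 * into TX, drives the (pinmux-adjusted) VREF value on DQ while preserving
 * the DQ7 level, raises DMI to latch it, and returns the byte(s) to RX.
 */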
2421 static void set_vref_by_dq(DRAMC_CTX_T *p, u16 vref)
2422 {
2423 	u8 dq;
2424 	struct cbt_pinmux *cp = &lp5_cp[p->channel];
2425 
2426 	force_dmi(p, 0);
2427 	/* wait tCBTRTW */
2428 	mcDELAY_US(1);
2429 
2430 	if (is_byte_mode(p)) {
2431 		/* DRAMC B0/B1 as TX */
2432 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2433 			3, CBT_WLEV_CTRL0_CBT_DQBYTE_OEAO_EN);
2434 
2435 		/* Set DRAM Byte 1 */
2436 		dq = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2437 			(cp->dram_dq_b1 ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0));
2438 
2439 		/* Shall be carefully processed in case DQ[7] is changed */
2440 		dq &= (1 << (cp->dram_dq7_b1 % 8));
2441 		dq |= ((vref >> 8) & 0xff);
2442 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2443 			P_Fld(dq, (cp->dram_dq_b1 ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0)));
2444 	} else {
2445 		/* DRAMC B0 as TX */
2446 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2447 			(1 << cp->dram_dq_b0), CBT_WLEV_CTRL0_CBT_DQBYTE_OEAO_EN);
2448 	}
2449 
2450 	/* Set DRAM Byte 0 */
2451 	dq = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2452 		(cp->dram_dq_b0 ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0));
2453 	dq &= (1 << (cp->dram_dq7_b0 % 8));
2454 	dq |= (vref & 0xff);
2455 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4),
2456 		P_Fld(dq, (cp->dram_dq_b0 ? CBT_WLEV_CTRL4_CBT_TXDQ_B1 : CBT_WLEV_CTRL4_CBT_TXDQ_B0)));
2457 
2458 	/* wait tDQStrain */
2459 	mcDELAY_US(1);
2460 	force_dmi(p, 1);
2461 	mcDELAY_US(1);
2462 	/* DRAMC B0/B1 as RX */
2463 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2464 		0, CBT_WLEV_CTRL0_CBT_DQBYTE_OEAO_EN);
2465 	mcDELAY_US(1);
2466 }
2467 
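/*
 * Enables/disables the output-enable "tie" overrides so the DQ7 pad (and
 * DQM) of the byte(s) used for CBT can be driven statically while the rest
 * of the byte stays as input; the B0/B1 register set is chosen from the
 * DRAM-to-PHY byte pinmux in lp5_cp[].
 */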
2468 static void switch_oe_tie(DRAMC_CTX_T *p, u8 sw)
2469 {
2470 	u8 dq_oe;
2471 	struct cbt_pinmux *cp = &lp5_cp[p->channel];
2472 
2473 	if (sw) {
2474 		/* Set DRAM Byte 0 */
2475 		if (cp->dram_dq_b0) {
2476 			dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2477 				B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1);
2478 			dq_oe |= (1 << (cp->dram_dq7_b0 % 8));
2479 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2480 				P_Fld(dq_oe, B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1) |
2481 				P_Fld(1, B1_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B1));
2482 
2483 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2484 				P_Fld(1, B1_DQ2_RG_TX_ARDQM_OE_TIE_EN_B1) |
2485 				P_Fld(1, B1_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B1));
2486 		} else {
2487 			dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2488 				B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0);
2489 			dq_oe |= (1 << (cp->dram_dq7_b0 % 8));
2490 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2491 				P_Fld(dq_oe, B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0) |
2492 				P_Fld(1, B0_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B0));
2493 
2494 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2495 				P_Fld(1, B0_DQ2_RG_TX_ARDQM_OE_TIE_EN_B0) |
2496 				P_Fld(1, B0_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B0));
2497 		}
2498 
2499 		/* Set DRAM Byte 1 */
2500 		if (is_byte_mode(p)) {
2501 			/* Set DRAM Byte 0 */
2502 			if (cp->dram_dq_b1) {
2503 				dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2504 					B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1);
2505 				dq_oe |= (1 << (cp->dram_dq7_b1 % 8));
2506 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2507 					P_Fld(dq_oe, B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1) |
2508 					P_Fld(1, B1_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B1));
2509 
2510 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2511 					P_Fld(1, B1_DQ2_RG_TX_ARDQM_OE_TIE_EN_B1) |
2512 					P_Fld(1, B1_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B1));
2513 			} else {
2514 				dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2515 					B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0);
2516 				dq_oe |= (1 << (cp->dram_dq7_b1 % 8));
2517 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2518 					P_Fld(dq_oe, B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0) |
2519 					P_Fld(1, B0_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B0));
2520 
2521 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2522 					P_Fld(1, B0_DQ2_RG_TX_ARDQM_OE_TIE_EN_B0) |
2523 					P_Fld(1, B0_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B0));
2524 			}
2525 		}
2526 	} else {
2527 		/* Set DRAM Byte 0 */
2528 		if (cp->dram_dq_b0) {
2529 			dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2530 				B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1);
2531 			dq_oe &= ~(1 << (cp->dram_dq7_b0 % 8));
2532 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2533 				P_Fld(dq_oe, B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1) |
2534 				P_Fld(0, B1_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B1));
2535 
2536 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2537 				P_Fld(0, B1_DQ2_RG_TX_ARDQM_OE_TIE_EN_B1) |
2538 				P_Fld(0, B1_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B1));
2539 		} else {
2540 			dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2541 				B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0);
2542 			dq_oe &= ~(1 << (cp->dram_dq7_b0 % 8));
2543 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2544 				P_Fld(dq_oe, B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0) |
2545 				P_Fld(0, B0_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B0));
2546 
2547 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2548 				P_Fld(0, B0_DQ2_RG_TX_ARDQM_OE_TIE_EN_B0) |
2549 				P_Fld(0, B0_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B0));
2550 		}
2551 
2552 		/* Set DRAM Byte 1 */
2553 		if (is_byte_mode(p)) {
2554 			/* Set DRAM Byte 0 */
2555 			if (cp->dram_dq_b1) {
2556 				dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2557 					B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1);
2558 				dq_oe &= ~(1 << (cp->dram_dq7_b1 % 8));
2559 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2560 					P_Fld(dq_oe, B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1) |
2561 					P_Fld(0, B1_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B1));
2562 
2563 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2),
2564 					P_Fld(0, B1_DQ2_RG_TX_ARDQM_OE_TIE_EN_B1) |
2565 					P_Fld(0, B1_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B1));
2566 			} else {
2567 				dq_oe = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2568 					B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0);
2569 				dq_oe &= ~(1 << (cp->dram_dq7_b1 % 8));	/* clear the DQ7 tie bit; mirrors the other byte branches (shifting 0 here would be a no-op) */
2570 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2571 					P_Fld(dq_oe, B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0) |
2572 					P_Fld(0, B0_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B0));
2573 
2574 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2),
2575 					P_Fld(0, B0_DQ2_RG_TX_ARDQM_OE_TIE_EN_B0) |
2576 					P_Fld(0, B0_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B0));
2577 			}
2578 		}
2579 	}
2580 }
2581 
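/*
 * LP5 command-bus-training entry sequence (sketch of what the code below
 * does): fix CKE for the rank under training, enable the write-leveling/CBT
 * engine, program MR13 with the training mode and MR16 with FSP_WR/FSP_OP
 * plus the CBT phase, then toggle WCK and raise DQ7 so the DRAM switches
 * into the CBT FSP, and finally enable the O1 feedback path.
 */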
2582 static void lp5_cbt_entry(DRAMC_CTX_T *p, u8 operating_fsp,
2583 		u16 operation_frequency)
2584 {
2585 	lp5heff_save_disable(p);
2586 
2587 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
2588 		0, MISC_STBCAL_DQSIENCG_NORMAL_EN);
2589 
2590 	/* TCMDEN and CATRAINEN use MRSRK */
2591 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0),
2592 		p->rank, SWCMD_CTRL0_MRSRK);
2593 
2594 	#if 0
2595 	if (p->rank == RANK_0) {
2596 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2597 			P_Fld(0, CKECTRL_CKEFIXOFF) |
2598 			P_Fld(1, CKECTRL_CKEFIXON));
2599 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2600 			P_Fld(1, CKECTRL_CKE1FIXOFF) |
2601 			P_Fld(0, CKECTRL_CKE1FIXON));
2602 	} else if (p->rank == RANK_1) {
2603 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2604 			P_Fld(0, CKECTRL_CKE1FIXOFF) |
2605 			P_Fld(1, CKECTRL_CKE1FIXON));
2606 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2607 			P_Fld(1, CKECTRL_CKEFIXOFF) |
2608 			P_Fld(0, CKECTRL_CKEFIXON));
2609 	}
2610 	#else
2611 	if (p->rank == RANK_0) {
2612 		CKEFixOnOff(p, RANK_0, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
2613 		CKEFixOnOff(p, RANK_1, CKE_FIXOFF, CKE_WRITE_TO_ONE_CHANNEL);
2614 	} else if (p->rank == RANK_1){
2615 		CKEFixOnOff(p, RANK_1, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
2616 		CKEFixOnOff(p, RANK_0, CKE_FIXOFF, CKE_WRITE_TO_ONE_CHANNEL);
2617 	}
2618 	#endif
2619 
2620 	/*
2621 	* APHY TX PI Spec mode option
2622 	* for K RK1, if RK0/1 DQ UI setting is not the same, it will fail
2623 	*/
2624 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_NEW_XRW2W_CTRL),
2625 		1, SHU_NEW_XRW2W_CTRL_TXPI_UPD_MODE);
2626 
2627 	/*
2628 	* APHY TX PI Spec mode option
2629 	* for K RK1, if RK0/1 DQ UI setting is not the same, it will fail
2630 	*/
2631 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_NEW_XRW2W_CTRL),
2632 		1, SHU_NEW_XRW2W_CTRL_TXPI_UPD_MODE);
2633 
2634 	/*
2635 	* APHY TX PI Spec mode option
2636 	* for K RK1, if RK0/1 DQ UI setting is not the same, it will fail
2637 	*/
2638 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_NEW_XRW2W_CTRL),
2639 		1, SHU_NEW_XRW2W_CTRL_TXPI_UPD_MODE);
2640 
2641 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2642 		P_Fld(0x1, CBT_WLEV_CTRL0_WRITE_LEVEL_EN));
2643 
2644 	/*
2645 	 * TODO
2646 	 * BYTEMODE, PINMUX
2647 	 */
2648 	if (is_training_mode1(p)) {
2649 		/* DRAMC B0 as RX */
2650 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2651 			0, CBT_WLEV_CTRL0_CBT_DQBYTE_OEAO_EN);
2652 	}
2653 
2654 	switch_oe_tie(p, 1);
2655 
2656 	/*
2657 	 * MR13 OP[6], cbt mode
2658 	 * 0, training mode 1
2659 	 * 1, training mode 2
2660 	 *
2661 	 * TODO
2662 	 * MR13 values??
2663 	 */
2664 	DramcModeRegWriteByRank(p, p->rank, 13, p->lp5_training_mode << 6);
2665 
2666 	if (operating_fsp == FSP_2) {
2667 		/*
2668 		 * dram will switch to another FSP_OP automatically
2669 		 */
2670 		DramcModeRegWriteByRank(p, p->rank, 16,
2671 			(2 << MR16_FSP_WR_SHIFT) |
2672 			(2 << MR16_FSP_OP_SHIFT) |
2673 			(p->lp5_cbt_phase << MR16_CBT_PHASE) |
2674 			/* CBT enabled fsp[2] */
2675 			(3 << MR16_FSP_CBT) |
2676 			(1 << MR16_VRCG));
2677 	} else if (operating_fsp == FSP_1) {
2678 		/*
2679 		 * dram will switch to another FSP_OP automatically
2680 		 */
2681 		DramcModeRegWriteByRank(p, p->rank, 16,
2682 			(1 << MR16_FSP_WR_SHIFT) |
2683 			(1 << MR16_FSP_OP_SHIFT) |
2684 			(p->lp5_cbt_phase << MR16_CBT_PHASE) |
2685 			/* CBT enabled fsp[1] */
2686 			(2 << MR16_FSP_CBT) |
2687 			(1 << MR16_VRCG));
2688 	} else {
2689 		/* FSP_0 */
2690 		DramcModeRegWriteByRank(p, p->rank, 16,
2691 			(0 << MR16_FSP_WR_SHIFT) |
2692 			(0 << MR16_FSP_OP_SHIFT) |
2693 			(p->lp5_cbt_phase << MR16_CBT_PHASE) |
2694 			/* CBT enabled fsp[0] */
2695 			(1 << MR16_FSP_CBT) |
2696 			(1 << MR16_VRCG));
2697 	}
2698 
2699 	/* wait tCBTWCKPRE_static */
2700 	mcDELAY_US(1);
2701 
2702 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL3),
2703 		P_Fld(0x5, CBT_WLEV_CTRL3_DQSBX_G) |
2704 		P_Fld(0x5, CBT_WLEV_CTRL3_DQSBY_G) |
2705 		P_Fld(0x5, CBT_WLEV_CTRL3_DQSBX1_G) |
2706 		P_Fld(0x5, CBT_WLEV_CTRL3_DQSBY1_G));
2707 
2708 	if (is_byte_mode(p)) {
2709 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 3,
2710 			CBT_WLEV_CTRL0_CBT_WLEV_DQS_SEL);
2711 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), P_Fld(1, CBT_WLEV_CTRL0_BYTEMODECBTEN) |
2712 			P_Fld(1, CBT_WLEV_CTRL0_CBT_CMP_BYTEMODE));    //BYTEMODECBTEN=1
2713 	} else {
2714 		if (lp5_cp[p->channel].dram_dq7_b0)
2715 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 0x2,
2716 				CBT_WLEV_CTRL0_CBT_WLEV_DQS_SEL);
2717 		else
2718 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 0x1,
2719 				CBT_WLEV_CTRL0_CBT_WLEV_DQS_SEL);
2720 	}
2721 
2722 	/* toggle WCK */
2723 	toggle_wck(p, 1);
2724 
2725 	/* wait tWCK2DQ7H */
2726 	mcDELAY_US(1);
2727 
2728 	/* DQ[7] = High */
2729 	force_dq7(p, 1);
2730 
2731 	/* wait tDQ7HWCK to switch FSP */
2732 	mcDELAY_US(1);
2733 
2734 	/* stop toggle WCK */
2735 	toggle_wck(p, 0);
2736 
2737 	/* wait tDQ72DQ */
2738 	mcDELAY_US(1);
2739 
2740 	O1PathOnOff(p, 1);
2741 
2742 	/* start toggle WCK */
2743 	toggle_wck(p, 1);
2744 
2745 	/* Wait tCAENT */
2746 	mcDELAY_US(1);
2747 }
2748 
2749 static void lp5_cbt_exit(DRAMC_CTX_T *p, u8 operating_fsp,
2750 		u8 operation_frequency)
2751 {
2752 	/* drive dq7 low */
2753 	force_dq7(p, 0);
2754 
2755 	/* wait tDQ7WCK */
2756 	mcDELAY_US(1);
2757 
2758 	/* stop wck toggle */
2759 	toggle_wck(p, 0);
2760 
2761 	/* wait tVREFCA_LONG */
2762 	mcDELAY_US(1);
2763 
2764 	if (operating_fsp == FSP_2) {
2765 		DramcModeRegWriteByRank(p, p->rank, 16,
2766 			(2 << MR16_FSP_WR_SHIFT) |
2767 			(2 << MR16_FSP_OP_SHIFT) |
2768 			(0 << MR16_CBT_PHASE) |
2769 			/* normal operation */
2770 			(0 << MR16_FSP_CBT) |
2771 			(1 << MR16_VRCG));
2772 	} else if (operating_fsp == FSP_1) {
2773 		DramcModeRegWriteByRank(p, p->rank, 16,
2774 			(1 << MR16_FSP_WR_SHIFT) |
2775 			(1 << MR16_FSP_OP_SHIFT) |
2776 			(0 << MR16_CBT_PHASE) |
2777 			/* normal operation */
2778 			(0 << MR16_FSP_CBT) |
2779 			(1 << MR16_VRCG));
2780 	} else {
2781 		DramcModeRegWriteByRank(p, p->rank, 16,
2782 			(0 << MR16_FSP_WR_SHIFT) |
2783 			(0 << MR16_FSP_OP_SHIFT) |
2784 			(0 << MR16_CBT_PHASE) |
2785 			/* normal operation */
2786 			(0 << MR16_FSP_CBT) |
2787 			(1 << MR16_VRCG));
2788 	}
2789 
2790 	/* wait tMRD */
2791 	mcDELAY_US(1);
2792 
2793 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0),
2794 		P_Fld(0x0, CBT_WLEV_CTRL0_WRITE_LEVEL_EN));
2795 	switch_oe_tie(p, 0);
2796 
2797 	/*
2798 	* APHY TX PI Spec mode option
2799 	* for K RK1, if RK0/1 DQ UI setting is not the same, it will fail
2800 	*/
2801 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_NEW_XRW2W_CTRL),
2802 		0, SHU_NEW_XRW2W_CTRL_TXPI_UPD_MODE);
2803 
2804 	/* Disable O1 path output */
2805 	O1PathOnOff(p, 0);
2806 
2807 	if (is_byte_mode(p)) {
2808 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), P_Fld(0, CBT_WLEV_CTRL0_BYTEMODECBTEN) |
2809 			P_Fld(0, CBT_WLEV_CTRL0_CBT_CMP_BYTEMODE));    //BYTEMODECBTEN=1
2810 	}
2811 
2812 	#if 0
2813 	if (p->rank == RANK_0) {
2814 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2815 			P_Fld(0, CKECTRL_CKEFIXOFF) |
2816 			P_Fld(0, CKECTRL_CKEFIXON));
2817 	} else if (p->rank == RANK_1) {
2818 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL),
2819 			P_Fld(0, CKECTRL_CKE1FIXOFF) |
2820 			P_Fld(0, CKECTRL_CKE1FIXON));
2821 	}
2822 	#else
2823 	CKEFixOnOff(p, CKE_WRITE_TO_ALL_RANK, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
2824 	#endif
2825 
2826 	lp5heff_restore(p);
2827 }
2828 #endif
2829 
2830 static void CBTEntryLP45(DRAMC_CTX_T *p, U8 u1FSP, U16 u2Freq)
2831 {
2832 #if __LP5_COMBO__
2833 	if (is_lp5_family(p))
2834 	{
2835 		lp5_cbt_entry(p, u1FSP, u2Freq);
2836 	}
2837 	else
2838 #endif
2839 	{
2840 		if(p->dram_fsp == FSP_1)
2841 		{
2842 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_CA_CMD2), P_Fld(1, CA_CMD2_RG_TX_ARCMD_OE_DIS_CA)
2843 						| P_Fld(0, CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA)
2844 						| P_Fld(0xff, CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA));
2845 			cbt_switch_freq(p, CBT_LOW_FREQ);
2846 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_CA_CMD2), P_Fld(0, CA_CMD2_RG_TX_ARCMD_OE_DIS_CA)
2847 						| P_Fld(1, CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA)
2848 						| P_Fld(0xff, CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA));
2849 		}
2850 #if ENABLE_LP4Y_WA //@Darren, debugging for DFS stress
2851 		CmdBusTrainingLP4YWA(p, DISABLE);
2852 #endif
2853 		CBTEntryLP4(p, u1FSP, u2Freq);
2854 		if(p->dram_fsp == FSP_1)
2855 		{
2856 			cbt_switch_freq(p, CBT_HIGH_FREQ);
2857 		}
2858 	}
2859 }
2860 
2861 static void CBTExitLP45(DRAMC_CTX_T *p, U8 u1FSP, U8 u2Freq, U8 stateFlag)
2862 {
2863 	/* by yirong.wang
2864 	 * if stateFlag == OUT_CBT, it means we finished CBT, exit CBT
2865 	 * if stateFlag == IN_CBT, it means we are trying to setup vref by MRW
2866 	 *	 IN_CBT case, only for LP5 mode 1 and LP4 byte mode
2867 	 */
2868 #if __LP5_COMBO__
2869 	if (is_lp5_family(p))
2870 	{
2871 		if (stateFlag == OUT_CBT || is_training_mode1(p))
2872 		{
2873 			lp5_cbt_exit(p, u1FSP, u2Freq);
2874 		}
2875 	}
2876 	else
2877 #endif
2878 	{
2879 		if (stateFlag == OUT_CBT || p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
2880 		{
2881 			if (p->dram_fsp == FSP_1) cbt_switch_freq(p, CBT_LOW_FREQ);
2882 			CBTExitLP4(p, u1FSP, u2Freq);
2883 #if ENABLE_LP4Y_WA //@Darren, debugging for DFS stress
2884 			CmdBusTrainingLP4YWA(p, ENABLE);
2885 #endif
2886 		}
2887 	}
2888 }
2889 
2890 static void CBTSetVrefLP45(DRAMC_CTX_T *p, U8 u1VrefRange, U8 u1VrefLevel, U8 u1FSP, U16 u2Freq, U8 stateFlag)
2891 {
2892 	/* by yirong.wang
2893 	 * if stateFlag == OUT_CBT, it means we are not in CBT, setup vref by MRW
2894 	 * if stateFlag == IN_CBT, it means we are doing CBT
2895 	 *	 LP5 training mode 1 and LP4 byte mode, exit CBT and setup vref by MRW, then re-enter CBT
2896 	 *	 LP5 training mode 2 and LP4 normal mode, setup vref by DQ
2897 	 */
2898 #if __LP5_COMBO__
2899 	if (is_lp5_family(p))
2900 	{
2901 		if (stateFlag == IN_CBT && is_training_mode2(p))
2902 		{
2903 			/*
2904 			 * training mode2
2905 			 * TODO, according to pinmux to adjust u1VrefLevel
2906 			 */
2907 			set_vref_by_dq(p, GetCBTVrefPinMuxValue_lp5(p, u1VrefLevel));
2908 		}
2909 		else
2910 		{
2911 			if (stateFlag == IN_CBT && is_training_mode1(p))
2912 			{
2913 				lp5_cbt_exit(p, u1FSP, u2Freq);
2914 			}
2915 
2916 			set_vref_by_mrw(p, u1VrefLevel);
2917 
2918 			if (stateFlag == IN_CBT && is_training_mode1(p))
2919 			{
2920 				lp5_cbt_entry(p, u1FSP, u2Freq);
2921 			}
2922 		}
2923 	}
2924 	else
2925 #endif
2926 	{
2927 		if (stateFlag == IN_CBT && p->dram_cbt_mode[p->rank] == CBT_BYTE_MODE1)
2928 		{
2929 			// BYTE MODE: We are not in CBT now, set Vref & enter CBT
2930 			if (p->dram_fsp == FSP_1) cbt_switch_freq(p, CBT_LOW_FREQ);
2931 			CBTExitLP4(p, u1FSP, u2Freq);
2932 
2933 			CBTSetVrefLP4(p, u1VrefRange, u1VrefLevel, u1FSP, stateFlag);
2934 
2935 			CBTEntryLP4(p, u1FSP, u2Freq);
2936 			if(p->dram_fsp == FSP_1)
2937 			{
2938 				cbt_switch_freq(p, CBT_HIGH_FREQ);
2939 			}
2940 		}
2941 		else
2942 		{
2943 			CBTSetVrefLP4(p, u1VrefRange, u1VrefLevel, u1FSP, stateFlag);
2944 		}
2945 	}
2946 }
2947 
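/*
 * Top-level LP4/LP5 command bus training. The flow below backs up the
 * affected registers, disables auto refresh, enters CBT, programs the CA
 * Vref (saved or fast-k value), applies the CA center via CBTSetCACLKResult(),
 * calibrates CS with CBTAdjustCS(), then exits CBT and restores the Vref,
 * MR13 FSP setting and the backed-up registers.
 */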
2948 DRAM_STATUS_T CmdBusTrainingLP45(DRAMC_CTX_T *p, int autok)
2949 {
2950 	U8 u1FinalVref, u1FinalRange=0;
2951 	S8 iFinalCACLK;
2952 	U8 operating_fsp;
2953 	U16 operation_frequency;
2954 #if CA_PER_BIT_DELAY_CELL
2955 	S16 iCAFinalCenter[CATRAINING_NUM] = {0}; //for CA_PER_BIT
2956 #endif
2957 #if ENABLE_EYESCAN_GRAPH
2958 	U8 u1CBTEyeScanEnable;
2959 	U8 EyeScan_index[CATRAINING_NUM];
	U8 u1vrefidx, uiCA, ii;	/* loop indices for the eye-scan table reset below; added here since they are not declared elsewhere in this function */
2960 #endif
2961 
2962 	S16 pi_step;
2963 	S16 pi_start, pi_end;
2964 	u32 ca_ui;
2965 	u32 ca_mck;
#if __LP5_COMBO__
	u32 ca_mck_tmp = 0, ca_ui_tmp = 0;	/* scratch MCK/UI for the 1UI CA shift around CS calibration; added so the is_phase_falling() block compiles */
#endif
2966 	u32 ca_cmd0;
2967 	u8 ca_pin_num;
2968 	u16 p2u;
2969 
2970 	U32 u4RegBackupAddress[] =
2971 	{
2972 		(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL)),
2973 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL)),
2974 		(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL)),
2975 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ2)),
2976 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ2)),
2977 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0)),
2978 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL1)),
2979 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL2)),
2980 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL3)),
2981 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL4)),
2982 		(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0)),
2983 		(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL0)),
2984 
2985 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_VREF)),			//in O1PathOnOff()
2986 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_VREF)),			//in O1PathOnOff()
2987 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL)),	//in O1PathOnOff()
2988 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_PHY_VREF_SEL)),	//in O1PathOnOff()
2989 	};
2990 
2991 	p2u = get_ca_pi_per_ui(p);
2992 
2993 	pi_end = p2u * 2 - 1;
2994 
2995 #if FOR_DV_SIMULATION_USED == 1
2996 	pi_step = (vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE) ? 16 : 8; //for simulation speed up
2997 #else
2998 	pi_step = get_capi_step(p);
2999 #endif
3000 
3001 	switch (p->freq_sel) {
3002 	case LP5_DDR4266:
3003 	case LP5_DDR800:
3004 	case LP5_DDR1200:
3005 	case LP5_DDR1600:
3006 	case LP5_DDR3733:
3007 	case LP5_DDR2400:
3008 	case LP5_DDR3200:
3009 	case LP5_DDR4800:
3010 	case LP5_DDR5500:
3011 	case LP5_DDR6000:
3012 	case LP5_DDR6400:
3013 		pi_start = -8;
3014 		break;
3015 
3016 	default:
3017 		#if CBT_MOVE_CA_INSTEAD_OF_CLK
3018 			pi_start = -16;
3019 			pi_end = p2u * 3 - 1;
3020 		#else
3021 			if (vGet_DDR_Loop_Mode(p) == SEMI_OPEN_LOOP_MODE)
3022 			{
3023 				pi_start = -24;
3024 			}
3025 			else if (vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE)
3026 			{
3027 				pi_start = -16;
3028 			}
3029 			else
3030 			{
3031 				pi_start = -MAX_CLK_PI_DELAY;
3032 			}
3033 		#endif
3034 
3035 		break;
3036 	}
3037 
3038 #if MRW_CHECK_ONLY
3039 	mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
3040 #endif
3041 
3042 #if __LP5_COMBO__
3043 	if (is_lp5_family(p))
3044 	{
3045 		u1FinalVref = u1MR12Value[p->channel][p->rank][p->dram_fsp];
3046 		ca_pin_num = CATRAINING_NUM_LP5;
3047 	}
3048 	else
3049 #endif
3050 	{
3051 		u1FinalRange = u1MR12Value[p->channel][p->rank][p->dram_fsp] >> 6;
3052 		u1FinalVref = u1MR12Value[p->channel][p->rank][p->dram_fsp] & 0x3f;
3053 		ca_pin_num = CATRAINING_NUM_LP4;
3054 	}
3055 
3056 #if ENABLE_EYESCAN_GRAPH
3057 	u1CBTEyeScanEnable = GetEyeScanEnable(p, 0);
3058 
3059 	for (u1vrefidx = 0; u1vrefidx < VREF_VOLTAGE_TABLE_NUM_LP5-1; u1vrefidx++)
3060 	{
3061 		for (uiCA = 0; uiCA < ca_pin_num; uiCA++)
3062 		{
3063 			for (ii = 0; ii < EYESCAN_BROKEN_NUM; ii++)
3064 			{
3065 				gEyeScan_Min[u1vrefidx][uiCA][ii] = EYESCAN_DATA_INVALID;
3066 				gEyeScan_Max[u1vrefidx][uiCA][ii] = EYESCAN_DATA_INVALID;
3067 			}
3068 		}
3069 	}
3070 #endif
3071 
3072 	vPrintCalibrationBasicInfo(p);
3073 	msg("pi_start=%d, pi_end=%d, pi_step=%d, new_cbt_mode=%d, autok=%d\n",
3074 			pi_start, pi_end, pi_step, p->new_cbt_mode, autok);
3075 
3076 #if __LP5_COMBO__
3077 	if (is_lp5_family(p))
3078 	{
3079 		msg("lp5_training_mode=%d, lp5_cbt_phase=%d\n", p->lp5_training_mode, p->lp5_cbt_phase);
3080 	}
3081 #endif
3082 
3083 	//Back up dramC register
3084 	DramcBackupRegisters(p, u4RegBackupAddress, ARRAY_SIZE(u4RegBackupAddress));
3085 
3086 	//default set FAIL
3087 	vSetCalibrationResult(p, DRAM_CALIBRATION_CA_TRAIN, DRAM_FAIL);
3088 
3089 #if CA_PER_BIT_DELAY_CELL
3090 	CATrainingSetPerBitDelayCell(p, iCAFinalCenter, ca_pin_num);
3091 #endif
3092 
3093 #if CBT_MOVE_CA_INSTEAD_OF_CLK
3094 	if (u1IsLP4Family(p->dram_type))
3095 	{
3096 		U8 u1CaPI = 0, u1CaUI = 0;
3097 
3098 		u1CaUI = 1;
3099 		u1CaPI = 0;
3100 
3101 		DramcCmdUIDelaySetting(p, u1CaUI);
3102 
3103 		CBTDelayCACLK(p, u1CaPI);
3104 	}
3105 #endif
3106 
3107 	/* read ca ui and mck */
3108 	ca_ui = get_ca_ui(p);
3109 	ca_mck = get_ca_mck(p);
3110 	ca_cmd0 = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_CA_CMD0));
3111 	(void)ca_cmd0;
3112 
3113 	vAutoRefreshSwitch(p, DISABLE); //When doing CA training, should make sure that auto refresh is disable
3114 
3115 	/*
3116 	 * TODO
3117 	 *
3118 	 * here just pass simulation,
3119 	 * remove after ACTiming OK(ACTiming Table includes CATRAIN_INTV)
3120 	 */
3121 	//vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL1),
3122 	//		P_Fld(0x1F, CBT_WLEV_CTRL1_CATRAIN_INTV));
3123 	set_cbt_wlev_intv(p);
3124 
3125 	/*
3126 	 * tx_rank_sel is selected by SW
3127 	 * Lewis@20180509: tx_rank_sel is selected by SW in CBT if TMRRI design has changed.
3128 	 */
3129 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0),
3130 		p->rank, TX_SET0_TXRANK);
3131 	/* TXRANKFIX should be write after TXRANK or the rank will be fix at rank 1 */
3132 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0),
3133 		1, TX_SET0_TXRANKFIX);
3134 
3135 	iFinalCACLK = 0;
3136 	operating_fsp = p->dram_fsp;
3137 	operation_frequency = p->frequency;
3138 
3139 	// free-run dramc/ddrphy clk (DCMEN2=0, MIOCKCTRLOFF=1, PHYCLKDYNGEN=0, COMBCLKCTRL=0)
3140 	// free-run dram clk(APHYCKCG_FIXOFF =1, TCKFIXON=1)
3141 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL),
3142 		P_Fld(0, DRAMC_PD_CTRL_DCMEN2) |
3143 		P_Fld(1, DRAMC_PD_CTRL_MIOCKCTRLOFF) |
3144 		P_Fld(0, DRAMC_PD_CTRL_PHYCLKDYNGEN) |
3145 		P_Fld(0, DRAMC_PD_CTRL_COMBCLKCTRL) |
3146 		P_Fld(1, DRAMC_PD_CTRL_APHYCKCG_FIXOFF) |
3147 		P_Fld(1, DRAMC_PD_CTRL_TCKFIXON));
3148 
3149 	//Note : Assume that there is a default CS value that can apply for CA.
3150 	CBTEntryLP45(p, operating_fsp, operation_frequency);
3151 
3152 #if PINMUX_AUTO_TEST_PER_BIT_CA
3153 	CheckCADelayCell(p);
3154 #endif
3155 
3156 	//Step 3: set vref range and step by ddr type
3157 
3158 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && (BYPASS_VREF_CAL || BYPASS_CBT))
3159 	if (p->femmc_Ready == 1)
3160 	{
3161 		u1FinalVref = p->pSavetimeData->u1CBTVref_Save[p->channel][p->rank];
3162 	}
3163 #endif
3164 
3165 	msg("\n[CmdBusTrainingLP45] Vref(ca) range %d: %d\n", u1FinalRange, u1FinalVref);
3166 
3167 #ifdef FOR_HQA_TEST_USED
3168 	gFinalCBTVrefCA[p->channel][p->rank] = u1FinalVref;
3169 #endif
3170 
3171 	//Set Vref after training
3172 	// BYTE MODE: Set Vref & enter CBT
3173 	CBTSetVrefLP45(p, u1FinalRange, u1FinalVref, operating_fsp, operation_frequency, IN_CBT);
3174 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_CBT)
3175 	#if CBT_MOVE_CA_INSTEAD_OF_CLK
3176 	// scan UI from 0, not from the UI we used to enter CBT
3177 	DramcCmdUIDelaySetting(p, 0);
3178 	ca_ui = get_ca_ui(p);
3179 	#endif
3180 #endif
3181 	put_ca_ui(p, ca_ui);
3182 	//Set CA_PI_Delay after training
3183 	CBTSetCACLKResult(p, ca_mck, ca_ui, iFinalCACLK, ca_pin_num);
3184 
3185 #if ENABLE_EYESCAN_GRAPH
3186 	gEyeScan_CaliDelay[0] = CATrain_CmdDelay[p->channel][p->rank] -pi_start;
3187 #endif
3188 
3189 	//msg("\nAverage CA Dly: %d\n", iFinalCACLK);
3190 
3191 	/* -------------  CS and CLK ---------- */
3192 	/* delay ca 1UI before K CS */
3193 #if __LP5_COMBO__
3194 	if (is_phase_falling(p)) {
3195 		ca_mck = get_ca_mck(p);
3196 		ca_ui = get_ca_ui(p);
3197 		xlate_ca_mck_ui(p, 1,
3198 				ca_mck, ca_ui,
3199 				&ca_mck_tmp, &ca_ui_tmp);
3200 		put_ca_mck(p, ca_mck_tmp);
3201 		put_ca_ui(p, ca_ui_tmp);
3202 	}
3203 #endif
3204 
3205 	CBTAdjustCS(p, autok);
3206 
3207 	/* restore ca mck and ui */
3208 #if __LP5_COMBO__
3209 	if (is_phase_falling(p)) {
3210 		put_ca_mck(p, ca_mck);
3211 		put_ca_ui(p, ca_ui);
3212 	}
3213 #endif
3214 
3215 //-------  Going to exit Command bus training(CBT) mode.-------------
3216 	CBTExitLP45(p, operating_fsp, operation_frequency, OUT_CBT);
3217 	CBTSetVrefLP45(p, u1FinalRange, u1FinalVref, operating_fsp, operation_frequency, OUT_CBT);
3218 
3219 #if __LP5_COMBO__
3220 	if (!is_lp5_family(p))
3221 #endif
3222 	{
3223 		if (p->dram_fsp == FSP_1)
3224 		{
3225 			#if MR_CBT_SWITCH_FREQ
3226 			DramcModeRegInit_CATerm(p, 0);
3227 			#else
3228 			DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_OP, TO_MR);
3229 			#endif
3230 		}
3231 	}
3232 
3233 #if EYESCAN_LOG || defined(FOR_HQA_TEST_USED)
3234 	gFinalCBTVrefDQ[p->channel][p->rank] = u1FinalVref;
3235 #endif
3236 
3237 	msg3("\n[CmdBusTrainingLP45] Done\n");
3238 
3239 	//tx_rank_sel is selected by HW //Lewis@20180509: tx_rank_sel is selected by SW in CBT if TMRRI design has changed.
3240 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), 0, TX_SET0_TXRANK);
3241 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), 0, TX_SET0_TXRANKFIX); //TXRANKFIX should be write after TXRANK or the rank will be fix at rank 1
3242 
3243 	//Restore setting registers
3244 	DramcRestoreRegisters(p, u4RegBackupAddress, ARRAY_SIZE(u4RegBackupAddress));
3245 
3246 	return DRAM_OK;
3247 }
3248 #endif /* SIMUILATION_CBT */
3249 
3250 //-------------------------------------------------------------------------
3251 /** DramcWriteLeveling
3252  *	start Write Leveling Calibration.
3253  *	@param p				Pointer of context created by DramcCtxCreate.
3254  *	@param	apply			(U8): 0 don't apply the register we set  1 apply the register we set ,default don't apply.
3255  *	@retval status			(DRAM_STATUS_T): DRAM_OK or DRAM_FAIL
3256  */
3257 //-------------------------------------------------------------------------
3258 #define WRITE_LEVELING_MOVD_DQS 1//UI
3259 
3260 U8 u1MCK2UI_DivShift(DRAMC_CTX_T *p)
3261 {
3262 #if (__LP5_COMBO__ == TRUE)
3263 	if (TRUE == is_lp5_family(p))
3264 	{
3265 		//in LP4 1:8 mode, 8 small UI =  1 large UI
3266 		if (vGet_Div_Mode(p) == DIV4_MODE)
3267 		{
3268 			return MCK_TO_4UI_SHIFT;
3269 		}
3270 		else if (vGet_Div_Mode(p) == DIV16_MODE)
3271 		{
3272 			return MCK_TO_16UI_SHIFT;
3273 		}
3274 		else
3275 		{
3276 			return MCK_TO_8UI_SHIFT;
3277 		}
3278 	}
3279 	else
3280 #endif
3281 	{
3282 		//in LP4 1:8 mode, 8 small UI =  1 large UI
3283 		if (vGet_Div_Mode(p) == DIV4_MODE)
3284 		{
3285 			return MCK_TO_4UI_SHIFT;
3286 		}
3287 		else
3288 		{
3289 			return MCK_TO_8UI_SHIFT;
3290 		}
3291 	}
3292 }
3293 
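/*
 * Shifts a delay expressed as an (MCK, UI) register pair by iShiftUI small
 * UIs. The pair is flattened into a single UI count, shifted, then split
 * back: e.g. with an 8-UI MCK (shift = 3), MCK=2/UI=5 is 21 UIs; applying
 * -2 gives 19, which is written back as MCK=2, UI=3. A negative result is
 * clamped to zero and reported as DRAM_FAIL.
 */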
3294 static DRAM_STATUS_T ExecuteMoveDramCDelay(DRAMC_CTX_T *p,
3295 													REG_TRANSFER_T ui_reg,
3296 													REG_TRANSFER_T mck_reg,
3297 													S8 iShiftUI)
3298 {
3299 	S32 s4HighLevelDelay, s4DelaySum;
3300 	U32 u4TmpUI, u4TmpMCK;
3301 	U8 ucDataRateDivShift = 0;
3302 	DRAM_STATUS_T MoveResult;
3303 
3304 	ucDataRateDivShift = u1MCK2UI_DivShift(p);
3305 
3306 	u4TmpUI = u4IO32ReadFldAlign(DRAMC_REG_ADDR(ui_reg.u4Addr), ui_reg.u4Fld) & (~(1 << ucDataRateDivShift));
3307 	u4TmpMCK = u4IO32ReadFldAlign(DRAMC_REG_ADDR(mck_reg.u4Addr), mck_reg.u4Fld);
3308 	//msg("Base:  u4TmpMCK:%d,	u4TmpUI: %d,\n", u4TmpMCK, u4TmpUI);
3309 
3310 	s4HighLevelDelay = (u4TmpMCK << ucDataRateDivShift) + u4TmpUI;
3311 	s4DelaySum = (s4HighLevelDelay + iShiftUI);
3312 
3313 	if (s4DelaySum < 0)
3314 	{
3315 		u4TmpUI = 0;
3316 		u4TmpMCK = 0;
3317 		MoveResult = DRAM_FAIL;
3318 	}
3319 	else
3320 	{
3321 		u4TmpMCK = s4DelaySum >> ucDataRateDivShift;
3322 		u4TmpUI = s4DelaySum - (u4TmpMCK << ucDataRateDivShift);
3323 		MoveResult = DRAM_OK;
3324 	}
3325 
3326 	vIO32WriteFldAlign(DRAMC_REG_ADDR(ui_reg.u4Addr), u4TmpUI, ui_reg.u4Fld);
3327 	vIO32WriteFldAlign(DRAMC_REG_ADDR(mck_reg.u4Addr), u4TmpMCK, mck_reg.u4Fld);
3328 	//msg("[%d]  Final ==> u4TmpMCK:%d,  u4TmpUI: %d,\n", iShiftUI, u4TmpMCK, u4TmpUI);
3329 
3330 	return MoveResult;
3331 }
3332 
3333 static void _LoopAryToDelay(DRAMC_CTX_T *p,
3334 								  REG_TRANSFER_T *ui_reg,
3335 								  REG_TRANSFER_T *mck_reg,
3336 								  U8 u8RG_num,
3337 								  S8 iShiftUI,
3338 								  BYTES_T eByteIdx)
3339 {
3340 	U8 idx = 0, step = 1;
3341 	if (eByteIdx == BYTE_0)
3342 	{
3343 		idx = 0;
3344 		step = 2;
3345 	}
3346 	else if (eByteIdx == BYTE_1)
3347 	{
3348 		idx = 1;
3349 		step = 2;
3350 	}
3351 
3352 	for (; idx < u8RG_num; idx += step)
3353 	{
3354 		ExecuteMoveDramCDelay(p, ui_reg[idx], mck_reg[idx], iShiftUI);
3355 	}
3356 }
3357 
3358 static void LP4_ShiftDQSUI(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3359 {
3360 	// DQS / DQS_OEN
3361 	REG_TRANSFER_T TransferUIRegs[]  = {{DRAMC_REG_SHU_SELPH_DQS1, SHU_SELPH_DQS1_DLY_DQS0},		// Byte0
3362 										{DRAMC_REG_SHU_SELPH_DQS1, SHU_SELPH_DQS1_DLY_DQS1}};		// Byte1
3363 	REG_TRANSFER_T TransferMCKRegs[] = {{DRAMC_REG_SHU_SELPH_DQS0, SHU_SELPH_DQS0_TXDLY_DQS0},
3364 										{DRAMC_REG_SHU_SELPH_DQS0, SHU_SELPH_DQS0_TXDLY_DQS1}};
3365 
3366 	_LoopAryToDelay(p, TransferUIRegs, TransferMCKRegs,
3367 					   sizeof(TransferUIRegs) / sizeof(REG_TRANSFER_T),
3368 					   iShiftUI, eByteIdx);
3369 }
3370 
3371 void LP4_ShiftDQS_OENUI(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3372 {
3373 	// DQS / DQS_OEN
3374 	REG_TRANSFER_T TransferUIRegs[]  = {{DRAMC_REG_SHU_SELPH_DQS1, SHU_SELPH_DQS1_DLY_OEN_DQS0},	// Byte0
3375 										{DRAMC_REG_SHU_SELPH_DQS1, SHU_SELPH_DQS1_DLY_OEN_DQS1}};	// Byte1
3376 	REG_TRANSFER_T TransferMCKRegs[] = {{DRAMC_REG_SHU_SELPH_DQS0, SHU_SELPH_DQS0_TXDLY_OEN_DQS0},
3377 										{DRAMC_REG_SHU_SELPH_DQS0, SHU_SELPH_DQS0_TXDLY_OEN_DQS1}};
3378 
3379 	_LoopAryToDelay(p, TransferUIRegs, TransferMCKRegs,
3380 					   sizeof(TransferUIRegs) / sizeof(REG_TRANSFER_T),
3381 					   iShiftUI, eByteIdx);
3382 }
3383 
3384 static void ShiftDQUI(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3385 {
3386 	// Shift DQ / DQM / DQ_OEN / DQM_OEN
3387 	REG_TRANSFER_T TransferUIRegs[]  = {{DRAMC_REG_SHURK_SELPH_DQ3, SHURK_SELPH_DQ3_DLY_DQM0},		// Byte0
3388 										{DRAMC_REG_SHURK_SELPH_DQ3, SHURK_SELPH_DQ3_DLY_DQM1},		// Byte1
3389 										{DRAMC_REG_SHURK_SELPH_DQ2, SHURK_SELPH_DQ2_DLY_DQ0},		// Byte0
3390 										{DRAMC_REG_SHURK_SELPH_DQ2, SHURK_SELPH_DQ2_DLY_DQ1}};	// Byte1
3391 	REG_TRANSFER_T TransferMCKRegs[] = {{DRAMC_REG_SHURK_SELPH_DQ1, SHURK_SELPH_DQ1_TXDLY_DQM0},
3392 										{DRAMC_REG_SHURK_SELPH_DQ1, SHURK_SELPH_DQ1_TXDLY_DQM1},
3393 										{DRAMC_REG_SHURK_SELPH_DQ0, SHURK_SELPH_DQ0_TXDLY_DQ0},
3394 										{DRAMC_REG_SHURK_SELPH_DQ0, SHURK_SELPH_DQ0_TXDLY_DQ1}};
3395 
3396 	_LoopAryToDelay(p, TransferUIRegs, TransferMCKRegs,
3397 					sizeof(TransferUIRegs) / sizeof(REG_TRANSFER_T),
3398 					iShiftUI, eByteIdx);
3399 }
3400 
3401 static void ShiftDQUI_AllRK(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3402 {
3403 	U8 backup_rank, rk_i;
3404 	backup_rank = u1GetRank(p);
3405 
3406 	// Shift DQ / DQM / DQ_OEN / DQM_OEN
3407 	for (rk_i = RANK_0; rk_i < p->support_rank_num; rk_i++)
3408 	{
3409 		vSetRank(p, rk_i);
3410 		ShiftDQUI(p, iShiftUI, eByteIdx);
3411 	}
3412 	vSetRank(p, backup_rank);
3413 }
3414 
3415 static void ShiftDQ_OENUI(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3416 {
3417 	REG_TRANSFER_T TransferUIRegs[]  = {{DRAMC_REG_SHURK_SELPH_DQ3, SHURK_SELPH_DQ3_DLY_OEN_DQM0},	// Byte0
3418 										{DRAMC_REG_SHURK_SELPH_DQ3, SHURK_SELPH_DQ3_DLY_OEN_DQM1},	// Byte1
3419 										{DRAMC_REG_SHURK_SELPH_DQ2, SHURK_SELPH_DQ2_DLY_OEN_DQ0},	// Byte0
3420 										{DRAMC_REG_SHURK_SELPH_DQ2, SHURK_SELPH_DQ2_DLY_OEN_DQ1}};	// Byte1
3421 	REG_TRANSFER_T TransferMCKRegs[] = {{DRAMC_REG_SHURK_SELPH_DQ1, SHURK_SELPH_DQ1_TXDLY_OEN_DQM0},
3422 										{DRAMC_REG_SHURK_SELPH_DQ1, SHURK_SELPH_DQ1_TXDLY_OEN_DQM1},
3423 										{DRAMC_REG_SHURK_SELPH_DQ0, SHURK_SELPH_DQ0_TXDLY_OEN_DQ0},
3424 										{DRAMC_REG_SHURK_SELPH_DQ0, SHURK_SELPH_DQ0_TXDLY_OEN_DQ1}};
3425 
3426 	_LoopAryToDelay(p, TransferUIRegs, TransferMCKRegs,
3427 					sizeof(TransferUIRegs) / sizeof(REG_TRANSFER_T),
3428 					iShiftUI, eByteIdx);
3429 }
3430 
3431 void ShiftDQ_OENUI_AllRK(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3432 {
3433 	U8 backup_rank, rk_i;
3434 	backup_rank = u1GetRank(p);
3435 
3436 	// Shift DQ / DQM / DQ_OEN / DQM_OEN
3437 	for (rk_i = RANK_0; rk_i < p->support_rank_num; rk_i++)
3438 	{
3439 		vSetRank(p, rk_i);
3440 		ShiftDQ_OENUI(p, iShiftUI, eByteIdx);
3441 	}
3442 	vSetRank(p, backup_rank);
3443 }
3444 
3445 static void ShiftDQSWCK_UI(DRAMC_CTX_T *p, S8 iShiftUI, BYTES_T eByteIdx)
3446 {
3447 #if (__LP5_COMBO__ == TRUE)
3448 		if (TRUE == is_lp5_family(p))
3449 			LP5_ShiftWCKUI(p, iShiftUI, eByteIdx);
3450 		else
3451 #endif
3452 		{
3453 			LP4_ShiftDQSUI(p, iShiftUI, eByteIdx);
3454 			LP4_ShiftDQS_OENUI(p, iShiftUI, eByteIdx);
3455 		}
3456 }
3457 
3458 U8 u1IsLP4Div4DDR800(DRAMC_CTX_T *p)
3459 {
3460 	if ((vGet_Div_Mode(p) == DIV4_MODE) && (p->frequency == 400))
3461 		return TRUE;
3462 	else
3463 		return FALSE;
3464 }
3465 
3466 //static void vSetDramMRWriteLevelingOnOff(DRAMC_CTX_T *p, U8 u1OnOff)
3467 static void vSetDramMRWriteLevelingOnOff(DRAMC_CTX_T *p, U8 u1OnOff)
3468 {
3469 	// MR2 OP[7] to enable/disable write leveling
3470 	if (u1OnOff)
3471 		u1MR02Value[p->dram_fsp] |= 0x80;  // OP[7] WR LEV =1
3472 	else
3473 		u1MR02Value[p->dram_fsp] &= 0x7f;  // OP[7] WR LEV =0
3474 
3475 	DramcModeRegWriteByRank(p, p->rank, 2, u1MR02Value[p->dram_fsp]);
3476 }
3477 
3478 U8 u1IsPhaseMode(DRAMC_CTX_T *p)
3479 {
3480 	if ((vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE) || (vGet_DDR_Loop_Mode(p) == SEMI_OPEN_LOOP_MODE))
3481 		return TRUE;
3482 	else // DDR800_CLOSE_LOOP and NORMAL_CLOSE_LOOP
3483 		return FALSE;
3484 }
3485 
3486 static DRAM_STATUS_T DramcTriggerAndWait(DRAMC_CTX_T *p, REG_TRANSFER_T TriggerReg, REG_TRANSFER_T RepondsReg)
3487 {
3488 //	  U32 u4TimeCnt = TIME_OUT_CNT;
3489 	// @Darren, Rx HW AutoK simulation time
3490 	U32 u4TimeCnt = DDR_HW_AUTOK_POLLING_CNT;
3491 	DRAM_STATUS_T u4RespFlag = 0;
3492 
3493 	vIO32WriteFldAlign(DRAMC_REG_ADDR(TriggerReg.u4Addr), 0, TriggerReg.u4Fld); // Init EN status
3494 	vIO32WriteFldAlign(DRAMC_REG_ADDR(TriggerReg.u4Addr), 1, TriggerReg.u4Fld);
3495 	do
3496 	{
3497 		u4RespFlag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(RepondsReg.u4Addr), RepondsReg.u4Fld);
3498 		u4TimeCnt --;
3499 		mcDELAY_US(1);
3500 	}while ((u4RespFlag == 0) && (u4TimeCnt > 0));
3501 
3502 	if (u4TimeCnt == 0)//time out
3503 	{
3504 		msg("[DramcTriggerAndWait] Wait 0x%x respond fail (time out)\n", RepondsReg.u4Addr);
3505 		return DRAM_FAIL;
3506 	}
3507 
3508 	return DRAM_OK;
3509 }
3510 
3511 #if (SIMULATION_WRITE_LEVELING == 1)
3512 #if !__ETT__
3513 #undef ASSERT
3514 #define ASSERT(x) \
3515 		if (!(x)) \
3516 			while (1)\
3517 				err("ASSERT FAIL at %s[%d]!\n", __FUNCTION__, __LINE__);
3518 #endif
3519 
3520 
3521 #define DQPI_PER_UI (32)
3522 #define STORAGED_DLY_UNIT (24)
3523 static void WriteLevelingScanRange_PI(DRAMC_CTX_T *p, S32 *ps4DlyBegin, S32 *ps4DlyEnd, U8 *pu1PIStep, S16 *pPI_bound, WLEV_DELAY_BASED_T stDelayBase)
3524 {
3525 	S32 s4DlyBegin = 0, s4DlyEnd;
3526 	U8 u1PIStep;
3527 	S16 PI_bound;
3528 
3529 	if (stDelayBase == PI_BASED)
3530 	{
3531 		// Giving PI scan range
3532 		s4DlyBegin = WRITE_LEVELING_MOVD_DQS * 32 - MAX_CLK_PI_DELAY - 1;
3533 		s4DlyEnd = s4DlyBegin + 64 - 1;
3534 
3535 		if ((vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE))
3536 		{
3537 			u1PIStep = 16;
3538 			PI_bound = 32;
3539 		}
3540 		else if ((vGet_DDR_Loop_Mode(p) == SEMI_OPEN_LOOP_MODE))
3541 		{
3542 			u1PIStep = 8;
3543 			PI_bound = 32;
3544 		}
3545 		else
3546 		{
3547 			u1PIStep = 1;
3548 			PI_bound = 64;
3549 		}
3550 	}
3551 	else // stDelayBase == DLY_BASED
3552 	{
3553 		// Giving delay cell scan range
3554 		s4DlyBegin = 0;
3555 		s4DlyEnd = 2 * STORAGED_DLY_UNIT;
3556 
3557 		u1PIStep = 1;	 // One step is 1/4 delay cell
3558 		PI_bound = 1024; // No bounadary as delay cell based
3559 		PI_bound = 1024; // No boundary for delay-cell-based mode
3560 	msg2("Begin: %d, End: %d, Step: %d, Bound: %d\n", s4DlyBegin, s4DlyEnd, u1PIStep, PI_bound);
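	/* Summary of the ranges produced above (as coded): PI_BASED open loop
	 * scans in steps of 16 PI with a 32 PI wrap boundary, semi-open loop in
	 * steps of 8 PI (boundary 32), and closed loop in steps of 1 PI
	 * (boundary 64); DLY_BASED scans 0..2*STORAGED_DLY_UNIT in quarter
	 * delay-cell steps with an effectively unlimited boundary (1024). */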
3561 
3562 	*ps4DlyBegin = s4DlyBegin;
3563 	*ps4DlyEnd = s4DlyEnd;
3564 	*pu1PIStep = u1PIStep;
3565 	*pPI_bound = PI_bound;
3566 
3567 }
3568 
3569 #if ENABLE_WDQS_MODE_2
3570 DRAM_STATUS_T WriteLevelingPosCal(DRAMC_CTX_T *p, WLEV_DELAY_BASED_T stDelayBase)
3571 {
3572 	DRAM_RANK_T backup_rank = u1GetRank(p);
3573 	U8 wrlevel_dqs_delay[DQS_NUMBER] = {0};
3574 	U8 rank_i = 0;
3575 
3576 	if((wrlevel_dqs_final_delay[RANK_0][0] - wrlevel_dqs_final_delay[RANK_1][0])>=9 ||
3577 		(wrlevel_dqs_final_delay[RANK_0][0] - wrlevel_dqs_final_delay[RANK_1][0])<=-9 ||
3578 		(wrlevel_dqs_final_delay[RANK_0][1] - wrlevel_dqs_final_delay[RANK_1][1])>=9 ||
3579 		(wrlevel_dqs_final_delay[RANK_0][1] - wrlevel_dqs_final_delay[RANK_1][1])<=-9 )
3580 	{
3581 		err("[WARNING] Larger WL R2R !!\n");
3582 		#if CHECK_HQA_CRITERIA
3583 		while(1);
3584 		#endif
3585 	}
3586 
3587 	wrlevel_dqs_delay[0] = (wrlevel_dqs_final_delay[RANK_0][0] + wrlevel_dqs_final_delay[RANK_1][0]) >> 1;
3588 	wrlevel_dqs_delay[1] = (wrlevel_dqs_final_delay[RANK_0][1] + wrlevel_dqs_final_delay[RANK_1][1]) >> 1;
3589 
3590 	wrlevel_dqs_final_delay[RANK_0][0] = wrlevel_dqs_final_delay[RANK_1][0] = wrlevel_dqs_delay[0];
3591 	wrlevel_dqs_final_delay[RANK_0][1] = wrlevel_dqs_final_delay[RANK_1][1] = wrlevel_dqs_delay[1];
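	/* Illustrative example (numbers assumed): if rank0/rank1 byte0 delays are
	 * 20 and 24, both ranks end up programmed with the average (20+24)>>1 = 22;
	 * the >= 9-step rank-to-rank check above only warns before the averaging. */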
3592 
3593 	for (rank_i = p->rank; rank_i < p->support_rank_num; rank_i++)
3594 	{
3595 		vSetRank(p, rank_i);
3596 
3597 		// set to best values for  DQS
3598 		if (stDelayBase == PI_BASED)
3599 		{
3600 			// Adjust DQS output delay.
3601 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), wrlevel_dqs_delay[0], SHU_R0_B0_DQ0_ARPI_PBYTE_B0);
3602 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), wrlevel_dqs_delay[1], SHU_R0_B1_DQ0_ARPI_PBYTE_B1);
3603 		}
3604 		else // stDelayBase == DLY_BASED
3605 		{
3606 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), wrlevel_dqs_delay[0], SHU_R0_B0_TXDLY3_TX_ARWCK_DLY_B0);
3607 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), wrlevel_dqs_delay[1], SHU_R0_B1_TXDLY3_TX_ARWCK_DLY_B1);
3608 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), wrlevel_dqs_delay[0], SHU_R0_B0_TXDLY3_TX_ARWCKB_DLY_B0);
3609 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), wrlevel_dqs_delay[1], SHU_R0_B1_TXDLY3_TX_ARWCKB_DLY_B1);
3610 		}
3611 	}
3612 
3613 	vSetRank(p, backup_rank);
3614 
3615 	msg("[WriteLevelingPosCal] DQS PI B0/B1 = %d/%d\n", wrlevel_dqs_delay[0], wrlevel_dqs_delay[1]);
	return DRAM_OK;
3616 }
3617 #endif
3618 
3619 #define SET_PATTERN_MANUALLY_FOR_DEBUG 1
3620 DRAM_STATUS_T DramcWriteLeveling(DRAMC_CTX_T *p, u8 isAutoK, WLEV_DELAY_BASED_T stDelayBase)
3621 {
3622 // Note that below procedure is based on "ODT off"
3623 	DRAM_STATUS_T KResult = DRAM_FAIL;
3624 
3625 	U8 byte_i, rank_i, ucDoneFlg = 0;
3626 	DRAM_RANK_T backup_rank;
3627 
3628 	S32 wrlevel_dqs_delay[DQS_NUMBER]; // per-DQS-byte delay
3629 
3630 	S32 s4DlyBegin, s4DlyEnd;
3631 	U8 u1PIStep;
3632 	U8 u1OverBoundCnt = 0;
3633 	S16 PI_bound = 64;
3634 
3635 	//When doing write leveling, make sure auto refresh is disabled
3636 	vAutoRefreshSwitch(p, DISABLE);
3637 
3638 	// error handling
3639 	if (!p)
3640 	{
3641 		err("context NULL\n");
3642 		return DRAM_FAIL;
3643 	}
3644 
3645 #if VENDER_JV_LOG
3646 		vPrintCalibrationBasicInfo_ForJV(p);
3647 #else
3648 		vPrintCalibrationBasicInfo(p);
3649 #endif
3650 
3651 
3652 	fgwrlevel_done = 0;
3653 	backup_rank = u1GetRank(p);
3654 
3655 	//DramcRankSwap(p, p->rank);
3656 	//tx_rank_sel is selected by SW //Lewis@20180604: tx_rank_sel is selected by SW in WL if TMRRI design has changed.
3657 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), p->rank, TX_SET0_TXRANK);
3658 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), 1, TX_SET0_TXRANKFIX); //TXRANKFIX should be written after TXRANK
3659 
3660 	// backup mode settings
3661 	U32 u4RegBackupAddress[] =
3662 	{
3663 		(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL)),
3664 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0)),
3665 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL1)),
3666 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL3)),
3667 		(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL5)),
3668 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_VREF)),			//in O1PathOnOff()
3669 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_VREF)),			//in O1PathOnOff()
3670 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL)),	//in O1PathOnOff()
3671 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_PHY_VREF_SEL)),	//in O1PathOnOff()
3672 		(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL))
3673 	};
3674 	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
3675 
3676 
3677 	//default set DRAM FAIL
3678 	vSetCalibrationResult(p, DRAM_CALIBRATION_WRITE_LEVEL, DRAM_FAIL);
3679 
3680 #if MRW_CHECK_ONLY
3681 	mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
3682 #endif
3683 
3684 	if (p->isWLevInitShift[p->channel] == FALSE)
3685 	{
3686 		// It must be PI_BASED or FAIL!!
3687 		ASSERT(stDelayBase == PI_BASED);
3688 
3689 		p->isWLevInitShift[p->channel] = TRUE;
3690 
3691 		// This flow is executed only once, so all ranks (rank0/1) are adjusted at once.
3692 		ShiftDQUI_AllRK(p, -WRITE_LEVELING_MOVD_DQS, ALL_BYTES);
3693 		ShiftDQ_OENUI_AllRK(p, -WRITE_LEVELING_MOVD_DQS, ALL_BYTES);
3694 		ShiftDQSWCK_UI(p, -WRITE_LEVELING_MOVD_DQS, ALL_BYTES);
3695 
3696 #if (__LP5_COMBO__ == TRUE)
3697 		if (TRUE == is_lp5_family(p))
3698 		{
3699 			// For DLY based WCK leveling
3700 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ13), 0, SHU_B0_DQ13_RG_TX_ARDQ_DLY_LAT_EN_B0);
3701 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ13), 0, SHU_B1_DQ13_RG_TX_ARDQ_DLY_LAT_EN_B1);
3702 
3703 			// Set DQS DLY-based delay to 16
3704 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), STORAGED_DLY_UNIT, SHU_R0_B0_TXDLY3_TX_ARWCK_DLY_B0);
3705 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), STORAGED_DLY_UNIT, SHU_R0_B1_TXDLY3_TX_ARWCK_DLY_B1);
3706 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), STORAGED_DLY_UNIT, SHU_R0_B0_TXDLY3_TX_ARWCKB_DLY_B0);
3707 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), STORAGED_DLY_UNIT, SHU_R0_B1_TXDLY3_TX_ARWCKB_DLY_B1);
3708 		}
3709 #endif
3710 		// Set DQS PI-based delay to 0
3711 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), 0, SHU_R0_B0_DQ0_ARPI_PBYTE_B0);  //rank0, byte0, DQS delay
3712 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), 0, SHU_R0_B1_DQ0_ARPI_PBYTE_B1);  //rank0, byte1, DQS delay
3713 
3714 	}
3715 
3716 	// decide algorithm parameters according to freq.(PI mode/ phase mode)
3717 	WriteLevelingScanRange_PI(p, &s4DlyBegin, &s4DlyEnd, &u1PIStep, &PI_bound, stDelayBase);
3718 
3719 	// AutoK is not supported in delay-cell-based mode.
3720 	if (stDelayBase == DLY_BASED)
3721 		isAutoK = FALSE;
3722 
3723 
3724 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_WRITELEVELING)
3725 	if (p->femmc_Ready == 1)
3726 	{
3727 		wrlevel_dqs_final_delay[p->rank][0] = p->pSavetimeData->u1WriteLeveling_bypass_Save[p->channel][p->rank][0];
3728 		wrlevel_dqs_final_delay[p->rank][1] = p->pSavetimeData->u1WriteLeveling_bypass_Save[p->channel][p->rank][1];
3729 
3730 		ucDoneFlg = 0xff;
3731 		KResult = DRAM_OK;
3732 		vSetCalibrationResult(p, DRAM_CALIBRATION_WRITE_LEVEL, DRAM_FAST_K);
3733 	}
3734 #endif
3735 
3736 	if (u1OverBoundCnt > 0)
3737 		ShiftDQSWCK_UI(p, -u1OverBoundCnt * (PI_bound / DQPI_PER_UI), ALL_BYTES);
3738 
3739 	if (ucDoneFlg == 0xff)
3740 	{
3741 		// all bytes are done
3742 		fgwrlevel_done = 1;
3743 		KResult = DRAM_OK;
3744 	}
3745 	else
3746 	{
3747 		KResult = DRAM_FAIL;
3748 		#if __FLASH_TOOL_DA__
3749 		PINInfo_flashtool.WL_ERR_FLAG|=(0x1<<(p->channel*2+p->rank));
3750 		#endif
3751 	}
3752 	vSetCalibrationResult(p, DRAM_CALIBRATION_WRITE_LEVEL, KResult);
3753 	msg2("pass bytecount = 0x%x (0xff: all bytes pass) \n\n", ucDoneFlg);
3754 
3755 #if defined(FOR_HQA_TEST_USED) && defined(FOR_HQA_REPORT_USED)
3756 	if (gHQALog_flag == 1)
3757 	{
3758 		for (byte_i = 0; byte_i < (p->data_width / DQS_BIT_NUMBER); byte_i++)
3759 		{
3760 			HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT1, "", "WriteLeveling_DQS", byte_i, wrlevel_dqs_final_delay[p->rank][byte_i], NULL);
3761 		}
3762 	}
3763 #endif
3764 
3765 #if (__LP5_COMBO__ == TRUE)
3766 	if (TRUE == is_lp5_family(p))
3767 
3768 		vSetLP5Dram_WCK2CK_WlevOnOff(p, DISABLE);
3769 	else
3770 #endif
3771 		vSetDramMRWriteLevelingOnOff(p, DISABLE); // Exit DRAM write leveling mode: clear MR2 OP[7]
3772 
3773 
3774 	// Write leveling enable OFF
3775 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL0), 0, CBT_WLEV_CTRL0_WRITE_LEVEL_EN);
3776 
3777 	//Disable DQ_O1, SELO1ASO=0 for power saving
3778 	O1PathOnOff(p, OFF);
3779 
3780 	//tx_rank_sel is selected by HW
3781 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), 0, TX_SET0_TXRANK);
3782 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), 0, TX_SET0_TXRANKFIX); //TXRANKFIX should be write after TXRANK
3783 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_SET0), 0, TX_SET0_TXRANKFIX); //TXRANKFIX should be written after TXRANK
3784 	//restore registers.
3785 	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
3786 
3787 	// Calculate DQS "PI" delay, nothing to do with delay cell
3788 	for (byte_i = 0; byte_i < (p->data_width / DQS_BIT_NUMBER); byte_i++)
3789 	{
3790 		msg("Write leveling (Byte %d): %d", byte_i, wrlevel_dqs_final_delay[p->rank][byte_i]);
3791 		reg_msg("Write leveling (Byte %d): %d", byte_i, wrlevel_dqs_final_delay[p->rank][byte_i]);
3792 		if (wrlevel_dqs_final_delay[p->rank][byte_i] >= PI_bound)
3793 		{
3794 			ShiftDQSWCK_UI(p, (wrlevel_dqs_final_delay[p->rank][byte_i] / PI_bound) * (PI_bound / DQPI_PER_UI), byte_i);
3795 
3796 			wrlevel_dqs_final_delay[p->rank][byte_i] %= PI_bound;
3797 		}
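		/* Illustrative example (numbers assumed): with PI_bound = 64 and
		 * DQPI_PER_UI = 32, a calibrated delay of 70 moves the DQS selph by
		 * (70/64)*(64/32) = 2 UI and keeps 70 % 64 = 6 as the remaining PI
		 * delay programmed below. */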
3798 
3799 		wrlevel_dqs_delay[byte_i] = wrlevel_dqs_final_delay[p->rank][byte_i];
3800 		msg(" => %d\n", wrlevel_dqs_delay[byte_i]);
3801 		reg_msg(" => %d\n", wrlevel_dqs_delay[byte_i]);
3802 	}
3803 
3804 	for (rank_i = p->rank; rank_i < RANK_MAX; rank_i++)
3805 	{
3806 		vSetRank(p, rank_i);
3807 
3808 		// set to best values for  DQS
3809 		if (stDelayBase == PI_BASED)
3810 		{
3811 			// Adjust DQS output delay.
3812 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), wrlevel_dqs_delay[0], SHU_R0_B0_DQ0_ARPI_PBYTE_B0);
3813 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), wrlevel_dqs_delay[1], SHU_R0_B1_DQ0_ARPI_PBYTE_B1);
3814 		}
3815 		else // stDelayBase == DLY_BASED
3816 		{
3817 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), wrlevel_dqs_delay[0], SHU_R0_B0_TXDLY3_TX_ARWCK_DLY_B0);
3818 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), wrlevel_dqs_delay[1], SHU_R0_B1_TXDLY3_TX_ARWCK_DLY_B1);
3819 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), wrlevel_dqs_delay[0], SHU_R0_B0_TXDLY3_TX_ARWCKB_DLY_B0);
3820 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), wrlevel_dqs_delay[1], SHU_R0_B1_TXDLY3_TX_ARWCKB_DLY_B1);
3821 		}
3822 	}
3823 
3824 	vSetRank(p, backup_rank);
3825 
3826 	msg3("[DramcWriteLeveling] Done\n\n");
3827 
3828 	return KResult;
3829 }
3830 #endif //SIMULATION_WRITE_LEVELING
3831 
3832 #if (SIMULATION_DUTY_CYC_MONITOR == 1)
3833 static U8 FetchRGSettingVal(int step_val)
3834 {
3835 	if (step_val <= 0)
3836 		return (U8)(-step_val);
3837 	else
3838 		return ((U8)step_val | 0x08);
3839 }
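/* As coded above, the MR30 DCA step is sign/magnitude encoded: a step of 0 or
 * less returns its magnitude (e.g. -3 -> 0x03), while a positive step sets
 * bit 3 (e.g. +3 -> 0x0B). The same value is written to both DCAU and DCAL
 * in the loop below. */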
3840 
3841 DRAM_STATUS_T DramcDutyCycleMonitor(DRAMC_CTX_T *p)
3842 {
3843 	U8 backup_rank;
3844 //	  U8 u8ResultDutyCycMonitor[WHOLE_STEPS_NUM] = {0};
3845 
3846 	// error handling
3847 	if (!p)
3848 	{
3849 		err("context NULL\n");
3850 		return DRAM_FAIL;
3851 	}
3852 
3853 	vAutoRefreshSwitch(p, DISABLE);
3854 	//CKEFixOnOff(p, p->rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
3855 
3856 	backup_rank = u1GetRank(p);
3857 
3858 	RunTime_SW_Cmd(p, RUNTIME_SWCMD_CAS_FS);
3859 
3860 	int i = -7;
3861 	for (i = -7; i <= 7; i++)
3862 	{
3863 		// MRW MR30 OP[7:4] = i(Set DCAU) and OP[3:0] = i(Set DCAL)
3864 		U8 u8RGSettingVal = FetchRGSettingVal(i);
3865 		err("Set value %d into MR30\n", u8RGSettingVal);
3866 		MRWriteFldMulti(p, 30, P_Fld(u8RGSettingVal, MR30_DCAU) |
3867 							   P_Fld(u8RGSettingVal, MR30_DCAL),
3868 							   TO_MR);
3869 
3870 		// Start duty cycle monitor
3871 		DramcMRWriteFldAlign(p, 26, 1, MR26_DCM_START_STOP, TO_MR);
3872 
3873 		// Delay tDCMM(2us)
3874 		mcDELAY_US(2);
3875 
3876 		// Duty cycle monitor Flip 0 -> 1, and store result of flip = 0
3877 		DramcMRWriteFldAlign(p, 26, 1, MR26_DCM_FLIP, TO_MR);
3878 
3879 		// Delay tDCMM(2us)
3880 		mcDELAY_US(2);
3881 
3882 		// Duty cycle monitor Flip 1 -> 0, and store result of flip = 1
3883 		DramcMRWriteFldAlign(p, 26, 0, MR26_DCM_FLIP, TO_MR);
3884 
3885 		// Delay tDCMM(2us)
3886 		mcDELAY_US(2);
3887 
3888 		// Stop Duty cycle monitor
3889 		DramcMRWriteFldAlign(p, 26, 0, MR26_DCM_START_STOP, TO_MR);
3890 
3891 		// Delay tMRD
3892 		mcDELAY_US(2);
3893 
3894 		err("Wait tMRD and MRR MR26\n");
3895 
3896 		///TODO:  Read back result MR25[5:2]
3897 		// Store result into u8ResultDutyCycMonitor[]
3898 
3899 	}
3900 	///TODO: Find and set the best MR30 value
3901 
3902 	RunTime_SW_Cmd(p, RUNTIME_SWCMD_CAS_OFF);
3903 
3904 	vAutoRefreshSwitch(p, ENABLE);
3905 	//CKEFixOnOff(p, p->rank, CKE_DYNAMIC, CKE_WRITE_TO_ONE_CHANNEL);
3906 
3907 	vSetRank(p, backup_rank);
	return DRAM_OK;
3908 }
3909 #endif // SIMULATION_DUTY_CYC_MONITOR
3910 
3911 void vResetDelayChainBeforeCalibration(DRAMC_CTX_T *p)
3912 {
3913 	U8 u1RankIdx, u1RankIdxBak;
3914 	U32 u4WbrBackup = GetDramcBroadcast();
3915 
3916 	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
3917 	u1RankIdxBak = u1GetRank(p);
3918 
3919 	for(u1RankIdx=RANK_0; u1RankIdx<RANK_MAX; u1RankIdx++)
3920 	{
3921 		vSetRank(p, u1RankIdx);
3922 
3923 		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_R0_CA_TXDLY0,
3924 			P_Fld(0, SHU_R0_CA_TXDLY0_TX_ARCA0_DLY) |
3925 			P_Fld(0, SHU_R0_CA_TXDLY0_TX_ARCA1_DLY) |
3926 			P_Fld(0, SHU_R0_CA_TXDLY0_TX_ARCA2_DLY) |
3927 			P_Fld(0, SHU_R0_CA_TXDLY0_TX_ARCA3_DLY));
3928 
3929 		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_R0_CA_TXDLY1,
3930 			P_Fld(0, SHU_R0_CA_TXDLY1_TX_ARCA4_DLY) |
3931 			P_Fld(0, SHU_R0_CA_TXDLY1_TX_ARCA5_DLY) |
3932 			P_Fld(0, SHU_R0_CA_TXDLY1_TX_ARCA6_DLY) |
3933 			P_Fld(0, SHU_R0_CA_TXDLY1_TX_ARCA7_DLY));
3934 
3935 		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_R0_B0_TXDLY0, P_Fld(0, SHU_R0_B0_TXDLY0_TX_ARDQ0_DLY_B0)
3936 			| P_Fld(0, SHU_R0_B0_TXDLY0_TX_ARDQ1_DLY_B0)
3937 			| P_Fld(0, SHU_R0_B0_TXDLY0_TX_ARDQ2_DLY_B0)
3938 			| P_Fld(0, SHU_R0_B0_TXDLY0_TX_ARDQ3_DLY_B0));
3939 		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_R0_B0_TXDLY1, P_Fld(0, SHU_R0_B0_TXDLY1_TX_ARDQ4_DLY_B0)
3940 			| P_Fld(0, SHU_R0_B0_TXDLY1_TX_ARDQ5_DLY_B0)
3941 			| P_Fld(0, SHU_R0_B0_TXDLY1_TX_ARDQ6_DLY_B0)
3942 			| P_Fld(0, SHU_R0_B0_TXDLY1_TX_ARDQ7_DLY_B0));
3943 
3944 		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_R0_B1_TXDLY0, P_Fld(0, SHU_R0_B1_TXDLY0_TX_ARDQ0_DLY_B1)
3945 			| P_Fld(0, SHU_R0_B1_TXDLY0_TX_ARDQ1_DLY_B1)
3946 			| P_Fld(0, SHU_R0_B1_TXDLY0_TX_ARDQ2_DLY_B1)
3947 			| P_Fld(0, SHU_R0_B1_TXDLY0_TX_ARDQ3_DLY_B1));
3948 		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_R0_B1_TXDLY1, P_Fld(0, SHU_R0_B1_TXDLY1_TX_ARDQ4_DLY_B1)
3949 			| P_Fld(0, SHU_R0_B1_TXDLY1_TX_ARDQ5_DLY_B1)
3950 			| P_Fld(0, SHU_R0_B1_TXDLY1_TX_ARDQ6_DLY_B1)
3951 			| P_Fld(0, SHU_R0_B1_TXDLY1_TX_ARDQ7_DLY_B1));
3952 		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_R0_B0_TXDLY3, 0x0, SHU_R0_B0_TXDLY3_TX_ARDQM0_DLY_B0);
3953 		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_R0_B1_TXDLY3, 0x0, SHU_R0_B1_TXDLY3_TX_ARDQM0_DLY_B1);
3954 	}
3955 
3956 	vSetRank(p, u1RankIdxBak);
3957 	DramcBroadcastOnOff(u4WbrBackup);
3958 }
3959 
3960 
3961 //Reset PHY to prevent glitch when change DQS gating delay or RX DQS input delay
3962 // [Lynx] Evere_st: cannot reset a single channel; all DRAMC and PHY blocks must be reset together.
3963 void DramPhyReset(DRAMC_CTX_T *p)
3964 {
3965 	// Evere_st change reset order : reset DQS before DQ, move PHY reset to final.
3966 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RX_SET0), 1, RX_SET0_RDATRST);// read data counter reset
3967 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 1, MISC_CTRL1_R_DMPHYRST);
3968 
3969 	//RG_ARCMD_RESETB & RG_ARDQ_RESETB_B0/1 only reset once at init, Justin Chan.
3970 	///TODO: need to confirm RG_ARCMD_RESETB & RG_ARDQ_RESETB_B0/1 is reset at mem.c
3971 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ9),
3972 			P_Fld(0, B0_DQ9_RG_RX_ARDQS0_STBEN_RESETB_B0) |
3973 			P_Fld(0, B0_DQ9_RG_RX_ARDQ_STBEN_RESETB_B0));
3974 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ9),
3975 			P_Fld(0, B1_DQ9_RG_RX_ARDQS0_STBEN_RESETB_B1) |
3976 			P_Fld(0, B1_DQ9_RG_RX_ARDQ_STBEN_RESETB_B1));
3977 	mcDELAY_US(1); // need at least 10ns; 1us delay used
3978 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ9),
3979 			P_Fld(1, B1_DQ9_RG_RX_ARDQS0_STBEN_RESETB_B1) |
3980 			P_Fld(1, B1_DQ9_RG_RX_ARDQ_STBEN_RESETB_B1));
3981 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ9),
3982 			P_Fld(1, B0_DQ9_RG_RX_ARDQS0_STBEN_RESETB_B0) |
3983 			P_Fld(1, B0_DQ9_RG_RX_ARDQ_STBEN_RESETB_B0));
3984 
3985 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0, MISC_CTRL1_R_DMPHYRST);
3986 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RX_SET0), 0, RX_SET0_RDATRST);// read data counter reset
3987 }
3988 
3989 #if SIMULATION_LP4_ZQ
3990 //-------------------------------------------------------------------------
3991 /** DramcZQCalibration
3992  *	start Dram ZQ calibration.
3993  *	@param p				Pointer of context created by DramcCtxCreate.
3994  *	@retval status			(DRAM_STATUS_T): DRAM_OK or DRAM_FAIL
3995  */
3996 //-------------------------------------------------------------------------
3997 #if ZQ_SWCMD_MODE
3998 static DRAM_STATUS_T ZQ_SWCMD_MODE_Cal(DRAMC_CTX_T *p, U8 rank)
3999 {
4000 	U32 u4Response;
4001 	U32 u4TimeCnt = TIME_OUT_CNT;
4002 	U32 u4SWCMDEN, u4SWCMDCTRL, u4SPDCTRL, u4CKECTRL;
4003 
4004 	// Backup rank, CKE fix on/off, HW MIOCK control settings
4005 	u4SWCMDEN = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN));
4006 	u4SWCMDCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0));
4007 	u4SPDCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL));
4008 	u4CKECTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL));
4009 
4010 	msg3("[ZQCalibration]\n");
4011 	//mcFPRINTF(fp_A60501, "[ZQCalibration]\n");
4012 
4013 	// Disable HW MIOCK control to make CLK always on
4014 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_APHYCKCG_FIXOFF);
4015 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_TCKFIXON);
4016 	mcDELAY_US(1);
4017 
4018 	//if CKE2RANK=1, only need to set CKEFIXON; it will apply to both ranks.
4019 	CKEFixOnOff(p, rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
4020 
4021 	//select rank
4022 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), rank, SWCMD_CTRL0_SWTRIG_ZQ_RK);
4023 
4024 	//ZQCAL Start
4025 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_ZQCEN_SWTRIG);
4026 
4027 	do
4028 	{
4029 		u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP3), SPCMDRESP3_ZQC_SWTRIG_RESPONSE);
4030 		u4TimeCnt --;
4031 		mcDELAY_US(1);	// Wait tZQCAL(min) 1us or wait next polling
4032 
4033 		msg3("%d- ", u4TimeCnt);
4034 		//mcFPRINTF(fp_A60501, "%d- ", u4TimeCnt);
4035 	}while((u4Response==0) &&(u4TimeCnt>0));
4036 
4037 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_ZQCEN_SWTRIG);
4038 
4039 	if(u4TimeCnt==0)//time out
4040 	{
4041 		vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_FAIL);
4042 		msg("ZQCAL Start fail (time out)\n");
4043 		//mcFPRINTF(fp_A60501, "ZQCAL Start fail (time out)\n");
4044 		return DRAM_FAIL;
4045 	}
4046 
4047 	// [JC] delay tZQCAL
4048 	mcDELAY_US(1);
4049 	u4TimeCnt = TIME_OUT_CNT;
4050 
4051 	//ZQCAL Latch
4052 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_ZQLATEN_SWTRIG);
4053 	do
4054 	{
4055 		u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP3), SPCMDRESP3_ZQLAT_SWTRIG_RESPONSE);
4056 		u4TimeCnt --;
4057 		mcDELAY_US(1);// Wait tZQLAT 30ns or wait next polling
4058 
4059 		msg3("%d=", u4TimeCnt);
4060 		//mcFPRINTF(fp_A60501, "%d= ", u4TimeCnt);
4061 	}while((u4Response==0) &&(u4TimeCnt>0));
4062 
4063 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_ZQLATEN_SWTRIG);
4064 
4065 	if(u4TimeCnt==0)//time out
4066 	{
4067 		vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_FAIL);
4068 		msg("ZQCAL Latch fail (time out)\n");
4069 		//mcFPRINTF(fp_A60501, "ZQCAL Latch fail (time out)\n");
4070 		return DRAM_FAIL;
4071 	}
4072 
4073 	// [JC] delay tZQLAT
4074 	mcDELAY_US(1);
4075 
4076 	// Restore rank, CKE fix on, HW MIOCK control settings
4077 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), u4SWCMDEN);
4078 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u4SWCMDCTRL);
4079 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), u4SPDCTRL);
4080 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL), u4CKECTRL);
4081 
4082 	vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_OK);
4083 	msg3("\n[DramcZQCalibration] Done\n\n");
4084 	//mcFPRINTF(fp_A60501, "\n[DramcZQCalibration] Done\n\n");
4085 
4086 	return DRAM_OK;
4087 }
4088 #endif
4089 #if ZQ_RTSWCMD_MODE
4090 DRAM_STATUS_T ZQ_RTSWCMD_MODE_Cal(DRAMC_CTX_T *p, U8 rank)
4091 {
4092 	U32 u4Response;
4093 	U32 u4TimeCnt = TIME_OUT_CNT;
4094 	U32 u4SWCMDEN, u4SWCMDCTRL, u4MPCCTRL, u4RTSWCMD, u4SPDCTRL, u4CKECTRL;
4095 
4096 	// Backup rank, CKE fix on/off, HW MIOCK control settings
4097 	u4SWCMDEN = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN));
4098 	u4SWCMDCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL2));
4099 	u4MPCCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL));
4100 	u4RTSWCMD = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_RTSWCMD_CNT));
4101 	u4SPDCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL));
4102 	u4CKECTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL));
4103 
4104 	msg3("[ZQCalibration]\n");
4105 	//mcFPRINTF(fp_A60501, "[ZQCalibration]\n");
4106 
4107 	// Disable HW MIOCK control to make CLK always on
4108 	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_APHYCKCG_FIXOFF);
4109 	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_TCKFIXON);
4110 	mcDELAY_US(1);
4111 
4112 	//if CKE2RANK=1, only need to set CKEFIXON; it will apply to both ranks.
4113 	//CKEFixOnOff(p, rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
4114 
4115 	//select rank
4116 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL2),
4117 		P_Fld(rank, SWCMD_CTRL2_RTSWCMD_RK) |
4118 		P_Fld(0x20, SWCMD_CTRL2_RTSWCMD_AGE));
4119 
4120 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL), 0x1, MPC_CTRL_RTSWCMD_HPRI_EN);
4121 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RTSWCMD_CNT), 0x2a, RTSWCMD_CNT_RTSWCMD_CNT);
4122 
4123 	//ZQCAL Start
4124 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0x5, SWCMD_EN_RTSWCMD_SEL);
4125 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_RTSWCMDEN);
4126 
4127 	do
4128 	{
4129 		u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP3), SPCMDRESP3_RTSWCMD_RESPONSE);
4130 		u4TimeCnt --;
4131 		mcDELAY_US(1);	// Wait tZQCAL(min) 1us or wait next polling
4132 
4133 		msg3("%d- ", u4TimeCnt);
4134 		//mcFPRINTF(fp_A60501, "%d- ", u4TimeCnt);
4135 	}while((u4Response==0) &&(u4TimeCnt>0));
4136 
4137 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_RTSWCMDEN);
4138 
4139 	if(u4TimeCnt==0)//time out
4140 	{
4141 		vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_FAIL);
4142 		msg("ZQCAL Start fail (time out)\n");
4143 		//mcFPRINTF(fp_A60501, "ZQCAL Start fail (time out)\n");
4144 		return DRAM_FAIL;
4145 	}
4146 
4147 	// [JC] delay tZQCAL
4148 	mcDELAY_US(1);
4149 	u4TimeCnt = TIME_OUT_CNT;
4150 
4151 	//ZQCAL Latch
4152 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0x6, SWCMD_EN_RTSWCMD_SEL);
4153 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_RTSWCMDEN);
4154 
4155 	do
4156 	{
4157 		u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP3), SPCMDRESP3_RTSWCMD_RESPONSE);
4158 		u4TimeCnt --;
4159 		mcDELAY_US(1);// Wait tZQLAT 30ns or wait next polling
4160 
4161 		msg3("%d=", u4TimeCnt);
4162 		//mcFPRINTF(fp_A60501, "%d= ", u4TimeCnt);
4163 	}while((u4Response==0) &&(u4TimeCnt>0));
4164 
4165 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_RTSWCMDEN);
4166 
4167 	if(u4TimeCnt==0)//time out
4168 	{
4169 		vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_FAIL);
4170 		msg("ZQCAL Latch fail (time out)\n");
4171 		//mcFPRINTF(fp_A60501, "ZQCAL Latch fail (time out)\n");
4172 		return DRAM_FAIL;
4173 	}
4174 
4175 	// [JC] delay tZQLAT
4176 	mcDELAY_US(1);
4177 
4178 	// Restore rank, CKE fix on, HW MIOCK control settings
4179 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), u4SWCMDEN);
4180 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL2), u4SWCMDCTRL);
4181 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL), u4MPCCTRL);
4182 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_RTSWCMD_CNT), u4RTSWCMD);
4183 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), u4SPDCTRL);
4184 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL), u4CKECTRL);
4185 
4186 	vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_OK);
4187 	msg3("\n[DramcZQCalibration] Done\n\n");
4188 	//mcFPRINTF(fp_A60501, "\n[DramcZQCalibration] Done\n\n");
4189 
4190 	return DRAM_OK;
4191 }
4192 #endif
4193 #if ZQ_SCSM_MODE
4194 DRAM_STATUS_T ZQ_SCSM_MODE_Cal(DRAMC_CTX_T *p, U8 rank)
4195 {
4196 	U32 u4Response;
4197 	U32 u4TimeCnt = TIME_OUT_CNT;
4198 	U32 u4SWCMDEN, u4MPCCTRL, u4SWCMDCTRL, u4SPDCTRL, u4CKECTRL;
4199 
4200 	// Backup rank, CKE fix on/off, HW MIOCK control settings
4201 	u4SWCMDEN = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN));
4202 	u4SWCMDCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0));
4203 	u4MPCCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_MPC_OPTION));
4204 	u4SPDCTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL));
4205 	u4CKECTRL = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL));
4206 
4207 	msg3("[ZQCalibration]\n");
4208 	//mcFPRINTF(fp_A60501, "[ZQCalibration]\n");
4209 
4210 	// Disable HW MIOCK control to make CLK always on
4211 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_APHYCKCG_FIXOFF);
4212 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_TCKFIXON);
4213 	mcDELAY_US(1);
4214 
4215 	//if CKE2RANK=1, only need to set CKEFIXON; it will apply to both ranks.
4216 	CKEFixOnOff(p, rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
4217 
4218 	//Use rank swap or MRSRK to select rank
4219 	//DramcRankSwap(p, p->rank);
4220 	//!!R_DMMRSRK(R_DMMPCRKEN=1) specify rank0 or rank1
4221 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), rank, SWCMD_CTRL0_MRSRK);
4222 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MPC_OPTION), 1, MPC_OPTION_MPCRKEN);
4223 
4224 	//ZQCAL Start
4225 	//R_DMZQCEN, 0x1E4[4]=1 for ZQCal Start
4226 	//Wait zqc_response=1 (dramc_conf_nao, 0x3b8[4])
4227 	//R_DMZQCEN, 0x1E4[4]=0
4228 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_ZQCEN);
4229 	do
4230 	{
4231 		u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_ZQC_RESPONSE);
4232 		u4TimeCnt --;
4233 		mcDELAY_US(1);	// Wait tZQCAL(min) 1us or wait next polling
4234 
4235 		msg3("%d- ", u4TimeCnt);
4236 		//mcFPRINTF(fp_A60501, "%d- ", u4TimeCnt);
4237 	}while((u4Response==0) &&(u4TimeCnt>0));
4238 
4239 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_ZQCEN);
4240 
4241 	if(u4TimeCnt==0)//time out
4242 	{
4243 		vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_FAIL);
4244 		msg("ZQCAL Start fail (time out)\n");
4245 		//mcFPRINTF(fp_A60501, "ZQCAL Start fail (time out)\n");
4246 		return DRAM_FAIL;
4247 	}
4248 
4249 	// [JC] delay tZQCAL
4250 	mcDELAY_US(1);
4251 	u4TimeCnt = TIME_OUT_CNT;
4252 
4253 	//ZQCAL Latch
4254 	//R_DMZQLATEN, 0x1E4[6]=1 for ZQCal latch
4255 	//Wait zqlat_response=1 (dramc_conf_nao, 0x3b8[28])
4256 	//R_DMZQLATEN, 0x1E4[6]=0
4257 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 1, SWCMD_EN_ZQLATEN);
4258 	do
4259 	{
4260 		u4Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_ZQLAT_RESPONSE);
4261 		u4TimeCnt --;
4262 		mcDELAY_US(1);// Wait tZQLAT 30ns or wait next polling
4263 
4264 		msg3("%d=", u4TimeCnt);
4265 		//mcFPRINTF(fp_A60501, "%d= ", u4TimeCnt);
4266 	}while((u4Response==0) &&(u4TimeCnt>0));
4267 
4268 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_ZQLATEN);
4269 
4270 	if(u4TimeCnt==0)//time out
4271 	{
4272 		vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_FAIL);
4273 		msg("ZQCAL Latch fail (time out)\n");
4274 		//mcFPRINTF(fp_A60501, "ZQCAL Latch fail (time out)\n");
4275 		return DRAM_FAIL;
4276 	}
4277 
4278 	// [JC] delay tZQLAT
4279 	mcDELAY_US(1);
4280 
4281 	// Restore rank, CKE fix on, HW MIOCK control settings
4282 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), u4SWCMDEN);
4283 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u4SWCMDCTRL);
4284 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_MPC_OPTION), u4MPCCTRL);
4285 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), u4SPDCTRL);
4286 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_CKECTRL), u4CKECTRL);
4287 
4288 	vSetCalibrationResult(p, DRAM_CALIBRATION_ZQ, DRAM_OK);
4289 	msg3("\n[DramcZQCalibration] Done\n\n");
4290 	//mcFPRINTF(fp_A60501, "\n[DramcZQCalibration] Done\n\n");
4291 
4292 	return DRAM_OK;
4293 }
4294 #endif
4295 
4296 DRAM_STATUS_T DramcZQCalibration(DRAMC_CTX_T *p, U8 rank)
4297 {
4298 	#if ZQ_SWCMD_MODE
4299 	return ZQ_SWCMD_MODE_Cal(p, rank);
4300 	#elif ZQ_RTSWCMD_MODE
4301 	return ZQ_RTSWCMD_MODE_Cal(p, rank);
4302 	#else //ZQ_SCSM_MODE
4303 	return ZQ_SCSM_MODE_Cal(p, rank);
4304 	#endif
4305 }
4306 #endif
4307 
4308 #if (SIMULATION_GATING == 1)
4309 #define GATING_PATTERN_NUM_LP5		0x23
4310 #define GATING_GOLDEND_DQSCNT_LP5	0x4646
4311 #define RXDQS_GATING_AUTO_DBG_REG_NUM		6
4312 /* Preamble & Postamble setting. Currently use macro to define.
4313  * Later may use speed or MR setting to decide
4314  * !!! REVIEW !!!
4315  */
4316 
4317 #if GATING_ADJUST_TXDLY_FOR_TRACKING
4318 U8 u1TXDLY_Cal_min =0xff, u1TXDLY_Cal_max=0;
4319 U8 ucbest_coarse_mck_backup[RANK_MAX][DQS_NUMBER];
4320 U8 ucbest_coarse_ui_backup[RANK_MAX][DQS_NUMBER];
4321 U8 ucbest_coarse_mck_P1_backup[RANK_MAX][DQS_NUMBER];
4322 U8 ucbest_coarse_ui_P1_backup[RANK_MAX][DQS_NUMBER];
4323 #endif
4324 
4325 
4326 struct rxdqs_gating_cal {
4327 	U8 dqsien_dly_mck;
4328 	U8 dqsien_dly_ui;
4329 	U8 dqsien_dly_pi;
4330 
4331 	U8 dqsien_dly_mck_p1;
4332 	U8 dqsien_dly_ui_p1;
4333 
4334 	U8 dqsien_pi_adj_step;
4335 
4336 	U8 dqsien_pi_per_ui;
4337 	U8 dqsien_ui_per_mck;
4338 	U8 dqsien_freq_div;
4339 };
4340 
4341 struct rxdqs_gating_trans {
4342 	U8 dqs_lead[DQS_NUMBER];
4343 	U8 dqs_lag[DQS_NUMBER];
4344 	U8 dqs_high[DQS_NUMBER];
4345 #if GATING_LEADLAG_LOW_LEVEL_CHECK
4346 	U8 dqs_low[DQS_NUMBER];
4347 #endif
4348 	U8 dqs_transition[DQS_NUMBER];
4349 	U8 dqs_transitioned[DQS_NUMBER];
4350 	U8 dqsien_dly_mck_leadlag[DQS_NUMBER];
4351 	U8 dqsien_dly_ui_leadlag[DQS_NUMBER];
4352 	U8 dqsien_dly_pi_leadlag[DQS_NUMBER];
4353 };
4354 
4355 struct rxdqs_gating_best_win {
4356 	U8 best_dqsien_dly_mck[DQS_NUMBER];
4357 	U8 best_dqsien_dly_ui[DQS_NUMBER];
4358 	U8 best_dqsien_dly_pi[DQS_NUMBER];
4359 	U8 best_dqsien_dly_mck_p1[DQS_NUMBER];
4360 	U8 best_dqsien_dly_ui_p1[DQS_NUMBER];
4361 	U8 best_dqsien_dly_pi_p1[DQS_NUMBER];
4362 };
4363 
4364 struct rxdqs_gating_auto_param {
4365 	U8 early_break;
4366 	U8 dbg_mode;
4367 
4368 	U8 init_mck;
4369 	U8 init_ui;
4370 	U8 end_mck;
4371 	U8 end_ui;
4372 	U8 pi_offset;
4373 
4374 	U8 burst_len;
4375 };
4376 
4377 #define ENABLE_GATING_AUTOK_WA			1
4378 
4379 #if ENABLE_GATING_AUTOK_WA
4380 U8 __wa__gating_swk_for_autok = 0;
4381 U8 __wa__gating_autok_init_ui[RANK_MAX] = { 0 };
4382 #endif
4383 
4384 #if (__LP5_COMBO__)
4385 static U8 u1GetLp5ReadLatency(DRAMC_CTX_T *p)
4386 {
4387 	U8 read_latency;
4388 	U8 rl, ckr, dvfsc;
4389 
4390 	const U8 au1MR2MappingToRL_wo_dvfsc[2][12] = {
4391 		{3, 4, 5, 6, 8, 9, 10, 12, 13, 14, 15, 17}, /* CKR = 4:1 */
4392 		{6, 8, 10, 12, 16, 18}, /* CKR = 2:1 */
4393 	};
4394 
4395 	///TODO: Spec has not specified these values
4396 	const U8 au1MR2MappingToRL_wi_dvfsc[2][6] = {
4397 		{0xff, 0xff, 0xff, 0xff, 0xff, 0xff}, /* CKR = 4:1 */
4398 		{0xff, 0xff, 0xff, 0xff, 0xff, 0xff}, /* CKR = 2:1 */
4399 	};
4400 
4401 	ckr = (u1MR18Value[p->dram_fsp] >> 7) & 0x1;
4402 	dvfsc = !!(u1MR19Value[p->dram_fsp] & 0x3);
4403 	rl = (u1MR02Value[p->dram_fsp] & 0xf);
4404 
4405 	if (dvfsc)
4406 		read_latency = au1MR2MappingToRL_wi_dvfsc[ckr][rl];
4407 	else
4408 		read_latency = au1MR2MappingToRL_wo_dvfsc[ckr][rl];
4409 
4410 	/* note that the unit of RL is nCK; convert to nWCK */
4411 	if (ckr == 0)
4412 		read_latency *= 4;
4413 	else
4414 		read_latency *= 2;
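	/* Illustrative example (assumed MR values): with CKR = 4:1 (ckr == 0),
	 * DVFSC off and an MR2 RL code of 4, the table above gives 8 nCK, which
	 * converts to 8 * 4 = 32 nWCK. */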
4415 
4416 	msg("ckr = %d, dvfsc = %d, rl = %d, read_latency = %d\n",
4417 		ckr, dvfsc, rl, read_latency);
4418 
4419 	return read_latency;
4420 }
4421 #endif
4422 
4423 static U8 u1GetGatingStartPos(DRAMC_CTX_T *p, U8 u1AutoK)
4424 {
4425 	const U8 au1MR2MappingToRL[2][8] = {{6, 10, 14, 20, 24, 28, 32, 36},   //normal mode
4426 										{6, 10, 16, 22, 26, 32, 36, 40}};  //byte mode
4427 	U8 u1MR0_LatencyMode;
4428 	U8 u1MR2RLValue;
4429 
4430 	u1MR2RLValue = u1MR02Value[p->dram_fsp] & 0x7; //MR2 Op[2:0]
4431 	U8 u1RX_Path_delay_UI, u1RealRL,u1StartUI, u1ExtraMCKfor1_4mode;
4432 	U8 u1MCK2CK_UI, u1ReadDQSINCTL, u1DQSINCTL_UI;
4433 	U8 u4TDQSCK_UI_min;
4434 	U8 u1GatingAheadDQS_UI;
4435 
4436 	/* LPDDR5 uses same bit */
4437 	if(gu2MR0_Value[p->rank] == 0xffff)  //MR0 is not ready
4438 	{
4439 		u1MR0_LatencyMode = CBT_NORMAL_MODE;
4440 	}
4441 	else
4442 	{
4443 		u1MR0_LatencyMode = (gu2MR0_Value[p->rank]>>1) & 0x1; //MR0 OP[1],	0:normal mode,	1:byte mode
4444 	}
4445 
4446 #if (__LP5_COMBO__)
4447 	if (is_lp5_family(p)) {
4448 		u4TDQSCK_UI_min = 500 * p->frequency *2/ 1000000;
4449 		u1RealRL = u1GetLp5ReadLatency(p);
4450 	} else
4451 #endif
4452 	{
4453 		u4TDQSCK_UI_min = 1500 * p->frequency *2/ 1000000;
4454 		u1RealRL = au1MR2MappingToRL[u1MR0_LatencyMode][u1MR2RLValue];
4455 	}
4456 
4457 	///TODO: A60868 does not support LP5 DIV4, current setting is not provided for LP5
4458 	if(vGet_Div_Mode(p) == DIV4_MODE)
4459 	{
4460 		u1MCK2CK_UI = 4;
4461 		u1ExtraMCKfor1_4mode = 1;
4462 		u1GatingAheadDQS_UI = 3;
4463 	}
4464 	else if (vGet_Div_Mode(p) == DIV8_MODE)
4465 	{
4466 		u1MCK2CK_UI = 8;
4467 		u1ExtraMCKfor1_4mode = 0;
4468 #if (__LP5_COMBO__)
4469 	if (is_lp5_family(p)) {
4470 		if (p->frequency <= 1600)
4471 			u1GatingAheadDQS_UI = 1 * u1MCK2CK_UI;
4472 		else if (p->frequency == 1866)
4473 			u1GatingAheadDQS_UI = 4;
4474 		else
4475 			u1GatingAheadDQS_UI = 8;
4476 	} else
4477 #endif
4478 		u1GatingAheadDQS_UI = 5;
4479 	}
4480 	else
4481 	{
4482 		/* DIV16, only for LP5 */
4483 		u1MCK2CK_UI = 16;
4484 		u1ExtraMCKfor1_4mode = 0;
4485 		u1GatingAheadDQS_UI = 8;
4486 	}
4487 
4488 	// RX_Path_delay_UI = RL*2 + tDQSCK_UI<1500~3500ps> - PHY_internal<skip 30ps> - GatingAheadDQS<2UI> + (1 MCK if 1:4 mode)
4489 	u1RX_Path_delay_UI = (u1RealRL<<1) + u4TDQSCK_UI_min - u1GatingAheadDQS_UI + (u1MCK2CK_UI*u1ExtraMCKfor1_4mode);
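	/* Illustrative example (assumed operating point, not from a real part):
	 * LP4 DIV8 at p->frequency = 1600 (DDR3200) with RL = 28 gives
	 * u4TDQSCK_UI_min = 1500 * 1600 * 2 / 1000000 = 4 UI, so
	 * u1RX_Path_delay_UI = 28*2 + 4 - 5 + 0 = 55 UI before the +0/-3
	 * AutoK/SW-K adjustment below. */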
4490 
4491 	u1ReadDQSINCTL = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RK_DQSCTL), MISC_SHU_RK_DQSCTL_DQSINCTL);
4492 	u1DQSINCTL_UI = u1ReadDQSINCTL * u1MCK2CK_UI;
4493 
4494 	if(u1AutoK)
4495 		u1RX_Path_delay_UI += 0; //HW K start position =  gating min position(1500ns)
4496 	else
4497 		u1RX_Path_delay_UI -= 3; //SW K start position = gating min position(1500ns) -3UI
4498 
4499 	if(u1RX_Path_delay_UI >= u1DQSINCTL_UI)
4500 		u1StartUI = u1RX_Path_delay_UI - u1DQSINCTL_UI;
4501 	else
4502 	{
4503 		u1StartUI =0;
4504 		err("GatingStartPos err! Need to fine-tune default DQSINCTL value.\n(RX_Path_delay_UI %d) < (DQSINCTL_UI %d)\n", u1RX_Path_delay_UI, u1DQSINCTL_UI);
4505 		#if __ETT__
4506 		while(1);
4507 		#endif
4508 	}
4509 
4510 	msg("[GatingStartPos] MR0_LatencyMode %d, u1RealRL %d , u4TDQSCK_UI_min %d, 1:4ExtraMCK %d\n", u1MR0_LatencyMode, u1RealRL, u4TDQSCK_UI_min, u1ExtraMCKfor1_4mode);
4511 	reg_msg("[GatingStartPos] MR0_LatencyMode %d, u1RealRL %d , u4TDQSCK_UI_min %d, 1:4ExtraMCK %d\n", u1MR0_LatencyMode, u1RealRL, u4TDQSCK_UI_min, u1ExtraMCKfor1_4mode);
4512 
4513 	if(u1AutoK)
4514 	{
4515 		msg("RX_Path_delay_UI(%d) - DQSINCTL_UI(%d) = u1StartUI(%d)\n", u1RX_Path_delay_UI, u1DQSINCTL_UI, u1StartUI);
4516 		reg_msg("RX_Path_delay_UI(%d) - DQSINCTL_UI(%d) = u1StartUI(%d)\n", u1RX_Path_delay_UI, u1DQSINCTL_UI, u1StartUI);
4517 	}
4518 	else
4519 	{
4520 		msg("RX_Path_delay_UI(%d) -3 - DQSINCTL_UI(%d) = u1StartUI(%d)\n", u1RX_Path_delay_UI, u1DQSINCTL_UI, u1StartUI);
4521 		reg_msg("RX_Path_delay_UI(%d) -3 - DQSINCTL_UI(%d) = u1StartUI(%d)\n", u1RX_Path_delay_UI, u1DQSINCTL_UI, u1StartUI);
4522 	}
4523 
4524 	return u1StartUI;
4525 }
4526 
4527 #if GATING_RODT_LATANCY_EN
4528 U8 get_rodt_mck2ui(DRAMC_CTX_T *p)
4529 {
4530 	if (vGet_Div_Mode(p) == DIV16_MODE)
4531 		return 8;
4532 	else if (vGet_Div_Mode(p) == DIV8_MODE)
4533 		return 4;
4534 	else
4535 		return 2;
4536 }
4537 #endif
4538 
4539 static u8 rxdqs_gating_bypass(DRAMC_CTX_T *p)
4540 {
4541 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_GatingCal
4542 		if (p->femmc_Ready == 1) {
4543 			msg("[FAST_K] Bypass Gating Calibration\n");
4544 			return 1;
4545 		}
4546 #endif
4547 
4548 	return 0;
4549 }
4550 
4551 static void rxdqs_gating_fastk_save_restore(DRAMC_CTX_T *p,
4552 	struct rxdqs_gating_best_win *best_win,
4553 	struct rxdqs_gating_cal *gating_cal)
4554 {
4555 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
4556 		u8 ui_per_mck = gating_cal->dqsien_ui_per_mck;
4557 		u8 freq_div = gating_cal->dqsien_freq_div;
4558 		u8 ch = p->channel;
4559 		u8 rk = p->rank;
4560 		u8 dqs_i;
4561 
4562 		if (p->femmc_Ready == 1) {
4563 			for (dqs_i = 0; dqs_i < p->data_width/DQS_BIT_NUMBER; dqs_i++) {
4564 				best_win->best_dqsien_dly_mck[dqs_i] =
4565 					p->pSavetimeData->u1Gating_MCK_Save[ch][rk][dqs_i];
4566 				best_win->best_dqsien_dly_ui[dqs_i] =
4567 					p->pSavetimeData->u1Gating_UI_Save[ch][rk][dqs_i];
4568 				best_win->best_dqsien_dly_pi[dqs_i] =
4569 					p->pSavetimeData->u1Gating_PI_Save[ch][rk][dqs_i];
4570 
4571 				/* Calculate P1 */
4572 				best_win->best_dqsien_dly_ui_p1[dqs_i] =
4573 					best_win->best_dqsien_dly_mck[dqs_i] * ui_per_mck +
4574 					best_win->best_dqsien_dly_ui[dqs_i] + freq_div; /* Total UI for Phase1 */
4575 				best_win->best_dqsien_dly_mck_p1[dqs_i] =
4576 					best_win->best_dqsien_dly_ui_p1[dqs_i] / ui_per_mck;
4577 				best_win->best_dqsien_dly_ui_p1[dqs_i] =
4578 					best_win->best_dqsien_dly_ui_p1[dqs_i] % ui_per_mck;
4579 				best_win->best_dqsien_dly_pi_p1[dqs_i] = best_win->best_dqsien_dly_pi[dqs_i];
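				/* Illustrative example (numbers assumed): with ui_per_mck = 8
				 * and freq_div = 4, a restored phase-0 delay of (MCK, UI) = (3, 6)
				 * gives 3*8 + 6 + 4 = 34 total UI for phase 1, which re-splits
				 * into (MCK, UI) = (4, 2); the PI part is carried over unchanged. */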
4580 
4581 				vSetCalibrationResult(p, DRAM_CALIBRATION_GATING, DRAM_FAST_K);
4582 
4583 				msg("[FAST_K] CH%d RK%d best DQS%d dly(MCK, UI, PI) = (%d, %d, %d)\n",
4584 					ch, rk, dqs_i, best_win->best_dqsien_dly_mck[dqs_i],
4585 					best_win->best_dqsien_dly_ui[dqs_i],
4586 					best_win->best_dqsien_dly_pi[dqs_i]);
4587 				msg("[FAST_K] CH%d RK%d best DQS%d P1 dly(MCK, UI, PI) = (%d, %d, %d)\n",
4588 					ch, rk, dqs_i, best_win->best_dqsien_dly_mck_p1[dqs_i],
4589 					best_win->best_dqsien_dly_ui_p1[dqs_i],
4590 					best_win->best_dqsien_dly_pi_p1[dqs_i]);
4591 
4592 			}
4593 		}
4594 #endif
4595 }
4596 
4597 static void rxdqs_gating_misc_process(DRAMC_CTX_T *p,
4598 	struct rxdqs_gating_best_win *rxdqs_best_win)
4599 {
4600 #if GATING_ADJUST_TXDLY_FOR_TRACKING
4601 	U8 u1TX_dly_DQSgated = 0;
4602 #endif
4603 	U8 dqs_i;
4604 
4605 	/* Set result of useless bytes (if any) as 0. */
4606 	/* Set results of unused bytes (if any) to 0. */
4607 		rxdqs_best_win->best_dqsien_dly_mck[dqs_i] =
4608 			rxdqs_best_win->best_dqsien_dly_ui[dqs_i] =
4609 			rxdqs_best_win->best_dqsien_dly_pi[dqs_i]= 0;
4610 		rxdqs_best_win->best_dqsien_dly_mck_p1[dqs_i] =
4611 			rxdqs_best_win->best_dqsien_dly_ui_p1[dqs_i] =
4612 			rxdqs_best_win->best_dqsien_dly_pi_p1[dqs_i]= 0;
4613 
4614 #if GATING_ADJUST_TXDLY_FOR_TRACKING
4615 		ucbest_coarse_mck_backup[p->rank][dqs_i] =
4616 			ucbest_coarse_ui_backup[p->rank][dqs_i] = 0;
4617 		ucbest_coarse_mck_P1_backup[p->rank][dqs_i] =
4618 			ucbest_coarse_ui_P1_backup[p->rank][dqs_i] = 0;
4619 #endif
4620 	}
4621 
4622 	for (dqs_i=0; dqs_i<(p->data_width/DQS_BIT_NUMBER); dqs_i++) {
4623 #ifdef FOR_HQA_REPORT_USED
4624 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT6, "DQSINCTL ", "", 0,
4625 			u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RK_DQSCTL), MISC_SHU_RK_DQSCTL_DQSINCTL), NULL);
4626 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT0,
4627 			"Gating_Center_", "2T", dqs_i, rxdqs_best_win->best_dqsien_dly_mck[dqs_i], NULL);
4628 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT0,
4629 			"Gating_Center_", "05T", dqs_i, rxdqs_best_win->best_dqsien_dly_ui[dqs_i], NULL);
4630 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT0,
4631 			"Gating_Center_", "PI", dqs_i, rxdqs_best_win->best_dqsien_dly_pi[dqs_i], NULL);
4632 #endif
4633 
4634 		/*TINFO="best DQS%d delay(2T, 0.5T, PI) = (%d, %d, %d)\n", dqs_i, rxdqs_best_win.best_dqsien_dly_mck[dqs_i], rxdqs_best_win.best_dqsien_dly_ui[dqs_i], rxdqs_best_win.best_dqsien_dly_pi[dqs_i])); */
4635 		msg("best DQS%d dly(MCK, UI, PI) = (%d, %d, %d)\n", dqs_i,
4636 			rxdqs_best_win->best_dqsien_dly_mck[dqs_i],
4637 			rxdqs_best_win->best_dqsien_dly_ui[dqs_i],
4638 			rxdqs_best_win->best_dqsien_dly_pi[dqs_i]);
4639 		reg_msg("best DQS%d dly(MCK, UI, PI) = (%d, %d, %d)\n", dqs_i,
4640 			rxdqs_best_win->best_dqsien_dly_mck[dqs_i],
4641 			rxdqs_best_win->best_dqsien_dly_ui[dqs_i],
4642 			rxdqs_best_win->best_dqsien_dly_pi[dqs_i]);
4643 		/* cc mark mcFPRINTF(fp_A60501,"best DQS%d dly(MCK, UI, PI) = (%d, %d, %d)\n", dqs_i,
4644 			rxdqs_best_win.best_dqsien_dly_mck[dqs_i],
4645 			rxdqs_best_win.best_dqsien_dly_ui[dqs_i],
4646 			rxdqs_best_win.best_dqsien_dly_pi[dqs_i]);
4647 		*/
4648 
4649 #if GATING_ADJUST_TXDLY_FOR_TRACKING
4650 	u1TX_dly_DQSgated = (rxdqs_best_win->best_dqsien_dly_mck[dqs_i] << 4) +
4651 		rxdqs_best_win->best_dqsien_dly_ui[dqs_i];
4652 
4653 	if (vGet_Div_Mode(p) == DIV16_MODE)
4654 		u1TX_dly_DQSgated >>= 4;
4655 	else if (vGet_Div_Mode(p) == DIV8_MODE)
4656 		u1TX_dly_DQSgated >>= 3;
4657 	else
4658 		u1TX_dly_DQSgated >>= 2;
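	/* Illustrative example (numbers assumed): in DIV8 mode a best delay of
	 * (MCK, UI) = (2, 5) packs to (2 << 4) + 5 = 37, and 37 >> 3 = 4 is the
	 * MCK-granularity TXDLY compared against u1TXDLY_Cal_min just below
	 * (the P1 loop later does the same for u1TXDLY_Cal_max). */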
4659 
4660 	if (u1TX_dly_DQSgated < u1TXDLY_Cal_min)
4661 		u1TXDLY_Cal_min = u1TX_dly_DQSgated;
4662 
4663 	ucbest_coarse_ui_backup[p->rank][dqs_i] = rxdqs_best_win->best_dqsien_dly_ui[dqs_i];
4664 	ucbest_coarse_mck_backup[p->rank][dqs_i] = rxdqs_best_win->best_dqsien_dly_mck[dqs_i];
4665 #endif
4666 	}
4667 
4668 	msg("\n");
4669 	//cc mark mcFPRINTF(fp_A60501,"\n");
4670 
4671 	for (dqs_i=0; dqs_i<(p->data_width/DQS_BIT_NUMBER); dqs_i++) {
4672 		/*TINFO="best DQS%d P1 delay(2T, 0.5T, PI) = (%d, %d, %d)\n", dqs_i, rxdqs_best_win.best_dqsien_dly_mck_p1[dqs_i], rxdqs_best_win.best_dqsien_dly_ui_p1[dqs_i], rxdqs_best_win.best_dqsien_dly_pi_p1[dqs_i]*/
4673 		msg("best DQS%d P1 dly(MCK, UI, PI) = (%d, %d, %d)\n", dqs_i,
4674 			rxdqs_best_win->best_dqsien_dly_mck_p1[dqs_i],
4675 			rxdqs_best_win->best_dqsien_dly_ui_p1[dqs_i],
4676 			rxdqs_best_win->best_dqsien_dly_pi_p1[dqs_i]);
4677 		reg_msg("best DQS%d P1 dly(MCK, UI, PI) = (%d, %d, %d)\n", dqs_i,
4678 			rxdqs_best_win->best_dqsien_dly_mck_p1[dqs_i],
4679 			rxdqs_best_win->best_dqsien_dly_ui_p1[dqs_i],
4680 			rxdqs_best_win->best_dqsien_dly_pi_p1[dqs_i]);
4681 		/* cc mark mcFPRINTF(fp_A60501,"best DQS%d P1 dly(2T, 0.5T, PI) = (%d, %d, %d)\n", dqs_i,
4682 			rxdqs_best_win.best_dqsien_dly_mck_p1[dqs_i],
4683 			rxdqs_best_win.best_dqsien_dly_ui_p1[dqs_i],
4684 			rxdqs_best_win.best_dqsien_dly_pi_p1[dqs_i]);
4685 		*/
4686 
4687 #if GATING_ADJUST_TXDLY_FOR_TRACKING
4688 		// find max gating TXDLY (should be in P1)
4689 		u1TX_dly_DQSgated = (rxdqs_best_win->best_dqsien_dly_mck_p1[dqs_i] << 4) +
4690 			rxdqs_best_win->best_dqsien_dly_ui_p1[dqs_i];
4691 
4692 		if (vGet_Div_Mode(p) == DIV16_MODE)
4693 			u1TX_dly_DQSgated >>= 4;
4694 		else if (vGet_Div_Mode(p) == DIV8_MODE)
4695 			u1TX_dly_DQSgated >>= 3;
4696 		else
4697 			u1TX_dly_DQSgated >>= 2;
4698 
4699 		if(u1TX_dly_DQSgated > u1TXDLY_Cal_max)
4700 			u1TXDLY_Cal_max = u1TX_dly_DQSgated;
4701 
4702 		ucbest_coarse_ui_P1_backup[p->rank][dqs_i] = rxdqs_best_win->best_dqsien_dly_ui_p1[dqs_i];
4703 		ucbest_coarse_mck_P1_backup[p->rank][dqs_i] = rxdqs_best_win->best_dqsien_dly_mck_p1[dqs_i];
4704 #endif
4705 	}
4706 
4707 #if RDSEL_TRACKING_EN
4708 	//Byte 0
4709 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_INI_UIPI),
4710 		(ucbest_coarse_mck_backup[p->rank][0] << 4) | (ucbest_coarse_ui_backup[p->rank][0]),
4711 		SHU_R0_B0_INI_UIPI_CURR_INI_UI_B0);//UI
4712 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_INI_UIPI), rxdqs_best_win->best_dqsien_dly_pi[0],
4713 		SHU_R0_B0_INI_UIPI_CURR_INI_PI_B0); //PI
4714 	//Byte 1
4715 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_INI_UIPI),
4716 		(ucbest_coarse_mck_backup[p->rank][1] << 4) | (ucbest_coarse_ui_backup[p->rank][1]),
4717 		SHU_R0_B1_INI_UIPI_CURR_INI_UI_B1);//UI
4718 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_INI_UIPI),
4719 		rxdqs_best_win->best_dqsien_dly_pi[1], SHU_R0_B1_INI_UIPI_CURR_INI_PI_B1); //PI
4720 #endif
4721 
4722 }
4723 
4724 static void rxdqs_gating_auto_cal_reset(DRAMC_CTX_T *p)
4725 {
4726 	/* Reset internal autok status and logic */
4727 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4728 		P_Fld(0x1, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_RK0_SW_RST) |
4729 		P_Fld(0x1, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_RK1_SW_RST) |
4730 		P_Fld(0x1, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_SW_RST));
4731 
4732 	mcDELAY_US(1);
4733 
4734 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4735 		P_Fld(0x0, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_RK0_SW_RST) |
4736 		P_Fld(0x0, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_RK1_SW_RST) |
4737 		P_Fld(0x0, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_SW_RST));
4738 }
4739 static void rxdqs_gating_auto_cal_cfg(DRAMC_CTX_T *p,
4740 	struct rxdqs_gating_auto_param *auto_param)
4741 {
4742 	/* Before start calibration, reset all state machine and all rank's state */
4743 	/* Before starting calibration, reset the state machine and all ranks' state */
4744 
4745 
4746 	/*-----------
4747 	 * Normal Setting, Same as SW calibration
4748 	 *---------------*/
4749 	if (p->frequency == 800) {
4750 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
4751 			0x1, MISC_STBCAL1_STBCNT_SW_RST);
4752 	}
4753 
4754 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
4755 		0x1, MISC_STBCAL1_STBCNT_SHU_RST_EN);
4756 
4757 	/* SELPH_MODE = BY RANK */
4758 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
4759 		0x1, MISC_STBCAL2_DQSIEN_SELPH_BY_RANK_EN);
4760 
4761 	if (p->dram_type == TYPE_LPDDR5) {
4762 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
4763 			0x1, MISC_STBCAL2_STB_PICG_EARLY_1T_EN);
4764 	}
4765 
4766 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
4767 		0x1, MISC_STBCAL1_DIS_PI_TRACK_AS_NOT_RD);
4768 
4769 	/* PICG_EARLY_EN */
4770 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
4771 		0x1, B0_DQ6_RG_RX_ARDQ_OP_BIAS_SW_EN_B0);
4772 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
4773 		0x1, B0_DQ6_RG_RX_ARDQ_OP_BIAS_SW_EN_B0);
4774 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
4775 		0x1, MISC_STBCAL2_STB_PICG_EARLY_1T_EN);
4776 
4777 	/* BURST_MODE */
4778 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL),
4779 		0x1, MISC_SHU_STBCAL_DQSIEN_BURST_MODE);
4780 
4781 #if (__LP5_COMBO__)
4782 	if (is_lp5_family(p)) {
4783 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ9),
4784 			0x1, B0_DQ9_RG_RX_ARDQS0_DQSIENMODE_B0);
4785 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ9),
4786 			0x1, B1_DQ9_RG_RX_ARDQS0_DQSIENMODE_B1);
4787 	} else
4788 #endif
4789 	{
4790 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ9),
4791 			0x1, B0_DQ9_RG_RX_ARDQS0_DQSIENMODE_B0);
4792 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ9),
4793 			0x1, B1_DQ9_RG_RX_ARDQS0_DQSIENMODE_B1);
4794 	}
4795 
4796 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
4797 		0x2, B0_DQ6_RG_RX_ARDQ_BIAS_VREF_SEL_B0);
4798 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6),
4799 		0x2, B1_DQ6_RG_RX_ARDQ_BIAS_VREF_SEL_B1);
4800 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
4801 		0x1, MISC_STBCAL_DQSIENMODE);
4802 
4803 	/* New Rank Mode */
4804 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
4805 		P_Fld(0x1, MISC_STBCAL2_STB_IG_XRANK_CG_RST) |
4806 		P_Fld(0x1, MISC_STBCAL2_STB_RST_BY_RANK) |
4807 		P_Fld(0x1, MISC_STBCAL2_DQSIEN_SELPH_BY_RANK_EN));
4808 
4809 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2),
4810 		0x1, B0_PHY2_RG_RX_ARDQS_DQSIEN_UI_LEAD_LAG_EN_B0);
4811 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2),
4812 		0x1, B1_PHY2_RG_RX_ARDQS_DQSIEN_UI_LEAD_LAG_EN_B1);
4813 
4814 	/* dummy read */
4815 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD),
4816 		0x1, DUMMY_RD_DUMMY_RD_PA_OPT);
4817 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CG_CTRL0),
4818 		0x1, MISC_CG_CTRL0_RG_CG_PHY_OFF_DIABLE);
4819 
4820 	//Yulia's workaround for auto-K pattern length: apply to all projects before IPM_V2
4821 	//Dummy-read BL should be controlled by DQSIEN_AUTOK_BURST_LENGTH, but for now only the dummy-read length (DMY_RD_LEN) can be used
4822 	//DMY_RD_LEN (0 for BL8, 1 for BL16, 3 for BL32)
4823 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RK_DUMMY_RD_ADR), 3/*auto_param->burst_len*/, RK_DUMMY_RD_ADR_DMY_RD_LEN);
4824 
4825 	/* Decided by HW: although dummy read is used, TA2 has higher priority */
4826 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4),
4827 		0x4, TEST2_A4_TESTAGENTRKSEL);
4828 
4829 	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 1,
4830 	//	MISC_STBCAL2_STBENCMPEN);
4831 
4832 	/*-----------
4833 	 * Auto calibration setting
4834 	 *-------------------*/
4835 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4836 		P_Fld(auto_param->init_mck, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_INI_MCK) |
4837 		P_Fld(auto_param->init_ui, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_INI__UI) |
4838 		P_Fld(auto_param->end_mck, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_END_MCK) |
4839 		P_Fld(auto_param->end_ui, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_END__UI) |
4840 		P_Fld(auto_param->pi_offset, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_PI_OFFSET) |
4841 		P_Fld(p->rank, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_CUR_RANK) |
4842 		P_Fld(auto_param->burst_len, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_BURST_LENGTH) |
4843 		P_Fld(0x1, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_B0_EN) |
4844 		P_Fld(0x1, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_B1_EN));
4845 
4846 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4847 		auto_param->dbg_mode, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_DEBUG_MODE_EN);
4848 
4849 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4850 		auto_param->early_break, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_EARLY_BREAK_EN);
4851 
4852 	/*---------
4853 	 * DV settings
4854 	 *-------------------*/
4855 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
4856 		0x0, MISC_STBCAL_PICGEN);
4857 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL),
4858 		P_Fld(0x0, MISC_SHU_STBCAL_STBCALEN) |
4859 		P_Fld(0x0, MISC_SHU_STBCAL_STB_SELPHCALEN));
4860 
4861 	msg("[Gating] AUTO K with param:\n");
4862 	msg("\tinit_mck: %d, init_ui: %d, end_mck: %d, end_ui: %d\n",
4863 		auto_param->init_mck, auto_param->init_ui,
4864 		auto_param->end_mck, auto_param->end_ui);
4865 	msg("\tpi_offset: %d, early_break: %s\n", auto_param->pi_offset,
4866 		(auto_param->early_break)? "ENABLE" : "DISABLE");
4867 }
4868 
4869 static void rxdqs_gating_auto_cal_trigger(DRAMC_CTX_T *p)
4870 {
4871 	msg("[Gating] AUTO K start...\n");
4872 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4873 		0x1, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_GO);
4874 }
4875 
4876 static void rxdqs_gating_auto_cal_stop(DRAMC_CTX_T *p)
4877 {
4878 	msg("[Gating] AUTO K stop...\n");
4879 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQSIEN_AUTOK_CFG0),
4880 		0x0, MISC_DQSIEN_AUTOK_CFG0_DQSIEN_AUTOK_GO);
4881 
4882 	rxdqs_gating_auto_cal_reset(p);
4883 }
4884 
4885 
4886 static void rxdqs_gating_set_final_result(DRAMC_CTX_T *p, U8 mck2ui,
4887 	struct rxdqs_gating_best_win *best_win)
4888 {
4889 #if GATING_RODT_LATANCY_EN
4890 	U8 reg_mck, reg_ui;
4891 	U8 value;
4892 	U8 reg_mck_rodt[DQS_NUMBER], reg_ui_rodt[DQS_NUMBER];
4893 	U8 reg_mck_rodt_p1[DQS_NUMBER], reg_ui_rodt_p1[DQS_NUMBER];
4894 	U8 dqs_i;
4895 #endif
4896 
4897 #if GATING_RODT_LATANCY_EN
4898 	for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++) {
4899 		reg_mck = best_win->best_dqsien_dly_mck[dqs_i];
4900 		reg_ui = best_win->best_dqsien_dly_ui[dqs_i];
4901 
4902 		value = (reg_mck * mck2ui) + reg_ui;
4903 
4904 		if (value >= 11) {
4905 			U8 rodt_mck2ui = get_rodt_mck2ui(p);
4906 
4907 			value -= 11;
4908 			reg_mck_rodt[dqs_i] = value / rodt_mck2ui;
4909 			reg_ui_rodt[dqs_i] = value % rodt_mck2ui;
4910 
4911 			reg_mck_rodt_p1[dqs_i] = reg_mck_rodt[dqs_i];
4912 			reg_ui_rodt_p1[dqs_i] = reg_ui_rodt[dqs_i];
4913 		} else {
4914 
4915 			reg_mck_rodt[dqs_i] = 0;
4916 			reg_ui_rodt[dqs_i] = 0;
4917 			reg_mck_rodt_p1[dqs_i] = 4;
4918 			reg_ui_rodt_p1[dqs_i] = 4;
4919 			msg("[Warning] RODT cannot be -11UI for B%d\n",
4920 				dqs_i);
4921 		}
4922 
4923 		msg("DQS%d Final RODTEN: (%2d, %2d)\n",
4924 			dqs_i, reg_mck_rodt[dqs_i], reg_ui_rodt[dqs_i]);
4925 		msg("DQS%d Final RODTEN_P1: (%2d, %2d)\n",
4926 			dqs_i, reg_mck_rodt_p1[dqs_i], reg_ui_rodt_p1[dqs_i]);
4927 	}
4928 #endif
4929 
4930 	/* Set DQSIEN delay in MCK and UI */
4931 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_MCK_UI_DLY),
4932 		P_Fld(best_win->best_dqsien_dly_mck[0],
4933 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P0_B0) |
4934 		P_Fld(best_win->best_dqsien_dly_ui[0],
4935 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P0_B0) |
4936 		P_Fld(best_win->best_dqsien_dly_mck_p1[0],
4937 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P1_B0) |
4938 		P_Fld(best_win->best_dqsien_dly_ui_p1[0],
4939 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P1_B0));
4940 
4941 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_DQSIEN_MCK_UI_DLY),
4942 		P_Fld(best_win->best_dqsien_dly_mck[1],
4943 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P0_B1) |
4944 		P_Fld(best_win->best_dqsien_dly_ui[1],
4945 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P0_B1) |
4946 		P_Fld(best_win->best_dqsien_dly_mck_p1[1],
4947 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P1_B1) |
4948 		P_Fld(best_win->best_dqsien_dly_ui_p1[1],
4949 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P1_B1));
4950 
4951 #if GATING_RODT_LATANCY_EN
4952 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_RODTEN_MCK_UI_DLY),
4953 		P_Fld(reg_mck_rodt[0],
4954 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_MCK_P0_B0) |
4955 		P_Fld(reg_ui_rodt[0],
4956 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_UI_P0_B0) |
4957 		P_Fld(reg_mck_rodt_p1[0],
4958 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_MCK_P1_B0) |
4959 		P_Fld(reg_ui_rodt_p1[0],
4960 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_UI_P1_B0));
4961 
4962 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_RODTEN_MCK_UI_DLY),
4963 		P_Fld(reg_mck_rodt[1],
4964 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_MCK_P0_B1) |
4965 		P_Fld(reg_ui_rodt[1],
4966 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_UI_P0_B1) |
4967 		P_Fld(reg_mck_rodt_p1[1],
4968 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_MCK_P1_B1) |
4969 		P_Fld(reg_ui_rodt_p1[1],
4970 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_UI_P1_B1));
4971 #endif
4972 
4973 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY),
4974 		best_win->best_dqsien_dly_pi[0],
4975 		SHU_RK_B0_DQSIEN_PI_DLY_DQSIEN_PI_B0);
4976 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_DQSIEN_PI_DLY),
4977 		best_win->best_dqsien_dly_pi[1],
4978 		SHU_RK_B1_DQSIEN_PI_DLY_DQSIEN_PI_B1);
4979 
4980 }
4981 
4982 
4983 /* By autoK: Set the result calibrated by HW to RG */
4984 static void rxdqs_gating_auto_xlate(DRAMC_CTX_T *p,
4985 	struct rxdqs_gating_best_win *best_win,
4986 	struct rxdqs_gating_cal *rxdqs_cal)
4987 {
4988 	u8 mck, ui, pi;
4989 	U8 mck_p1, ui_p1;
4990 	u8 mck2ui, freq_div;
4991 	U8 total_ui;
4992 #if GATING_RODT_LATANCY_EN
4993 	U8 mck_rodt, ui_rodt;
4994 	U8 mck_rodt_p1, ui_rodt_p1;
4995 #endif
4996 	U16 value;
4997 	u8 dqs_i;
4998 
4999 	/* Transfer HW unit to RG unit */
5000 	for (dqs_i = 0; dqs_i < p->data_width/DQS_BIT_NUMBER; dqs_i++) {
5001 		mck = best_win->best_dqsien_dly_mck[dqs_i];
5002 		ui = best_win->best_dqsien_dly_ui[dqs_i];
5003 		pi = best_win->best_dqsien_dly_pi[dqs_i];
5004 		mck2ui = rxdqs_cal->dqsien_ui_per_mck;
5005 		freq_div = rxdqs_cal->dqsien_freq_div;
5006 
5007 		if (vGet_Div_Mode(p) == DIV16_MODE)
5008 			total_ui = (mck << 4) + ui; /* 1:16 mode */
5009 		else if (vGet_Div_Mode(p) == DIV8_MODE)
5010 			total_ui = (mck << 3) + ui; /* 1: 8 mode */
5011 		else
5012 			total_ui = (mck << 2) + ui; /* 1: 4 mode */
5013 
5014 		/* RG is always 1:16 mode */
5015 		mck = (total_ui >> 4);
5016 		ui = (total_ui & 0xf);
5017 
5018 		value = mck * mck2ui + ui; /* Total UI number */
5019 		mck_p1 = (value + freq_div) / mck2ui;
5020 		ui_p1 = (value + freq_div) % mck2ui;
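		/*
		 * Illustrative example (values assumed, not from a real run): a HW
		 * result of (mck, ui) = (2, 5) in DIV8 mode gives
		 * total_ui = (2 << 3) + 5 = 21, i.e. (mck, ui) = (1, 5) in RG 1:16
		 * units. With mck2ui = 16 and freq_div = 4, phase 1 becomes
		 * ((21 + 4) / 16, (21 + 4) % 16) = (1, 9).
		 */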
5021 
5022 		msg("[Gating][RG] DQS%d Final result: (%d, %d, %d)\n", dqs_i, mck, ui, pi);
5023 		msg("[Gating][RG] DQS%d Final result P1: (%d, %d)\n", dqs_i, mck_p1, ui_p1);
5024 
5025 		best_win->best_dqsien_dly_mck[dqs_i] = mck;
5026 		best_win->best_dqsien_dly_ui[dqs_i] = ui;
5027 		best_win->best_dqsien_dly_pi[dqs_i] = pi;
5028 
5029 		best_win->best_dqsien_dly_mck_p1[dqs_i] = mck_p1;
5030 		best_win->best_dqsien_dly_ui_p1[dqs_i] = ui_p1;
5031 		best_win->best_dqsien_dly_pi_p1[dqs_i] = pi;
5032 	}
5033 }
5034 
5035 #define RXDQS_GATING_AUTO_CAL_STATUS_BYTE_OFFSET		0x40
5036 
5037 static DRAM_STATUS_T rxdqs_gating_auto_cal_status(DRAMC_CTX_T *p,
5038 	struct rxdqs_gating_auto_param *auto_param,
5039 	struct rxdqs_gating_best_win *best_win)
5040 {
5041 	U8 mck_center[DQS_NUMBER], ui_center[DQS_NUMBER], pi_center[DQS_NUMBER];
5042 	U8 mck_left[DQS_NUMBER], ui_left[DQS_NUMBER], pi_left[DQS_NUMBER];
5043 	U8 mck_right[DQS_NUMBER], ui_right[DQS_NUMBER], pi_right[DQS_NUMBER];
5044 	U8 done[DQS_NUMBER] = { 0 }, error[DQS_NUMBER] = { 0 };
5045 	DRAM_STATUS_T ret;
5046 	U8 done_bytes, total_bytes;
5047 	U8 byte_ofst;
5048 	U8 dqs_i;
5049 
5050 	total_bytes = p->data_width / DQS_BIT_NUMBER;
5051 	done_bytes = 0;
5052 	ret = DRAM_OK;
5053 
5054 	while (done_bytes < total_bytes) {
5055 		for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++) {
5056 			/* If already done, skip this byte */
5057 			if (done[dqs_i])
5058 				continue;
5059 
5060 			byte_ofst = dqs_i * RXDQS_GATING_AUTO_CAL_STATUS_BYTE_OFFSET;
5061 
5062 			done[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5063 				DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS0 + byte_ofst),
5064 				DQSIEN_AUTOK_B0_RK0_STATUS0_AUTOK_DONE_B0_RK0);
5065 			error[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5066 				DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS0 + byte_ofst),
5067 				DQSIEN_AUTOK_B0_RK0_STATUS0_AUTOK_ERR_B0_RK0);
5068 
5069 			/* If autok fails, the done flag will not be asserted. */
5070 			if (done[dqs_i] || error[dqs_i]) {
5071 				/* Done and Pass */
5072 				if (error[dqs_i] == 0) {
5073 					mck_center[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5074 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS0 + byte_ofst),
5075 						DQSIEN_AUTOK_B0_RK0_STATUS0_DQSIEN_AUTOK_C_MCK_B0_RK0);
5076 					ui_center[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5077 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS0 + byte_ofst),
5078 						DQSIEN_AUTOK_B0_RK0_STATUS0_DQSIEN_AUTOK_C__UI_B0_RK0);
5079 					pi_center[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5080 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS0 + byte_ofst),
5081 						DQSIEN_AUTOK_B0_RK0_STATUS0_DQSIEN_AUTOK_C__PI_B0_RK0);
5082 
5083 					mck_left[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5084 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS1 + byte_ofst),
5085 						DQSIEN_AUTOK_B0_RK0_STATUS1_DQSIEN_AUTOK_L_MCK_B0_RK0);
5086 					ui_left[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5087 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS1 + byte_ofst),
5088 						DQSIEN_AUTOK_B0_RK0_STATUS1_DQSIEN_AUTOK_L__UI_B0_RK0);
5089 					pi_left[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5090 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS1 + byte_ofst),
5091 						DQSIEN_AUTOK_B0_RK0_STATUS1_DQSIEN_AUTOK_L__PI_B0_RK0);
5092 
5093 					/* If early-break mode is not enabled, the right boundary can also be found */
5094 					if (auto_param->early_break == DISABLE) {
5095 						mck_right[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5096 							DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS1 + byte_ofst),
5097 							DQSIEN_AUTOK_B0_RK0_STATUS1_DQSIEN_AUTOK_R_MCK_B0_RK0);
5098 						ui_right[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5099 							DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS1 + byte_ofst),
5100 							DQSIEN_AUTOK_B0_RK0_STATUS1_DQSIEN_AUTOK_R__UI_B0_RK0);
5101 						pi_right[dqs_i] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5102 							DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_STATUS1 + byte_ofst),
5103 							DQSIEN_AUTOK_B0_RK0_STATUS1_DQSIEN_AUTOK_R__PI_B0_RK0);
5104 					}
5105 				}
5106 				else
5107 				{
5108 					/* If an error occurred on this byte, treat it as a DONE condition */
5109 					done[dqs_i] = 1;
5110 				}
5111 
5112 				if (auto_param->dbg_mode == ENABLE) {
5113 					U32 dbg_reg_addr;
5114 					U32 dbg_reg_idx;
5115 					U32 dbg_reg_val;
5116 
5117 					dbg_reg_addr = DRAMC_REG_ADDR(
5118 						DDRPHY_REG_DQSIEN_AUTOK_B0_RK0_DBG_STATUS0 + byte_ofst);
5119 					for (dbg_reg_idx = 0;
5120 						dbg_reg_idx < RXDQS_GATING_AUTO_DBG_REG_NUM;
5121 						dbg_reg_idx++, dbg_reg_addr += 4) {
5122 						dbg_reg_val = u4IO32Read4B(dbg_reg_addr);
5123 
5124 						err("B%d Gating AUTOK DBG Status-%d: [0x%08x]\n",
5125 							dqs_i, dbg_reg_idx, dbg_reg_val);
5126 					}
5127 				}
5128 				done_bytes++;
5129 			}
5130 		}
5131 
5132 		mcDELAY_MS(1);
5133 	}
5134 
5135 	/* Log it */
5136 	for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++) {
5137 		msg("[Gating][%s] AUTOK of CH-%d, Rk-%d, Byte-%d:\n",
5138 			error[dqs_i]? "Fail" : "Pass", p->channel, p->rank, dqs_i);
5139 
5140 		if (done[dqs_i]) {
5141 			if (error[dqs_i] == 0) {
5142 				msg("\tcenter(%2d, %2d, %2d)\n",
5143 							mck_center[dqs_i], ui_center[dqs_i], pi_center[dqs_i]);
5144 				msg("\tleft(%2d, %2d, %2d)\n",
5145 							mck_left[dqs_i], ui_left[dqs_i], pi_left[dqs_i]);
5146 
5147 				if (auto_param->early_break == DISABLE) {
5148 					msg("\tright(%2d, %2d, %2d)\n",
5149 								mck_right[dqs_i], ui_right[dqs_i], pi_right[dqs_i]);
5150 				}
5151 			}
5152 			if (error[dqs_i]) {
5153 				ret = DRAM_FAIL;
5154 			} else {
5155 				/* If passed, shall set the result to RG */
5156 				best_win->best_dqsien_dly_mck[dqs_i] = mck_center[dqs_i];
5157 				best_win->best_dqsien_dly_ui[dqs_i] = ui_center[dqs_i];
5158 				best_win->best_dqsien_dly_pi[dqs_i] = pi_center[dqs_i];
5159 			}
5160 		}
5161 	}
5162 
5163 	rxdqs_gating_auto_cal_stop(p);
5164 
5165 	return ret;
5166 }
5167 
5168 static DRAM_STATUS_T dramc_rx_dqs_gating_auto_cal(DRAMC_CTX_T *p)
5169 {
5170 	struct rxdqs_gating_auto_param auto_param;
5171 	struct rxdqs_gating_best_win rxdqs_best_win;
5172 	struct rxdqs_gating_cal rxdqs_cal;
5173 	DRAM_STATUS_T ret;
5174 	U8 start_ui, end_ui;
5175 	U8 mck2ui_hw;
5176 
5177 	U32 reg_backup_address[ ] = {
5178 		(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD)),
5179 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CG_CTRL0)),
5180 		(DRAMC_REG_ADDR(DRAMC_REG_TEST2_A4)),
5181 		(DRAMC_REG_ADDR(DRAMC_REG_RK_DUMMY_RD_ADR))
5182 	};
5183 
5184 	DramcBackupRegisters(p, reg_backup_address,
5185 		sizeof (reg_backup_address) / sizeof (U32));
5186 
5187 	ret = DRAM_OK;
5188 
5189 	memset((void *)&auto_param, 0, sizeof auto_param);
5190 	memset((void *)&rxdqs_best_win, 0, sizeof rxdqs_best_win);
5191 	memset((void *)&rxdqs_cal, 0, sizeof rxdqs_cal);
5192 
5193 	if (vGet_Div_Mode(p) == DIV4_MODE)
5194 		rxdqs_cal.dqsien_freq_div = 2;
5195 	else
5196 		rxdqs_cal.dqsien_freq_div = 4;
5197 	rxdqs_cal.dqsien_ui_per_mck = DQS_GW_UI_PER_MCK;
5198 
5199 	if (!rxdqs_gating_bypass(p)) {
5200 		/* 60868 has different mck2ui relations for HW and RG */
5201 		if (vGet_Div_Mode(p) == DIV16_MODE)
5202 			mck2ui_hw = 16;
5203 		else if (vGet_Div_Mode(p) == DIV8_MODE)
5204 			mck2ui_hw = 8;
5205 		else
5206 			mck2ui_hw = 4;
5207 
5208 #if ENABLE_GATING_AUTOK_WA
5209 		if (__wa__gating_autok_init_ui[p->rank] > 3)
5210 			start_ui = __wa__gating_autok_init_ui[p->rank] - 3;
5211 		else
5212 #endif
5213 		start_ui = u1GetGatingStartPos(p, AUTOK_ON);
5214 		end_ui = start_ui + 32;
5215 
5216 		/* Set auto calibration params */
5217 		auto_param.early_break = ENABLE;
5218 		auto_param.dbg_mode = ENABLE;
5219 		auto_param.init_mck = start_ui / mck2ui_hw;
5220 		auto_param.init_ui = start_ui % mck2ui_hw;
5221 		auto_param.end_mck = end_ui / mck2ui_hw;
5222 		auto_param.end_ui = end_ui % mck2ui_hw;
5223 		auto_param.pi_offset = 2; /* 2 ^ 2 = 4 */
5224 		auto_param.burst_len = RXDQS_BURST_LEN_8;
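		/*
		 * Illustrative example of the range conversion above (values are
		 * assumed, not from a real run): with start_ui = 10 and
		 * mck2ui_hw = 8 (DIV8 mode), the scan starts at
		 * (init_mck, init_ui) = (1, 2); with end_ui = 42 it stops at
		 * (end_mck, end_ui) = (5, 2). pi_offset = 2 presumably selects a
		 * PI search step of 2^2 = 4, per the comment above.
		 */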
5225 
5226 #if FOR_DV_SIMULATION_USED == 1
5227 		cal_sv_rand_args_t *psra = get_psra();
5228 
5229 		if (psra) {
5230 			auto_param.early_break =
5231 					psra->dqsien_autok_early_break_en? ENABLE: DISABLE;
5232 			auto_param.dbg_mode =
5233 					psra->dqsien_autok_dbg_mode_en? ENABLE: DISABLE;
5234 			auto_param.pi_offset =
5235 					psra->dqsien_autok_pi_offset? ENABLE: DISABLE;
5236 		}
5237 #endif /* FOR_DV_SIMULATION_USED == 1 */
5238 
5239 		rxdqs_gating_auto_cal_cfg(p, &auto_param);
5240 
5241 		/* Trigger HW auto k */
5242 		rxdqs_gating_auto_cal_trigger(p);
5243 
5244 		ret = rxdqs_gating_auto_cal_status(p, &auto_param, &rxdqs_best_win);
5245 		if (ret == DRAM_OK)
5246 			vSetCalibrationResult(p, DRAM_CALIBRATION_GATING, DRAM_OK);
5247 
5248 		rxdqs_gating_auto_xlate(p, &rxdqs_best_win, &rxdqs_cal);
5249 	}
5250 
5251 	rxdqs_gating_fastk_save_restore(p, &rxdqs_best_win, &rxdqs_cal);
5252 	rxdqs_gating_set_final_result(p, rxdqs_cal.dqsien_ui_per_mck, &rxdqs_best_win);
5253 
5254 	rxdqs_gating_misc_process(p, &rxdqs_best_win);
5255 	DramcRestoreRegisters(p, reg_backup_address,
5256 		sizeof (reg_backup_address) / sizeof (U32));
5257 
5258 	DramPhyReset(p);
5259 
5260 	return ret;
5261 }
5262 
5263 static void rxdqs_gating_sw_cal_init(DRAMC_CTX_T *p, U8 use_enhanced_rdqs)
5264 {
5265 
5266 	/* Disable Per-Bank ref */
5267 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_CONF0),  0, SHU_CONF0_PBREFEN);
5268 
5269 	/*----------------
5270 	 * From DV
5271 	 *------------------------*/
5272 	if (p->frequency == 800) {
5273 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
5274 			0x1, MISC_STBCAL1_STBCNT_SW_RST);
5275 	}
5276 
5277 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
5278 		0x1, MISC_STBCAL1_STBCNT_SHU_RST_EN);
5279 
5280 	/* SELPH_MODE = BY RANK */
5281 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
5282 		0x1, MISC_STBCAL2_DQSIEN_SELPH_BY_RANK_EN);
5283 
5284 	if (p->dram_type == TYPE_LPDDR5) {
5285 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
5286 			0x1, MISC_STBCAL2_STB_PICG_EARLY_1T_EN);
5287 	}
5288 
5289 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
5290 		0x1, MISC_STBCAL1_DIS_PI_TRACK_AS_NOT_RD);
5291 
5292 	/* PICG_EARLY_EN */
5293 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
5294 		0x1, B0_DQ6_RG_RX_ARDQ_OP_BIAS_SW_EN_B0);
5295 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
5296 		0x1, B0_DQ6_RG_RX_ARDQ_OP_BIAS_SW_EN_B0);
5297 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
5298 		0x1, MISC_STBCAL2_STB_PICG_EARLY_1T_EN);
5299 
5300 	/* BURST_MODE */
5301 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL),
5302 		0x1, MISC_SHU_STBCAL_DQSIEN_BURST_MODE);
5303 
5304 #if (__LP5_COMBO__)
5305 	if (is_lp5_family(p)) {
5306 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ9),
5307 			0x1, B0_DQ9_RG_RX_ARDQS0_DQSIENMODE_B0);
5308 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ9),
5309 			0x1, B1_DQ9_RG_RX_ARDQS0_DQSIENMODE_B1);
5310 	} else
5311 #endif
5312 	{
5313 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ9),
5314 			0x1, B0_DQ9_RG_RX_ARDQS0_DQSIENMODE_B0);
5315 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ9),
5316 			0x1, B1_DQ9_RG_RX_ARDQS0_DQSIENMODE_B1);
5317 	}
5318 
5319 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
5320 		0x2, B0_DQ6_RG_RX_ARDQ_BIAS_VREF_SEL_B0);
5321 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6),
5322 		0x2, B1_DQ6_RG_RX_ARDQ_BIAS_VREF_SEL_B1);
5323 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
5324 		0x1, MISC_STBCAL_DQSIENMODE);
5325 
5326 	/* New Rank Mode */
5327 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2),
5328 		P_Fld(0x1, MISC_STBCAL2_STB_IG_XRANK_CG_RST) |
5329 		P_Fld(0x1, MISC_STBCAL2_STB_RST_BY_RANK) |
5330 		P_Fld(0x1, MISC_STBCAL2_DQSIEN_SELPH_BY_RANK_EN));
5331 
5332 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2),
5333 		0x1, B0_PHY2_RG_RX_ARDQS_DQSIEN_UI_LEAD_LAG_EN_B0);
5334 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2),
5335 		0x1, B1_PHY2_RG_RX_ARDQS_DQSIEN_UI_LEAD_LAG_EN_B1);
5336 
5337 	//DramcHWGatingOnOff(p, 0);
5338 
5339 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 1,
5340 		MISC_STBCAL2_STBENCMPEN);
5341 
5342 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RX_SET0), 0,
5343 		RX_SET0_DM4TO1MODE);
5344 
5345 	/* enable &reset DQS counter */
5346 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 1,
5347 		MISC_STBCAL2_DQSG_CNT_EN);
5348 	mcDELAY_US(4); /* wait 1 auto refresh after DQS Counter enable */
5349 
5350 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 1,
5351 		MISC_STBCAL2_DQSG_CNT_RST);
5352 	mcDELAY_US(1); /* delay 2T */
5353 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 0,
5354 		MISC_STBCAL2_DQSG_CNT_RST);
5355 
5356 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1),
5357 		u1GetRank(p), MISC_CTRL1_R_DMSTBENCMP_RK_OPT);
5358 	DramcEngine2Init(p, 0x55000000,
5359 		0xaa000000 | GATING_PATTERN_NUM_LP5, TEST_AUDIO_PATTERN, 0, TE_NO_UI_SHIFT);
5360 
5361 	if (use_enhanced_rdqs) {
5362 		/* TBD. Enter Enhanced RDQS training mode */
5363 	}
5364 }
5365 
5366 static void rxdqs_gating_set_dqsien_dly(DRAMC_CTX_T *p, U8 dly_ui,
5367 	struct rxdqs_gating_cal *rxdqs_cal)
5368 {
5369 	U32 reg_mck, reg_ui;
5370 	U32 reg_mck_p1, reg_ui_p1;
5371 #if GATING_RODT_LATANCY_EN
5372 	U32 reg_mck_rodt, reg_ui_rodt;
5373 	U32 reg_mck_rodt_p1, reg_ui_rodt_p1;
	U32 value; /* total DQSIEN delay in UI, used by the RODT calculation below */
5374 #endif
5375 	U8 mck2ui = rxdqs_cal->dqsien_ui_per_mck;
5376 
5377 	rxdqs_cal->dqsien_dly_mck = dly_ui / rxdqs_cal->dqsien_ui_per_mck;
5378 	rxdqs_cal->dqsien_dly_ui = dly_ui % rxdqs_cal->dqsien_ui_per_mck;
5379 	rxdqs_cal->dqsien_dly_mck_p1 = (dly_ui + rxdqs_cal->dqsien_freq_div) / mck2ui;
5380 	rxdqs_cal->dqsien_dly_ui_p1 = (dly_ui + rxdqs_cal->dqsien_freq_div) % mck2ui;
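	/*
	 * Illustrative example (assumed values): with dly_ui = 21,
	 * dqsien_ui_per_mck = 16 and dqsien_freq_div = 4, phase 0 is
	 * (21 / 16, 21 % 16) = (1, 5) and phase 1 is
	 * ((21 + 4) / 16, (21 + 4) % 16) = (1, 9).
	 */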
5381 
5382 	reg_mck = rxdqs_cal->dqsien_dly_mck;
5383 	reg_ui = rxdqs_cal->dqsien_dly_ui;
5384 	reg_mck_p1 = rxdqs_cal->dqsien_dly_mck_p1;
5385 	reg_ui_p1 = rxdqs_cal->dqsien_dly_ui_p1;
5386 
5387 #if GATING_RODT_LATANCY_EN
5388 	value = (reg_mck * mck2ui) + reg_ui;
5389 
5390 	if (value >= 11) {
5391 		/* For RODT, MCK2UI is different from Gating */
5392 		U8 rodt_mck2ui = get_rodt_mck2ui(p);
5393 
5394 		value -= 11;
5395 		reg_mck_rodt = value / rodt_mck2ui;
5396 		reg_ui_rodt = value % rodt_mck2ui;
5397 
5398 		reg_mck_rodt_p1 = reg_mck_rodt;
5399 		reg_ui_rodt_p1 = reg_ui_rodt;
5400 	} else {
5401 
5402 		reg_mck_rodt = 0;
5403 		reg_ui_rodt = 0;
5404 		reg_mck_rodt_p1 = 4;
5405 		reg_ui_rodt_p1 = 4;
5406 		msg("[Warning] RODT cannot be -11UI\n");
5407 	}
5408 #endif
5409 
5410 	/* Set DQSIEN delay in MCK and UI */
5411 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_MCK_UI_DLY),
5412 		P_Fld(reg_mck,
5413 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P0_B0) |
5414 		P_Fld(reg_ui,
5415 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P0_B0) |
5416 		P_Fld(reg_mck_p1,
5417 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P1_B0) |
5418 		P_Fld(reg_ui_p1,
5419 		SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P1_B0));
5420 
5421 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_DQSIEN_MCK_UI_DLY),
5422 		P_Fld(reg_mck,
5423 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P0_B1) |
5424 		P_Fld(reg_ui,
5425 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P0_B1) |
5426 		P_Fld(reg_mck_p1,
5427 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P1_B1) |
5428 		P_Fld(reg_ui_p1,
5429 		SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P1_B1));
5430 
5431 #if GATING_RODT_LATANCY_EN
5432 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_RODTEN_MCK_UI_DLY),
5433 		P_Fld(reg_mck_rodt,
5434 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_MCK_P0_B0) |
5435 		P_Fld(reg_ui_rodt,
5436 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_UI_P0_B0) |
5437 		P_Fld(reg_mck_rodt_p1,
5438 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_MCK_P1_B0) |
5439 		P_Fld(reg_ui_rodt_p1,
5440 		SHU_RK_B0_RODTEN_MCK_UI_DLY_RODTEN_UI_P1_B0));
5441 
5442 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_RODTEN_MCK_UI_DLY),
5443 		P_Fld(reg_mck_rodt,
5444 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_MCK_P0_B1) |
5445 		P_Fld(reg_ui_rodt,
5446 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_UI_P0_B1) |
5447 		P_Fld(reg_mck_rodt_p1,
5448 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_MCK_P1_B1) |
5449 		P_Fld(reg_ui_rodt_p1,
5450 		SHU_RK_B1_RODTEN_MCK_UI_DLY_RODTEN_UI_P1_B1));
5451 #endif
5452 }
5453 
5454 static void rxdqs_gating_sw_cal_trigger(DRAMC_CTX_T *p,
5455 	struct rxdqs_gating_cal *rxdqs_cal)
5456 {
5457 #if 0//ENABLE_DDR800_OPEN_LOOP_MODE_OPTION -> No 0.5UI after A60868
5458 	if (u1IsPhaseMode(p) == TRUE) {
5459 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0),
5460 			rxdqs_cal->dqsien_dly_pi >> 4, SHU_R0_B0_DQ0_DA_ARPI_DDR400_0D5UI_RK0_B0);
5461 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0),
5462 			rxdqs_cal->dqsien_dly_pi >> 4, SHU_R0_B1_DQ0_DA_ARPI_DDR400_0D5UI_RK0_B1);
5463 	} else
5464 #endif
5465 	{
5466 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY),
5467 			rxdqs_cal->dqsien_dly_pi, SHU_RK_B0_DQSIEN_PI_DLY_DQSIEN_PI_B0);
5468 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_DQSIEN_PI_DLY),
5469 			rxdqs_cal->dqsien_dly_pi, SHU_RK_B1_DQSIEN_PI_DLY_DQSIEN_PI_B1);
5470 	}
5471 	DramPhyReset(p);
5472 
5473 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 1,
5474 		MISC_STBCAL2_DQSG_CNT_RST);
5475 	mcDELAY_US(1);
5476 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2), 0,
5477 		MISC_STBCAL2_DQSG_CNT_RST);
5478 
5479 	/* enable TE2, audio pattern */
5480 	DramcEngine2Run(p, TE_OP_READ_CHECK, TEST_AUDIO_PATTERN);
5481 }
5482 
5483 static void rxdqs_gating_get_leadlag(DRAMC_CTX_T *p,
5484 	struct rxdqs_gating_trans *rxdqs_trans,
5485 	struct rxdqs_gating_cal *rxdqs_cal)
5486 {
5487 	U8 dqs_i;
5488 	U8 debounce_thrd_PI = 16;
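	/*
	 * De-bounce rule used below: a byte only counts as "DQS high" once
	 * consecutive (lead, lag) == (1, 1) samples cover at least
	 * debounce_thrd_PI (16) PI, i.e. dqs_high * dqsien_pi_adj_step >= 16;
	 * only then are subsequent (1, 0)/(0, 1) samples accumulated as the
	 * transition region.
	 */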
5489 
5490 	for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++) {
5491 		if (dqs_i == 0) {
5492 			rxdqs_trans->dqs_lead[0] = u4IO32ReadFldAlign(
5493 				DRAMC_REG_ADDR(DDRPHY_REG_MISC_PHY_RGS_STBEN_B0),
5494 				MISC_PHY_RGS_STBEN_B0_AD_RX_ARDQS0_STBEN_LEAD_B0);
5495 			rxdqs_trans->dqs_lag[0] = u4IO32ReadFldAlign(
5496 				DRAMC_REG_ADDR(DDRPHY_REG_MISC_PHY_RGS_STBEN_B0),
5497 				MISC_PHY_RGS_STBEN_B0_AD_RX_ARDQS0_STBEN_LAG_B0);
5498 		} else {
5499 			rxdqs_trans->dqs_lead[1] = u4IO32ReadFldAlign(
5500 				DRAMC_REG_ADDR(DDRPHY_REG_MISC_PHY_RGS_STBEN_B1),
5501 				MISC_PHY_RGS_STBEN_B1_AD_RX_ARDQS0_STBEN_LEAD_B1);
5502 			rxdqs_trans->dqs_lag[1] = u4IO32ReadFldAlign(
5503 				DRAMC_REG_ADDR(DDRPHY_REG_MISC_PHY_RGS_STBEN_B1),
5504 				MISC_PHY_RGS_STBEN_B1_AD_RX_ARDQS0_STBEN_LAG_B1);
5505 		}
5506 
5507 		if ((rxdqs_trans->dqs_lead[dqs_i] == 1) &&
5508 			(rxdqs_trans->dqs_lag[dqs_i] == 1)) {
5509 			rxdqs_trans->dqs_high[dqs_i]++;
5510 			rxdqs_trans->dqs_transition[dqs_i] = 1;
5511 
5512 			/* Record the latest value that causes (lead, lag) = (1, 1) */
5513 			rxdqs_trans->dqsien_dly_mck_leadlag[dqs_i] =
5514 				rxdqs_cal->dqsien_dly_mck;
5515 			rxdqs_trans->dqsien_dly_ui_leadlag[dqs_i] =
5516 				rxdqs_cal->dqsien_dly_ui;
5517 			rxdqs_trans->dqsien_dly_pi_leadlag[dqs_i] =
5518 				rxdqs_cal->dqsien_dly_pi;
5519 		} else if ((rxdqs_trans->dqs_high[dqs_i] *
5520 			rxdqs_cal->dqsien_pi_adj_step) >= debounce_thrd_PI) {
5521 			/* Consecutive 16 PI DQS high for de-glitch */
5522 			if (((rxdqs_trans->dqs_lead[dqs_i] == 1) &&
5523 				(rxdqs_trans->dqs_lag[dqs_i] == 0)) ||
5524 				((rxdqs_trans->dqs_lead[dqs_i] == 0) &&
5525 				(rxdqs_trans->dqs_lag[dqs_i] == 1))) {
5526 				rxdqs_trans->dqs_transition[dqs_i]++;
5527 			}
5528 		#if GATING_LEADLAG_LOW_LEVEL_CHECK
5529 			else if ((rxdqs_trans->dqs_lead[dqs_i] == 0) &&
5530 				(rxdqs_trans->dqs_lag[dqs_i] == 0)) {
5531 				if ((rxdqs_trans->dqs_low[dqs_i] *
5532 					rxdqs_cal->dqsien_pi_adj_step) >= debounce_thrd_PI) {
5533 					/* (lead, lag) = (0, 0), transition done */
5534 					rxdqs_trans->dqs_transitioned[dqs_i] = 1;
5535 				}
5536 				rxdqs_trans->dqs_low[dqs_i]++;
5537 			} else {
5538 				rxdqs_trans->dqs_high[dqs_i] = 0;
5539 				rxdqs_trans->dqs_low[dqs_i] = 0;
5540 			}
5541 		#else
5542 		else {
5543 			/* (lead, lag) = (0, 0), transition done */
5544 			rxdqs_trans->dqs_transitioned[dqs_i] = 1;
5545 		}
5546 		#endif
5547 		} else {
5548 			/* Lead/lag = (1, 1) number is too few. Reset dqs_high */
5549 			rxdqs_trans->dqs_high[dqs_i] = 0;
5550 		#if GATING_LEADLAG_LOW_LEVEL_CHECK
5551 			rxdqs_trans->dqs_low[dqs_i] = 0;
5552 		#endif
5553 		}
5554 	}
5555 }
5556 
5557 static U8 rxdqs_gating_sw_cal(DRAMC_CTX_T *p,
5558 	struct rxdqs_gating_trans *rxdqs_trans,
5559 	struct rxdqs_gating_cal *rxdqs_cal, U8 *pass_byte_count,
5560 	struct rxdqs_gating_best_win *best_win, U8 dly_ui, U8 dly_ui_end)
5561 {
5562 	U8 gating_error[DQS_NUMBER];
5563 	U32 debug_cnt[DQS_NUMBER];
5564 	U32 debug_pass_cnt;
5565 	U8 dqs_i;
5566 	U8 passed_bytes;
5567 
5568 	memset(debug_cnt, 0, sizeof(debug_cnt));
5569 	passed_bytes = *pass_byte_count;
5570 
5571 	rxdqs_gating_sw_cal_trigger(p, rxdqs_cal);
5572 
5573 	if (p->rank == RANK_0) {
5574 		gating_error[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5575 			DDRPHY_REG_MISC_STBERR_ALL),
5576 			MISC_STBERR_ALL_GATING_ERROR_B0_RK0);
5577 		gating_error[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5578 			DDRPHY_REG_MISC_STBERR_ALL),
5579 			MISC_STBERR_ALL_GATING_ERROR_B1_RK0);
5580 	} else {
5581 		gating_error[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5582 			DDRPHY_REG_MISC_STBERR_ALL),
5583 			MISC_STBERR_ALL_GATING_ERROR_B0_RK1);
5584 		gating_error[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(
5585 			DDRPHY_REG_MISC_STBERR_ALL),
5586 			MISC_STBERR_ALL_GATING_ERROR_B1_RK1);
5587 	}
5588 
5589 	/* Read the DQS counter.
5590 	 * Note: the DQS counter is no longer used as a pass condition; it is
5591 	 * read and logged here only for debugging. It can still serve as a
5592 	 * clue: it will be n*0x23 when gating is correct.
5593 	 */
5594 	debug_cnt[0] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_CAL_DQSG_CNT_B0));
5595 	debug_cnt[1] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_CAL_DQSG_CNT_B1));
5596 
5597 	/* read (lead, lag) */
5598 	rxdqs_gating_get_leadlag(p, rxdqs_trans, rxdqs_cal);
5599 
5600 	msg("%2d %2d %2d | ",
5601 		rxdqs_cal->dqsien_dly_mck, rxdqs_cal->dqsien_dly_ui,
5602 		rxdqs_cal->dqsien_dly_pi);
5603 	msg("B1->B0 | %x %x | %x %x | (%d %d) (%d %d)\n",
5604 		debug_cnt[1], debug_cnt[0],
5605 		gating_error[1], gating_error[0],
5606 		rxdqs_trans->dqs_lead[1], rxdqs_trans->dqs_lag[1],
5607 		rxdqs_trans->dqs_lead[0], rxdqs_trans->dqs_lag[0]);
5608 
5609 #if (__LP5_COMBO__)
5610 	if((is_lp5_family(p)) && (vGet_Div_Mode(p) == DIV16_MODE))
5611 		debug_pass_cnt = (GATING_GOLDEND_DQSCNT_LP5 >> 1);
5612 	else
5613 #endif
5614 		debug_pass_cnt = GATING_GOLDEND_DQSCNT_LP5;
5615 
5616 	/* Decide the window center */
5617 	for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++) {
5618 		if (passed_bytes & (1 << dqs_i))
5619 			continue;
5620 
5621 		if ((gating_error[dqs_i] == 0) && (debug_cnt[dqs_i] == debug_pass_cnt)) {
5622 			/* Calculate DQSIEN position */
5623 			if (rxdqs_trans->dqs_transitioned[dqs_i] != 0) {
5624 				U8 pass_count = rxdqs_trans->dqs_transition[dqs_i];
5625 				U8 offset = (pass_count * rxdqs_cal->dqsien_pi_adj_step) / 2;
5626 				U8 mck2ui, ui2pi, freq_div;
5627 				U8 tmp;
5628 
5629 				mck2ui = rxdqs_cal->dqsien_ui_per_mck;
5630 				ui2pi = rxdqs_cal->dqsien_pi_per_ui;
5631 				freq_div = rxdqs_cal->dqsien_freq_div;
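				/*
				 * Window center: start from the last delay where
				 * (lead, lag) == (1, 1) and advance by half of the observed
				 * high region, offset = pass_count * pi_adj_step / 2 (in PI);
				 * any PI overflow is carried into UI and MCK below.
				 */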
5632 
5633 				/* PI */
5634 				tmp = rxdqs_trans->dqsien_dly_pi_leadlag[dqs_i] + offset;
5635 				best_win->best_dqsien_dly_pi[dqs_i] = tmp % ui2pi;
5636 				best_win->best_dqsien_dly_pi_p1[dqs_i] =
5637 					best_win->best_dqsien_dly_pi[dqs_i];
5638 
5639 				/* UI & MCK - P0 */
5640 				tmp /= ui2pi;
5641 				tmp = rxdqs_trans->dqsien_dly_ui_leadlag[dqs_i] + tmp;
5642 				best_win->best_dqsien_dly_ui[dqs_i] = tmp % mck2ui;
5643 				best_win->best_dqsien_dly_mck[dqs_i] =
5644 					rxdqs_trans->dqsien_dly_mck_leadlag[dqs_i] + (tmp / mck2ui);
5645 
5646 				/* UI & MCK - P1 */
5647 				best_win->best_dqsien_dly_ui_p1[dqs_i] =
5648 					best_win->best_dqsien_dly_mck[dqs_i] * mck2ui +
5649 					best_win->best_dqsien_dly_ui[dqs_i] + freq_div; /* Total UI for Phase1 */
5650 				msg("Total UI for P1: %d, mck2ui %d\n",
5651 					best_win->best_dqsien_dly_ui_p1[dqs_i], mck2ui);
5652 				best_win->best_dqsien_dly_mck_p1[dqs_i] =
5653 					best_win->best_dqsien_dly_ui_p1[dqs_i] / mck2ui;
5654 				best_win->best_dqsien_dly_ui_p1[dqs_i] =
5655 					best_win->best_dqsien_dly_ui_p1[dqs_i] % mck2ui;
5656 
5657 				msg("best dqsien dly found for B%d: "
5658 					"(%2d, %2d, %2d)\n", dqs_i,
5659 					best_win->best_dqsien_dly_mck[dqs_i],
5660 					best_win->best_dqsien_dly_ui[dqs_i],
5661 					best_win->best_dqsien_dly_pi[dqs_i]);
5662 				passed_bytes |= 1 << dqs_i;
5663 
5664 				if (((p->data_width == DATA_WIDTH_16BIT) &&
5665 					(passed_bytes == 0x3)) ||
5666 					((p->data_width == DATA_WIDTH_32BIT) &&
5667 					(passed_bytes == 0xf))) {
5668 					dly_ui = dly_ui_end;
5669 					break;
5670 				}
5671 			}
5672 		} else {
5673 			/* Clear the lead/lag info in case the lead/lag flags toggled
5674 			 * while the gating counter & gating error are still incorrect
5675 			 */
5676 			rxdqs_trans->dqs_high[dqs_i] = 0;
5677 			rxdqs_trans->dqs_transition[dqs_i] = 0;
5678 			rxdqs_trans->dqs_transitioned[dqs_i] = 0;
5679 		}
5680 	}
5681 
5682 	*pass_byte_count = passed_bytes;
5683 	return dly_ui;
5684 }
5685 
5686 static DRAM_STATUS_T dramc_rx_dqs_gating_sw_cal(DRAMC_CTX_T *p,
5687 	U8 use_enhance_rdqs)
5688 {
5689 	struct rxdqs_gating_cal rxdqs_cal;
5690 	struct rxdqs_gating_trans rxdqs_trans;
5691 	struct rxdqs_gating_best_win rxdqs_best_win;
5692 	U8 dly_ui, dly_ui_start, dly_ui_end;
5693 	U8 pi_per_ui, ui_per_mck, freq_div;
5694 	U8 pass_byte_count;
5695 	U8 dqs_i;
5696 	U8 u1GatingErrorFlag=0;
5697 
5698 	if (p == NULL) {
5699 		err("[Error] Context NULL\n");
5700 		return DRAM_FAIL;
5701 	}
5702 
5703 	memset(&rxdqs_cal, 0, sizeof(struct rxdqs_gating_cal));
5704 	memset(&rxdqs_trans, 0, sizeof(struct rxdqs_gating_trans));
5705 	memset(&rxdqs_best_win, 0, sizeof(struct rxdqs_gating_best_win));
5706 
5707 	pi_per_ui = DQS_GW_PI_PER_UI; /* 1 UI = ? PI. Same as CBT; differ according to data rate?? */
5708 	ui_per_mck = DQS_GW_UI_PER_MCK; /* 1 mck = ? UI. Decided by (Tmck/Tck) * (Tck/Twck) */
5709 	if (vGet_Div_Mode(p) == DIV4_MODE)
5710 		freq_div = 2;
5711 	else
5712 		freq_div = 4;
5713 
5714 #if ENABLE_DDR800_OPEN_LOOP_MODE_OPTION
5715 	if (u1IsPhaseMode(p) == TRUE)
5716 		rxdqs_cal.dqsien_pi_adj_step = (0x1 << 4); // Divide by 16 (90 degree)
5717 	else
5718 #endif
5719 	rxdqs_cal.dqsien_pi_adj_step = DQS_GW_FINE_STEP;
5720 #if ENABLE_GATING_AUTOK_WA
5721 	if (__wa__gating_swk_for_autok)
5722 		rxdqs_cal.dqsien_pi_adj_step = pi_per_ui;
5723 #endif
5724 	rxdqs_cal.dqsien_pi_per_ui = pi_per_ui;
5725 	rxdqs_cal.dqsien_ui_per_mck = ui_per_mck;
5726 	rxdqs_cal.dqsien_freq_div = freq_div;
5727 
5728 	U32 reg_backup_address[ ] = {
5729 		(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL0)),
5730 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6)),
5731 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6)),
5732 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1)),
5733 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2)),
5734 	};
5735 
5736 	/* Register backup */
5737 	DramcBackupRegisters(p, reg_backup_address,
5738 		sizeof (reg_backup_address) / sizeof (U32));
5739 
5740 	if (!rxdqs_gating_bypass(p)) {
5741 		rxdqs_gating_sw_cal_init(p, use_enhance_rdqs);
5742 
5743 #if 1
5744 	#if (LP5_DDR4266_RDBI_WORKAROUND)
5745 	if((is_lp5_family(p)) && (p->frequency == 2133))
5746 		dly_ui_start = 15;
5747 	else if((is_lp5_family(p)) && (p->frequency == 2750))
5748 		dly_ui_start = 12;
5749 	else
5750 		dly_ui_start = u1GetGatingStartPos(p, AUTOK_OFF);//7; //12;ly_ui_start + 32;
5751 	#else
5752 	if((is_lp5_family(p)) && ((p->frequency == 2133) || (p->frequency == 2750)))
5753 		dly_ui_start = 5;
5754 	else
5755 		dly_ui_start = u1GetGatingStartPos(p, AUTOK_OFF);//7; //12;ly_ui_start + 32;
5756 	#endif
5757 
5758 	dly_ui_end = dly_ui_start + 32;
5759 	pass_byte_count = 0;
5760 #else
5761 	#if __LP5_COMBO__
5762 	if (is_lp5_family(p))
5763 	{
5764 		if (p->frequency == 1600)
5765 			dly_ui_start = 7; //12;
5766 		else
5767 			dly_ui_start = 8; //12;
5768 
5769 		dly_ui_end = dly_ui_start + 32;
5770 		pass_byte_count = 0;
5771 	}
5772 	else
5773 	#endif
5774 	{
5775 		 dly_ui_start = 9; //12; Eddie change to 9 for Hynix Normal Mode
5776 		 if(p->freq_sel==LP4_DDR4266)
5777 		 {
5778 			dly_ui_start = 16;
5779 		 }
5780 		 dly_ui_end = dly_ui_start + 32;
5781 		 pass_byte_count = 0;
5782 	}
5783 #endif
5784 
5785 
5786 	for (dly_ui = dly_ui_start; dly_ui < dly_ui_end;
5787 		dly_ui += DQS_GW_COARSE_STEP) {
5788 		rxdqs_gating_set_dqsien_dly(p, dly_ui, &rxdqs_cal);
5789 
5790 		for (rxdqs_cal.dqsien_dly_pi = 0; rxdqs_cal.dqsien_dly_pi <
5791 			pi_per_ui; rxdqs_cal.dqsien_dly_pi +=
5792 			rxdqs_cal.dqsien_pi_adj_step) {
5793 			dly_ui = rxdqs_gating_sw_cal(p, &rxdqs_trans, &rxdqs_cal,
5794 				&pass_byte_count, &rxdqs_best_win, dly_ui, dly_ui_end);
5795 
5796 			if (dly_ui == dly_ui_end)
5797 				break;
5798 		}
5799 	}
5800 
5801 		DramcEngine2End(p);
5802 
5803 		//check if there is no pass taps for each DQS
5804 		for (dqs_i=0; dqs_i<(p->data_width/DQS_BIT_NUMBER); dqs_i++)
5805 		{
5806 			if ((pass_byte_count & (1 << dqs_i)) == 0)
5807 			{
5808 				u1GatingErrorFlag=1;
5809 				/*TINFO="error, no pass taps in DQS_%d !!!\n", dqs_i*/
5810 				err("error, no pass taps in DQS_%d!\n", dqs_i);
5811 			}
5812 		}
5813 	if (u1GatingErrorFlag==0)
5814 		vSetCalibrationResult(p, DRAM_CALIBRATION_GATING, DRAM_OK);
5815 
5816 #if (ENABLE_GATING_AUTOK_WA)
5817 	if (!u1GatingErrorFlag && __wa__gating_swk_for_autok) {
5818 		U8 ui[DQS_NUMBER], ui_min = 0xff;
5819 		U8 dqs_index;
5820 		for (dqs_index = 0; dqs_index < (p->data_width/DQS_BIT_NUMBER); dqs_index++){
5821 			ui[dqs_index] = rxdqs_best_win.best_dqsien_dly_mck[dqs_index] * ui_per_mck +
5822 			rxdqs_best_win.best_dqsien_dly_ui[dqs_index];
5823 
5824 			if (ui[dqs_index] < ui_min)
5825 				ui_min = ui[dqs_index];
5826 		}
5827 		__wa__gating_autok_init_ui[p->rank] = ui_min;
5828 
5829 			DramcRestoreRegisters(p, reg_backup_address,
5830 				sizeof (reg_backup_address) / sizeof (U32));
5831 		return DRAM_OK;
5832 	}
5833 #endif
5834 	}
5835 
5836 	rxdqs_gating_fastk_save_restore(p, &rxdqs_best_win, &rxdqs_cal);
5837 	rxdqs_gating_misc_process(p, &rxdqs_best_win);
5838 
5839 	msg("[Gating] SW calibration Done\n");
5840 
5841 	/* Set MCK & UI */
5842 	rxdqs_gating_set_final_result(p, ui_per_mck, &rxdqs_best_win);
5843 
5844 	DramcRestoreRegisters(p, reg_backup_address,
5845 		sizeof (reg_backup_address) / sizeof (U32));
5846 
5847 	DramPhyReset(p);
5848 
5849 	return DRAM_OK;
5850 }
5851 
5852 /* LPDDR5 Rx DQS Gating */
5853 DRAM_STATUS_T dramc_rx_dqs_gating_cal(DRAMC_CTX_T *p,
5854 		u8 autok, U8 use_enhanced_rdqs)
5855 {
5856 	DRAM_STATUS_T ret;
5857 
5858 	vPrintCalibrationBasicInfo(p);
5859 
5860 #if ENABLE_GATING_AUTOK_WA
5861 	if (autok) {
5862 		__wa__gating_swk_for_autok = 1;
5863 		dramc_rx_dqs_gating_sw_cal(p, use_enhanced_rdqs);
5864 		__wa__gating_swk_for_autok = 0;
5865 	}
5866 #endif
5867 
5868 	// Default: set the result to FAIL
5869 	vSetCalibrationResult(p, DRAM_CALIBRATION_GATING, DRAM_FAIL);
5870 
5871 	/* Try HW auto calibration first. If it fails,
5872 	 * fall back to SW mode.
5873 	 */
5874 	if (autok) {
5875 #if ENABLE_GATING_AUTOK_WA
5876 		if (rxdqs_gating_bypass(p)) /* Already done by SWK */
5877 			return DRAM_OK;
5878 #endif
5879 		ret = dramc_rx_dqs_gating_auto_cal(p);
5880 		if (ret == DRAM_OK) {
5881 			vSetCalibrationResult(p, DRAM_CALIBRATION_GATING, DRAM_OK);
5882 			return DRAM_OK;
5883 		}
5884 
5885 		err("[Error] Gating auto calibration fail!!\n");
5886 	}
5887 
5888 	msg("[Gating] SW mode calibration\n");
5889 
5890 	return dramc_rx_dqs_gating_sw_cal(p, use_enhanced_rdqs);
5891 }
5892 
5893 ///TODO: wait for porting +++
5894 #if GATING_ADJUST_TXDLY_FOR_TRACKING
5895 void DramcRxdqsGatingPostProcess(DRAMC_CTX_T *p)
5896 {
5897 	U8 dqs_i;
5898 	U8 u1RankIdx, u1RankMax;
5899 	S8 s1ChangeDQSINCTL;
5900 #if XRTRTR_NEW_CROSS_RK_MODE
5901 	U16 u2PHSINCTL = 0;
5902 	U32 u4Rank_Sel_MCK_P0[2], u4Rank_Sel_MCK_P1[2], u4RANKINCTL_STB;
5903 #endif
5904 #if RDSEL_TRACKING_EN
5905 	U32 u4PI_value[2] = {0};
5906 #endif
5907 	U32 backup_rank;
5908 	U32 u4ReadDQSINCTL, u4RankINCTL_ROOT, u4XRTR2R, reg_TX_dly_DQSgated_min = 0;
5909 	U8 mck2ui_shift;
5910 
5911 	backup_rank = u1GetRank(p);
5912 
5913 #ifdef XRTR2R_PERFORM_ENHANCE_DQSG_RX_DLY
5914 	if (vGet_Div_Mode(p) == DIV8_MODE)
5915 	{
5916 		// wei-jen: DQSgated_min should be 2 when freq >= 1333, 1 when freq < 1333
5917 		if (p->frequency >= 1333)
5918 		{
5919 			reg_TX_dly_DQSgated_min = 2;
5920 		}
5921 		else
5922 		{
5923 			reg_TX_dly_DQSgated_min = 1;
5924 		}
5925 	}
5926 	else // for LPDDR4 1:4 mode
5927 	{
5928 		// 1866,1600,1333,1200	: reg_TX_dly_DQSgated (min) =2
5929 		reg_TX_dly_DQSgated_min = 2;
5930 	}
5931 #else
5932 	// wei-jen: DQSgated_min should be 3 when freq >= 1333, 2 when freq < 1333
5933 	if (p->frequency >= 1333)
5934 	{
5935 		reg_TX_dly_DQSgated_min = 3;
5936 	}
5937 	else
5938 	{
5939 		reg_TX_dly_DQSgated_min = 2;
5940 	}
5941 #endif
5942 
5943 	//Sylv_ia MP setting is switched to new mode, so RANKRXDVS can be set to 0 (reviewed by HJ Huang)
5944 #if 0
5945 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_SHU_B0_DQ7), u1RankRxDVS, SHU_B0_DQ7_R_DMRANKRXDVS_B0);
5946 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_SHU_B1_DQ7), u1RankRxDVS, SHU_B1_DQ7_R_DMRANKRXDVS_B1);
5947 #endif
5948 	// === End of DVS setting =====
5949 
5950 	s1ChangeDQSINCTL = reg_TX_dly_DQSgated_min - u1TXDLY_Cal_min;
5951 
5952 	msg("[RxdqsGatingPostProcess] freq %d\n"
5953 					"ChangeDQSINCTL %d, reg_TX_dly_DQSgated_min %d, u1TXDLY_Cal_min %d\n",
5954 						p->frequency,
5955 						s1ChangeDQSINCTL, reg_TX_dly_DQSgated_min, u1TXDLY_Cal_min);
5956 	reg_msg("[RxdqsGatingPostProcess] freq %d\n"
5957 					   "ChangeDQSINCTL %d, reg_TX_dly_DQSgated_min %d, u1TXDLY_Cal_min %d\n",
5958 						p->frequency,
5959 						s1ChangeDQSINCTL, reg_TX_dly_DQSgated_min, u1TXDLY_Cal_min);
5960 
5961 	if (vGet_Div_Mode(p) == DIV16_MODE)
5962 		mck2ui_shift = 4;
5963 	else if (vGet_Div_Mode(p) == DIV8_MODE)
5964 		mck2ui_shift = 3;
5965 	else
5966 		mck2ui_shift = 2;
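	/*
	 * Illustrative example: a DQSINCTL change of +1 adds 1 << mck2ui_shift
	 * UI (e.g. 16 UI in DIV16 mode) to each byte's backed-up gating delay
	 * below, which is then split back into (MCK, UI) pairs.
	 */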
5967 
5968 	if (s1ChangeDQSINCTL != 0)	// need to change DQSINCTL and TXDLY of each byte
5969 	{
5970 		u1TXDLY_Cal_min += s1ChangeDQSINCTL;
5971 		u1TXDLY_Cal_max += s1ChangeDQSINCTL;
5972 
5973 		if (p->support_rank_num == RANK_DUAL)
5974 			u1RankMax = RANK_MAX;
5975 		else
5976 			 u1RankMax = RANK_1;
5977 
5978 		for (u1RankIdx = 0; u1RankIdx < u1RankMax; u1RankIdx++)
5979 		{
5980 			msg2("Rank: %d\n", u1RankIdx);
5981 			reg_msg("Rank: %d\n", u1RankIdx);
5982 
5983 			for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++)
5984 			{
5985 #if 1
5986 				U8 total_ui, total_ui_P1;
5987 				total_ui = (ucbest_coarse_mck_backup[u1RankIdx][dqs_i] << 4) + ucbest_coarse_ui_backup[u1RankIdx][dqs_i];
5988 				total_ui_P1 = (ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i] << 4) + ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i];
5989 
5990 				total_ui += (s1ChangeDQSINCTL << mck2ui_shift);
5991 				total_ui_P1 += (s1ChangeDQSINCTL << mck2ui_shift);
5992 
5993 				ucbest_coarse_mck_backup[u1RankIdx][dqs_i] = (total_ui >> 4);
5994 				ucbest_coarse_ui_backup[u1RankIdx][dqs_i] = total_ui & 0xf;
5995 
5996 				ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i] = (total_ui_P1 >> 4);
5997 				ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i] = total_ui_P1 & 0xf;
5998 #else
5999 				if (vGet_Div_Mode(p) == DIV8_MODE)
6000 				{
6001 					u4ReadTXDLY[u1RankIdx][dqs_i] = ucbest_coarse_mck_backup[u1RankIdx][dqs_i];
6002 					u4ReadTXDLY_P1[u1RankIdx][dqs_i] = ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i];
6003 
6004 					u4ReadTXDLY[u1RankIdx][dqs_i] += s1ChangeDQSINCTL;
6005 					u4ReadTXDLY_P1[u1RankIdx][dqs_i] += s1ChangeDQSINCTL;
6006 
6007 					ucbest_coarse_mck_backup[u1RankIdx][dqs_i] = u4ReadTXDLY[u1RankIdx][dqs_i];
6008 					ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i] = u4ReadTXDLY_P1[u1RankIdx][dqs_i];
6009 				}
6010 				else // LP3 or LP4 1:4 mode
6011 				{
6012 					u4ReadTXDLY[u1RankIdx][dqs_i] = ((ucbest_coarse_mck_backup[u1RankIdx][dqs_i] << 1) + ((ucbest_coarse_ui_backup[u1RankIdx][dqs_i] >> 2) & 0x1));
6013 					u4ReadTXDLY_P1[u1RankIdx][dqs_i] = ((ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i] << 1) + ((ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i] >> 2) & 0x1));
6014 
6015 					u4ReadTXDLY[u1RankIdx][dqs_i] += s1ChangeDQSINCTL;
6016 					u4ReadTXDLY_P1[u1RankIdx][dqs_i] += s1ChangeDQSINCTL;
6017 
6018 					ucbest_coarse_mck_backup[u1RankIdx][dqs_i] = (u4ReadTXDLY[u1RankIdx][dqs_i] >> 1);
6019 					ucbest_coarse_ui_backup[u1RankIdx][dqs_i] = ((u4ReadTXDLY[u1RankIdx][dqs_i] & 0x1) << 2) + (ucbest_coarse_ui_backup[u1RankIdx][dqs_i] & 0x3);
6020 
6021 					ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i] = (u4ReadTXDLY_P1[u1RankIdx][dqs_i] >> 1);
6022 					ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i] = ((u4ReadTXDLY_P1[u1RankIdx][dqs_i] & 0x1) << 2) + (ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i] & 0x3);
6023 				}
6024 #endif
6025 				msg("best DQS%d dly(2T, 0.5T) = (%d, %d)\n", dqs_i, ucbest_coarse_mck_backup[u1RankIdx][dqs_i], ucbest_coarse_ui_backup[u1RankIdx][dqs_i]);
6026 				reg_msg("PostProcess best DQS%d dly(2T, 0.5T) = (%d, %d)\n", dqs_i, ucbest_coarse_mck_backup[u1RankIdx][dqs_i], ucbest_coarse_ui_backup[u1RankIdx][dqs_i]);
6027 			}
6028 			for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++)
6029 			{
6030 				msg("best DQS%d P1 dly(2T, 0.5T) = (%d, %d)\n", dqs_i, ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i], ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i]);
6031 				reg_msg("PostProcess best DQS%d P1 dly(2T, 0.5T) = (%d, %d)\n", dqs_i, ucbest_coarse_mck_P1_backup[u1RankIdx][dqs_i], ucbest_coarse_ui_P1_backup[u1RankIdx][dqs_i]);
6032 			}
6033 		}
6034 
6035 		for (u1RankIdx = 0; u1RankIdx < u1RankMax; u1RankIdx++)
6036 		{
6037 			vSetRank(p, u1RankIdx);
6038 			// 4T or 2T coarse tune
6039 			/* Set DQSIEN delay in MCK and UI */
6040 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_MCK_UI_DLY),
6041 				P_Fld(ucbest_coarse_mck_backup[u1RankIdx][0],
6042 				SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P0_B0) |
6043 				P_Fld(ucbest_coarse_ui_backup[u1RankIdx][0],
6044 				SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P0_B0) |
6045 				P_Fld(ucbest_coarse_mck_P1_backup[u1RankIdx][0],
6046 				SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P1_B0) |
6047 				P_Fld(ucbest_coarse_ui_P1_backup[u1RankIdx][0],
6048 				SHU_RK_B0_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P1_B0));
6049 
6050 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_DQSIEN_MCK_UI_DLY),
6051 				P_Fld(ucbest_coarse_mck_backup[u1RankIdx][1],
6052 				SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P0_B1) |
6053 				P_Fld(ucbest_coarse_ui_backup[u1RankIdx][1],
6054 				SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P0_B1) |
6055 				P_Fld(ucbest_coarse_mck_P1_backup[u1RankIdx][1],
6056 				SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_MCK_P1_B1) |
6057 				P_Fld(ucbest_coarse_ui_P1_backup[u1RankIdx][1],
6058 				SHU_RK_B1_DQSIEN_MCK_UI_DLY_DQSIEN_UI_P1_B1));
6059 		#if RDSEL_TRACKING_EN
6060 			//Byte 0
6061 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_INI_UIPI),
6062 				(ucbest_coarse_mck_backup[u1RankIdx][0] << 4) | (ucbest_coarse_ui_backup[u1RankIdx][0]),
6063 				SHU_R0_B0_INI_UIPI_CURR_INI_UI_B0);//UI
6064 			//Byte 1
6065 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_INI_UIPI),
6066 				(ucbest_coarse_mck_backup[u1RankIdx][1] << 4) | (ucbest_coarse_ui_backup[u1RankIdx][1]),
6067 				SHU_R0_B1_INI_UIPI_CURR_INI_UI_B1);//UI
6068 		#endif
6069 		}
6070 	}
6071 	vSetRank(p, backup_rank);
6072 
6073 	u4ReadDQSINCTL = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RK_DQSCTL),
6074 		MISC_SHU_RK_DQSCTL_DQSINCTL);
6075 	reg_msg("u4ReadDQSINCTL=%d\n", u4ReadDQSINCTL);
6076 	u4ReadDQSINCTL -= s1ChangeDQSINCTL;
6077 
6078 	#if ENABLE_READ_DBI
6079 	if (p->DBI_R_onoff[p->dram_fsp])
6080 	{
6081 		u4ReadDQSINCTL++;
6082 		#if 0//cc mark for reg not found
6083 		u4ReadRODT = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_ODTCTRL), SHU_ODTCTRL_RODT);
6084 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_ODTCTRL), u4ReadRODT + 1, SHU_ODTCTRL_RODT); //update RODT value when READ_DBI is on
6085 		#endif
6086 	}
6087 	#endif
6088 
6089 #if XRTRTR_NEW_CROSS_RK_MODE
6090 		for (dqs_i = 0; dqs_i < (p->data_width / DQS_BIT_NUMBER); dqs_i++)
6091 		{
6092 			if (ucbest_coarse_mck_backup[RANK_0][dqs_i] > ucbest_coarse_mck_backup[RANK_1][dqs_i])
6093 			{
6094 				u4Rank_Sel_MCK_P0[dqs_i] = (ucbest_coarse_mck_backup[RANK_0][dqs_i] > 0)? (ucbest_coarse_mck_backup[RANK_0][dqs_i] - 1): 0;
6095 				u4Rank_Sel_MCK_P1[dqs_i] = (ucbest_coarse_mck_P1_backup[RANK_0][dqs_i] > 0)? (ucbest_coarse_mck_P1_backup[RANK_0][dqs_i] - 1): 0;
6096 			}
6097 			else
6098 			{
6099 				u4Rank_Sel_MCK_P0[dqs_i] = (ucbest_coarse_mck_backup[RANK_1][dqs_i] > 0)? (ucbest_coarse_mck_backup[RANK_1][dqs_i] - 1): 0;
6100 				u4Rank_Sel_MCK_P1[dqs_i] = (ucbest_coarse_mck_P1_backup[RANK_1][dqs_i] > 0)? (ucbest_coarse_mck_P1_backup[RANK_1][dqs_i] - 1): 0;
6101 			}
6102 		}
6103 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_RANK_SELPH_UI_DLY),
6104 			P_Fld(u4Rank_Sel_MCK_P0[0], SHU_B0_RANK_SELPH_UI_DLY_RANKSEL_MCK_DLY_P0_B0) |
6105 			P_Fld(u4Rank_Sel_MCK_P1[0], SHU_B0_RANK_SELPH_UI_DLY_RANKSEL_MCK_DLY_P1_B0));
6106 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_RANK_SELPH_UI_DLY),
6107 			P_Fld(u4Rank_Sel_MCK_P0[1], SHU_B1_RANK_SELPH_UI_DLY_RANKSEL_MCK_DLY_P0_B1) |
6108 			P_Fld(u4Rank_Sel_MCK_P1[1], SHU_B1_RANK_SELPH_UI_DLY_RANKSEL_MCK_DLY_P1_B1));
6109 
6110 		u4RANKINCTL_STB = (u4ReadDQSINCTL > 2)? (u4ReadDQSINCTL - 2): 0;
6111 		u2PHSINCTL = (u4ReadDQSINCTL == 0)? 0: (u4ReadDQSINCTL - 1);
6112 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RANKCTL), u4RANKINCTL_STB, MISC_SHU_RANKCTL_RANKINCTL_STB);
6113 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_RANK_SEL_STB), u2PHSINCTL, SHU_MISC_RANK_SEL_STB_RANK_SEL_PHSINCTL);
6114 #endif
6115 
6116 #ifdef XRTR2R_PERFORM_ENHANCE_DQSG_RX_DLY
6117 	// Wei-Jen: RANKINCTL_RXDLY = RANKINCTL = RankINCTL_ROOT = u4ReadDQSINCTL-2, if XRTR2R_PERFORM_ENHANCE_DQSG_RX_DLY enable
6118 	// Wei-Jen: New algorithm : u4ReadDQSINCTL-2 >= 0
6119 	if (u4ReadDQSINCTL >= 2)
6120 	{
6121 		u4RankINCTL_ROOT = u4ReadDQSINCTL - 2;
6122 	}
6123 	else
6124 	{
6125 		u4RankINCTL_ROOT = 0;
6126 		err("u4RankINCTL_ROOT <2, Please check\n");
6127 #if (__ETT__)
6128 		while (1);
6129 #endif
6130 	}
6131 #else
6132 	//Modified for corner ICs that failed the HQA XTLV test
6133 	if (u4ReadDQSINCTL >= 3)
6134 	{
6135 		u4RankINCTL_ROOT = u4ReadDQSINCTL - 3;
6136 	}
6137 	else
6138 	{
6139 		u4RankINCTL_ROOT = 0;
6140 		err("u4RankINCTL_ROOT <3, Risk for supporting 1066/RL8\n");
6141 	}
6142 #endif
6143 
6144 	//DQSINCTL
6145 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RK_DQSCTL),
6146 		u4ReadDQSINCTL, MISC_SHU_RK_DQSCTL_DQSINCTL);  //Rank0 DQSINCTL
6147 	vSetRank(p, RANK_1);
6148 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RK_DQSCTL),
6149 		u4ReadDQSINCTL, MISC_SHU_RK_DQSCTL_DQSINCTL);  //Rank1 DQSINCTL
6150 	vSetRank(p, backup_rank);
6151 
6152 	//No need to update RODT. If we update RODT, also need to update SELPH_ODTEN0_TXDLY
6153 	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_ODTCTRL), u4ReadDQSINCTL, SHU_ODTCTRL_RODT);			 //RODT = DQSINCTL
6154 
6155 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RANKCTL),
6156 		u4ReadDQSINCTL, MISC_SHU_RANKCTL_RANKINCTL_PHY);  //RANKINCTL_PHY = DQSINCTL
6157 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RANKCTL),
6158 		u4RankINCTL_ROOT, MISC_SHU_RANKCTL_RANKINCTL);	//RANKINCTL= DQSINCTL -3
6159 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RANKCTL),
6160 		u4RankINCTL_ROOT, MISC_SHU_RANKCTL_RANKINCTL_ROOT1);  //RANKINCTL_ROOT1= DQSINCTL -3
6161 
6162 #ifdef XRTR2R_PERFORM_ENHANCE_DQSG_RX_DLY
6163 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RANKCTL),
6164 		u4RankINCTL_ROOT, MISC_SHU_RANKCTL_RANKINCTL_RXDLY);
6165 
6166 	u4XRTR2R = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_ACTIM_XRT), SHU_ACTIM_XRT_XRTR2R);
6167 
6168 	msg2("TX_dly_DQSgated check: min %d  max %d, ChangeDQSINCTL=%d\n", u1TXDLY_Cal_min, u1TXDLY_Cal_max, s1ChangeDQSINCTL);
6169 	msg2("DQSINCTL=%d, RANKINCTL=%d, u4XRTR2R=%d\n", u4ReadDQSINCTL, u4RankINCTL_ROOT, u4XRTR2R);
6170 	reg_msg("DQSINCTL=%d, RANKINCTL=%d, u4XRTR2R=%d\n", u4ReadDQSINCTL, u4RankINCTL_ROOT, u4XRTR2R);
6171 #else
6172 	//XRTR2R = A-phy forbidden margin (6T) + reg_TX_dly_DQSgated (max) + Roundup(tDQSCKdiff/MCK + 0.25MCK) + 1 (0.5T sel_ph margin) - 1 (forbidden margin overlap part)
6173 	//Roundup(tDQSCKdiff/MCK+1UI) =1~2 all LP3 and LP4 timing
6174 	//u4XRTR2R= 8 + u1TXDLY_Cal_max;  // 6+ u1TXDLY_Cal_max +2
6175 
6176 	//Modified for corner ICs that failed the HQA XTLV test @ 3200MHz
6177 	u4XRTR2R = 8 + u1TXDLY_Cal_max + 1;  // 6+ u1TXDLY_Cal_max +2
6178 	if (u4XRTR2R > 12)
6179 	{
6180 		u4XRTR2R = 12;
6181 		err("XRTR2R > 12, Max value is 12\n");
6182 	}
6183 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_ACTIM_XRT), u4XRTR2R, SHU_ACTIM_XRT_XRTR2R);
6184 
6185 	msg2("TX_dly_DQSgated check: min %d  max %d, ChangeDQSINCTL=%d\n", u1TXDLY_Cal_min, u1TXDLY_Cal_max, s1ChangeDQSINCTL);
6186 	msg2("DQSINCTL=%d, RANKINCTL=%d, u4XRTR2R=%d\n", u4ReadDQSINCTL, u4RankINCTL_ROOT, u4XRTR2R);
6187 	reg_msg("DQSINCTL=%d, RANKINCTL=%d, u4XRTR2R=%d\n", u4ReadDQSINCTL, u4RankINCTL_ROOT, u4XRTR2R);
6188 #endif
6189 
6190 #if 0//ENABLE_RODT_TRACKING
6191 	//Because Ki_bo+, WE2, Bi_anco, Vin_son... and later projects support WDQS, they need to apply the correct new setting
6192 	//The following 2 items are independent
6193 	//1. if TX_WDQS on(by vendor_id) or p->odt_onoff = 1, ROEN/RODTE/RODTE2 = 1
6194 	//2. if ENABLE_RODT_TRACKING on, apply new setting and RODTENSTB_TRACK_EN = ROEN
6195 	// LP4 support only
6196 	U8 u1ReadROEN;
6197 	u1ReadROEN = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_ODTCTRL), SHU_ODTCTRL_ROEN);
6198 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_RODTENSTB), P_Fld(0xff, SHU_RODTENSTB_RODTENSTB_EXT) | \
6199 															P_Fld(u1ReadROEN, SHU_RODTENSTB_RODTENSTB_TRACK_EN));
6200 #endif
6201 
6202 #ifdef XRTR2W_PERFORM_ENHANCE_RODTEN
6203 	// LP4 support only
6204 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RODTENSTB),
6205 		P_Fld(0x0fff, MISC_SHU_RODTENSTB_RODTENSTB_EXT) |
6206 		P_Fld(1, MISC_SHU_RODTENSTB_RODTEN_P1_ENABLE) |
6207 		P_Fld(1, MISC_SHU_RODTENSTB_RODTENSTB_TRACK_EN));
6208 #endif
6209 
6210 	vSetRank(p, backup_rank);
6211 
6212 
6213 }
6214 #endif
6215 
6216 #if GATING_ADJUST_TXDLY_FOR_TRACKING
6217 void DramcRxdqsGatingPreProcess(DRAMC_CTX_T *p)
6218 {
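	// Reset the gating TXDLY min/max trackers before per-rank DQS gating calibration.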
6219 	u1TXDLY_Cal_min = 0xff;
6220 	u1TXDLY_Cal_max = 0;
6221 }
6222 #endif
6223 ///TODO: wait for porting ---
6224 
6225 #endif
6226 
6227 #if RDSEL_TRACKING_EN
6228 void RDSELRunTimeTracking_preset(DRAMC_CTX_T *p)
6229 {
6230 	U8 u1RankIdx;
6231 	S32 s4PosVH, s4NegVH;
6232 	U32 u4Gating_shift=0, u4Gating_origin_B0=0, u4Gating_origin_B1=0;
6233 	U32 u4Gating_origin_final=0xff;
6234 
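	// Negative threshold: convert 400 (assumed to be ps) into PI units (1 PI = tCK/64).
	// Example: at p->frequency = 1600 MHz, tCK = 1000000/1600 = 625 ps, 625/64 = 9 (integer math),
	// so s4NegVH = divRoundClosest(400, 9) = 44.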
6235 	s4NegVH = divRoundClosest(400, ((1000000 / p->frequency) / 64));
6236 
6237 	for (u1RankIdx = 0; u1RankIdx < p->support_rank_num; u1RankIdx++)
6238 	{
6239 		u4Gating_origin_B0 = ((ucbest_coarse_mck_backup[u1RankIdx][0] << 4) | (ucbest_coarse_ui_backup[u1RankIdx][0]));
6240 		u4Gating_origin_B1 = ((ucbest_coarse_mck_backup[u1RankIdx][1] << 4) | (ucbest_coarse_ui_backup[u1RankIdx][1]));
6241 
6242 		if (u4Gating_origin_B0 < u4Gating_origin_B1)
6243 		{
6244 			u4Gating_origin_final = (u4Gating_origin_B0 < u4Gating_origin_final) ? u4Gating_origin_B0 : u4Gating_origin_final;
6245 		}
6246 		else
6247 		{
6248 			u4Gating_origin_final = (u4Gating_origin_B1 < u4Gating_origin_final) ? u4Gating_origin_B1 : u4Gating_origin_final;
6249 		}
6250 	}
6251 
6252 	u4Gating_shift = ((((u4Gating_origin_final >> 3) & 0x1f) << 4) | (u4Gating_origin_final & 7)) - u4Gating_origin_B0;
6253 
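	// Positive threshold adds the gating shift converted from UI to PI (<< 5, i.e. 32 PI per UI).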
6254 	s4PosVH = s4NegVH + (u4Gating_shift << 5);
6255 
6256 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_RDSEL_TRACK), P_Fld(s4PosVH, SHU_MISC_RDSEL_TRACK_SHU_GW_THRD_POS)
6257 													| P_Fld(-s4NegVH, SHU_MISC_RDSEL_TRACK_SHU_GW_THRD_NEG));
6258 }
6259 #endif
6260 
6261 #if RDDQC_PINMUX_WORKAROUND
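// Remap MRR result bits so RDDQC compares follow the package/board DQ order; the tables below
// give the per-channel DQ-to-MRR-bit order for each pinmux type (DSC / LPBK / EMCP).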
6262 static void RDDQCPinmuxWorkaround(DRAMC_CTX_T *p)
6263 {
6264 	U8 *uiLPDDR_RDDQC_Mapping;
6265 #if (__LP5_COMBO__)
6266 	const U8 uiLPDDR5_RDDQC_Mapping_POP[CHANNEL_NUM][16] =
6267 	{
6268 		{
6269 			8, 9, 10, 11, 12, 15, 14, 13,
6270 			0, 1, 2, 3, 4, 7, 6, 5,
6271 		},
6272 		#if (CHANNEL_NUM>1)
6273 		{
6274 			8, 9, 10, 11, 12, 15, 14, 13,
6275 			0, 1, 2, 3, 4, 7, 6, 5,
6276 		},
6277 		#endif
6278 	};
6279 
6280 #endif
6281 	const U8 uiLPDDR4_RDDQC_Mapping_POP[PINMUX_MAX][CHANNEL_NUM][16] =
6282 	{
6283 		{
6284 		// for DSC
6285 			//CH-A
6286 			{
6287 				0, 1, 6, 7, 4, 5, 3, 2,
6288 				9, 8, 11, 10, 15, 14, 12, 13
6289 			},
6290 			#if (CHANNEL_NUM>1)
6291 			//CH-B
6292 			{
6293 				1, 0, 5, 4, 7, 2, 3, 6,
6294 				8, 9, 11, 10, 12, 14, 13, 15
6295 			},
6296 			#endif
6297 			#if (CHANNEL_NUM>2)
6298 			//CH-C
6299 			{
6300 				0, 1, 6, 7, 4, 5, 3, 2,
6301 				9, 8, 11, 10, 15, 14, 12, 13
6302 			},
6303 			//CH-D
6304 			{
6305 				1, 0, 5, 4, 7, 2, 3, 6,
6306 				8, 9, 11, 10, 12, 14, 13, 15
6307 			},
6308 			#endif
6309 		},
6310 		{
6311 		// for LPBK
6312 			// TODO: need porting
6313 		},
6314 		{
6315 		// for EMCP
6316 			//CH-A
6317 			{
6318 				1, 0, 3, 2, 4, 7, 6, 5,
6319 				8, 9, 10, 12, 15, 14, 11, 13
6320 			},
6321 			#if (CHANNEL_NUM>1)
6322 			//CH-B
6323 			{
6324 				0, 1, 7, 4, 2, 5, 6, 3,
6325 				9, 8, 10, 12, 11, 14, 13, 15
6326 			},
6327 			#endif
6328 			#if (CHANNEL_NUM>2)
6329 			//CH-C
6330 			{
6331 				1, 0, 3, 2, 4, 7, 6, 5,
6332 				8, 9, 10, 12, 15, 14, 11, 13
6333 			},
6334 			//CH-D
6335 			{
6336 				0, 1, 7, 4, 2, 5, 6, 3,
6337 				9, 8, 10, 12, 11, 14, 13, 15
6338 			},
6339 			#endif
6340 		}
6341 	};
6342 
6343 	#if (__LP5_COMBO__)
6344 	if (is_lp5_family(p))
6345 	{
6346 		uiLPDDR_RDDQC_Mapping = (U8 *)uiLPDDR5_RDDQC_Mapping_POP[p->channel];
6347 	}
6348 	else
6349 	#endif
6350 	{
6351 		uiLPDDR_RDDQC_Mapping = (U8 *)uiLPDDR4_RDDQC_Mapping_POP[p->DRAMPinmux][p->channel];
6352 	}
6353 
6354 
6355 	//Set RDDQC pinmux
6356 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX1), P_Fld(uiLPDDR_RDDQC_Mapping[0], MRR_BIT_MUX1_MRR_BIT0_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[1], MRR_BIT_MUX1_MRR_BIT1_SEL) |
6357 															   P_Fld(uiLPDDR_RDDQC_Mapping[2], MRR_BIT_MUX1_MRR_BIT2_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[3], MRR_BIT_MUX1_MRR_BIT3_SEL));
6358 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX2), P_Fld(uiLPDDR_RDDQC_Mapping[4], MRR_BIT_MUX2_MRR_BIT4_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[5], MRR_BIT_MUX2_MRR_BIT5_SEL) |
6359 															   P_Fld(uiLPDDR_RDDQC_Mapping[6], MRR_BIT_MUX2_MRR_BIT6_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[7], MRR_BIT_MUX2_MRR_BIT7_SEL));
6360 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX3), P_Fld(uiLPDDR_RDDQC_Mapping[8], MRR_BIT_MUX3_MRR_BIT8_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[9], MRR_BIT_MUX3_MRR_BIT9_SEL) |
6361 															   P_Fld(uiLPDDR_RDDQC_Mapping[10], MRR_BIT_MUX3_MRR_BIT10_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[11], MRR_BIT_MUX3_MRR_BIT11_SEL));
6362 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX4), P_Fld(uiLPDDR_RDDQC_Mapping[12], MRR_BIT_MUX4_MRR_BIT12_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[13], MRR_BIT_MUX4_MRR_BIT13_SEL) |
6363 															   P_Fld(uiLPDDR_RDDQC_Mapping[14], MRR_BIT_MUX4_MRR_BIT14_SEL) | P_Fld(uiLPDDR_RDDQC_Mapping[15], MRR_BIT_MUX4_MRR_BIT15_SEL));
6364 
6365 }
6366 #endif
6367 
6368 #define RDDQCGOLDEN_LP5_MR30_BIT_CTRL_LOWER   RDDQCGOLDEN_MR15_GOLDEN
6369 #define RDDQCGOLDEN_LP5_MR31_BIT_CTRL_UPPER   RDDQCGOLDEN_MR20_GOLDEN
6370 #define RDDQCGOLDEN_LP5_MR32_PATTERN_A		  RDDQCGOLDEN_MR32_GOLDEN
6371 #define RDDQCGOLDEN_LP5_MR33_PATTERN_B		  RDDQCGOLDEN_MR40_GOLDEN
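// The RDDQCGOLDEN register fields are named after the LP4 golden MRs (MR15/MR20/MR32/MR40);
// the aliases above reuse the same fields for the LP5 golden settings.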
6372 U32 DramcRxWinRDDQCInit(DRAMC_CTX_T *p)
6373 {
6374 	U8 RDDQC_Bit_Ctrl_Lower = 0x55;
6375 	U8 RDDQC_Bit_Ctrl_Upper = 0x55;
6376 	U8 RDDQC_Pattern_A = 0x5A;
6377 	U8 RDDQC_Pattern_B = 0x3C;
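	// Golden defaults: 0x55 invert masks for the lower/upper bytes, and the standard LPDDR4
	// DQ-calibration patterns "A" (0x5A) and "B" (0x3C).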
6378 
6379 #if FOR_DV_SIMULATION_USED == 1
6380 	cal_sv_rand_args_t *psra = get_psra();
6381 
6382 	if (psra) {
6383 		RDDQC_Bit_Ctrl_Lower = psra->low_byte_invert_golden & 0xFF;
6384 		RDDQC_Bit_Ctrl_Upper = psra->upper_byte_invert_golden & 0xFF;
6385 		RDDQC_Pattern_A = psra->mr_dq_a_golden;
6386 		RDDQC_Pattern_B = psra->mr_dq_b_golden;
6387 
6388 		/*
6389 		 * TODO
6390 		 *
6391 		 * sv also passes mr20_6 and mr20_7 to sa.
6392 		 * currently, sa does NOT use these two random arguments.
6393 		 */
6394 	}
6395 #endif /* FOR_DV_SIMULATION_USED == 1 */
6396 
6397 	// Disable Read DBI
6398 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ7), 0, SHU_B0_DQ7_R_DMDQMDBI_SHU_B0);
6399 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ7), 0, SHU_B1_DQ7_R_DMDQMDBI_SHU_B1);
6400 
6401 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), u1GetRank(p), SWCMD_CTRL0_MRSRK);
6402 
6403 #if RDDQC_PINMUX_WORKAROUND
6404 	// Translate pin order by MRR bit sel
6405 	RDDQCPinmuxWorkaround(p);
6406 #endif
6407 
6408 	// Set golden values into dram MR
6409 #if (__LP5_COMBO__ == TRUE)
6410 	if (TRUE == is_lp5_family(p))
6411 	{
6412 		DramcModeRegWriteByRank(p, p->rank, 31, RDDQC_Bit_Ctrl_Lower);
6413 		DramcModeRegWriteByRank(p, p->rank, 32, RDDQC_Bit_Ctrl_Upper);
6414 		DramcModeRegWriteByRank(p, p->rank, 33, RDDQC_Pattern_A);
6415 		DramcModeRegWriteByRank(p, p->rank, 34, RDDQC_Pattern_B);
6416 	}
6417 	else
6418 #endif
6419 	{
6420 		DramcModeRegWriteByRank(p, p->rank, 15, RDDQC_Bit_Ctrl_Lower);
6421 		DramcModeRegWriteByRank(p, p->rank, 20, RDDQC_Bit_Ctrl_Upper);
6422 		DramcModeRegWriteByRank(p, p->rank, 32, RDDQC_Pattern_A);
6423 		DramcModeRegWriteByRank(p, p->rank, 40, RDDQC_Pattern_B);
6424 	}
6425 
6426 	//Set golden values into RG; note that the MR indexes in the RG field names reference LP4
6427 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RDDQCGOLDEN),
6428 			P_Fld(RDDQC_Bit_Ctrl_Lower, RDDQCGOLDEN_LP5_MR30_BIT_CTRL_LOWER) |
6429 			P_Fld(RDDQC_Bit_Ctrl_Upper, RDDQCGOLDEN_LP5_MR31_BIT_CTRL_UPPER) |
6430 			P_Fld(RDDQC_Pattern_A, RDDQCGOLDEN_LP5_MR32_PATTERN_A) |
6431 			P_Fld(RDDQC_Pattern_B, RDDQCGOLDEN_LP5_MR33_PATTERN_B));
6432 
6433 	// Open gated clock, by KaiHsin   (DCM)
6434 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ8),
6435 			P_Fld(1, SHU_B0_DQ8_R_DMRXDLY_CG_IG_B0));
6436 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ8),
6437 			P_Fld(1, SHU_B1_DQ8_R_DMRXDLY_CG_IG_B1));
6438 
6439 #if (__LP5_COMBO__ == TRUE)
6440 	if (is_lp5_family(p))
6441 	{
6442 		// Set function mode applied to DQ & DMI
6443 //		  U8 RDDQC_RDC_DQ_mode = 0;
6444 //		  U8 RDDQC_RDC_DMI_mode = 0;
6445 
6446 //		  vSetLP5DramRDDQC_DQandDMI(p, RDDQC_RDC_DQ_mode, RDDQC_RDC_DMI_mode);
6447 
6448 //		  vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RDDQCGOLDEN1),
6449 //				  P_Fld(RDDQC_RDC_DQ_mode, RDDQCGOLDEN1_LP5_MR20_7_GOLDEN) |
6450 //				  P_Fld(RDDQC_RDC_DMI_mode, RDDQCGOLDEN1_LP5_MR20_6_GOLDEN));
6451 
6452 		if (is_heff_mode(p) == FALSE)
6453 		{
6454 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_COMMON0), 1, SHU_COMMON0_LP5WCKON);
6455 
6456 			// Enable MR18 "WCK always ON mode"
6457 			vSetLP5Dram_WCKON_OnOff(p, ON);
6458 		}
6459 
6460 		RunTime_SW_Cmd(p, RUNTIME_SWCMD_CAS_FS);
6461 	}
6462 #endif
6463 	return 0;
6464 }
6465 
6466 U32 DramcRxWinRDDQCEnd(DRAMC_CTX_T *p)
6467 {
6468 	// Recover MPC Rank
6469 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), 0, SWCMD_CTRL0_MRSRK);
6470 
6471 #if (__LP5_COMBO__ == TRUE)
6472 	if (is_lp5_family(p))
6473 	{
6474 		RunTime_SW_Cmd(p, RUNTIME_SWCMD_CAS_OFF);
6475 
6476 		if (is_heff_mode(p) == FALSE)
6477 		{
6478 			// Disable MR18 "WCK always ON mode"
6479 			vSetLP5Dram_WCKON_OnOff(p, OFF);
6480 
6481 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_COMMON0), 0, SHU_COMMON0_LP5WCKON);
6482 		}
6483 	}
6484 #endif
6485 	return 0;
6486 }
6487 
6488 /* Issue "RD DQ Calibration"
6489  * 1. SWCMD_CTRL1_RDDQC_LP_ENB = 1 to stop RDDQC burst
6490  * 2. RDDQCEN = 1 for RDDQC
6491  * 3. Wait rddqc_response = 1
6492  * 4. Read compare result
6493  * 5. RDDQCEN = 0
6494  */
6495 U32 DramcRxWinRDDQCRun(DRAMC_CTX_T *p)
6496 {
6497 	U32 u4Result = 0, u4TmpResult;
6498 	DRAM_STATUS_T u4Response = DRAM_FAIL;
6499 
6500 	//Issue RD DQ calibration
6501 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL1), 1, SWCMD_CTRL1_RDDQC_LP_ENB);
6502 
6503 	// Trigger and wait
6504 	REG_TRANSFER_T TriggerReg = {DRAMC_REG_SWCMD_EN, SWCMD_EN_RDDQCEN};
6505 	REG_TRANSFER_T RepondsReg = {DRAMC_REG_SPCMDRESP, SPCMDRESP_RDDQC_RESPONSE};
6506 	u4Response = DramcTriggerAndWait(p, TriggerReg, RepondsReg);
6507 	(void)u4Response;
6508 
6509 	// Read RDDQC compare result
6510 	u4TmpResult = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_RDQC_CMP));
6511 	u4Result = (0xFFFF) & ((u4TmpResult) | (u4TmpResult >> 16)); // (BL0~7) | (BL8~15)
6512 
6513 #if (FEATURE_RDDQC_K_DMI == TRUE)
6514 	// Read DQM compare result
6515 	u4TmpResult = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RDQC_DQM_CMP), RDQC_DQM_CMP_RDDQC_DQM_CMP0_ERR);
6516 	u4TmpResult |= u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RDQC_DQM_CMP), RDQC_DQM_CMP_RDDQC_DQM_CMP1_ERR);
6517 	u4Result |= (u4TmpResult << 16);
6518 #endif
6519 
6520 	//R_DMRDDQCEN -> 0
6521 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0, SWCMD_EN_RDDQCEN);
6522 
6523 	return u4Result;
6524 }
6525 
6526 DRAM_STATUS_T DramcRxWindowPerbitCal(DRAMC_CTX_T *p,
6527 											  RX_PATTERN_OPTION_T u1UseTestEngine,
6528 											  U8 *u1AssignedVref,
6529 											  u8 isAutoK)
6530 {
6531 	U8 u1BitIdx, u1ByteIdx;
6532 	PASS_WIN_DATA_T FinalWinPerBit[DQ_DATA_WIDTH + RDDQC_ADD_DMI_NUM];
6533 	S32 iDQSDlyPerbyte[DQS_NUMBER], iDQMDlyPerbyte[DQS_NUMBER];//, iFinalDQSDly[DQS_NUMBER];
6534 	U8 u1VrefScanEnable = FALSE;
6535 	U16 u2FinalVref[DQS_NUMBER] = {0xe, 0xe};
6536 	U16 u2VrefBegin, u2VrefEnd, u2VrefStep;
6537 	U8	u1RXEyeScanEnable=0;
6538 
6539 #if ENABLE_EYESCAN_GRAPH
6540 	U8 EyeScan_index[DQ_DATA_WIDTH_LP4 + RDDQC_ADD_DMI_NUM] = {0};
6541 	U8 u1pass_in_this_vref_flag[DQ_DATA_WIDTH_LP4 + RDDQC_ADD_DMI_NUM];
	U8 u1vrefidx, ii; //loop indexes for the eye-scan table init below
6542 #endif
6543 
6544 	U8 backup_rank, rank_i, u1KnownVref[2]={0xff, 0xff};
6545 
6546 	// error handling
6547 	if (!p)
6548 	{
6549 		err("context NULL\n");
6550 		return DRAM_FAIL;
6551 	}
6552 
6553 #if RDDQC_PINMUX_WORKAROUND
6554 	U32 u4RegBackupAddress[] =
6555 	{
6556 		(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX1)),
6557 		(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX2)),
6558 		(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX3)),
6559 		(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX4)),
6560 	};
6561 
6562 	//Back up dramC register
6563 	DramcBackupRegisters(p, u4RegBackupAddress, ARRAY_SIZE(u4RegBackupAddress));
6564 #endif
6565 
6566 	if (u1UseTestEngine == PATTERN_TEST_ENGINE)
6567 		u1RXEyeScanEnable = GetEyeScanEnable(p, 1);
6568 
6569 #if (FEATURE_RDDQC_K_DMI == TRUE)
6570 	if (u1UseTestEngine == PATTERN_RDDQC)
6571 	{
6572 		iDQMDlyPerbyte[0] = -0xFFFFFF;
6573 		iDQMDlyPerbyte[1] = -0xFFFFFF;
6574 	}
6575 	else
6576 #endif
6577 	{
6578 		iDQMDlyPerbyte[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY4), SHU_R0_B0_RXDLY4_RX_ARDQM0_R_DLY_B0);
6579 		iDQMDlyPerbyte[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY4), SHU_R0_B1_RXDLY4_RX_ARDQM0_R_DLY_B1);
6580 
6581 	}
6582 
6583 #if ENABLE_EYESCAN_GRAPH
6584 	if (u1UseTestEngine == PATTERN_TEST_ENGINE)
6585 	{
6586 		for(u1vrefidx=0; u1vrefidx<EYESCAN_RX_VREF_RANGE_END;u1vrefidx++)
6587 		{
6588 			for (u1BitIdx = 0; u1BitIdx < DQ_DATA_WIDTH_LP4; u1BitIdx++)
6589 			{
6590 				for(ii=0; ii<EYESCAN_BROKEN_NUM; ii++)
6591 				{
6592 					gEyeScan_Min[u1vrefidx][u1BitIdx][ii] = EYESCAN_DATA_INVALID;
6593 					gEyeScan_Max[u1vrefidx][u1BitIdx][ii] = EYESCAN_DATA_INVALID;
6594 
6595 					gEyeScan_ContinueVrefHeight[u1BitIdx] = 0;
6596 					gEyeScan_TotalPassCount[u1BitIdx] = 0;
6597 				}
6598 			}
6599 		}
6600 	}
6601 #endif
6602 
6603 
6604 	//When doing RxWindowPerbitCal, make sure that auto refresh is disabled
6605 	vAutoRefreshSwitch(p, DISABLE);
6606 	//CKEFixOnOff(p, p->rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
6607 
6608 	backup_rank = u1GetRank(p);
6609 
6610 	//Default: set result to fail. When a window is found, update the result to OK
6611 	if (u1UseTestEngine == PATTERN_TEST_ENGINE)
6612 	{
6613 		vSetCalibrationResult(p, DRAM_CALIBRATION_RX_PERBIT, DRAM_FAIL);
6614 
6615 		// The TA2 pattern has SI problems, which cause RX autoK to fail.
6616 		if (isAutoK == TRUE)
6617 		{
6618 			DramcEngine2Init(p, p->test2_1, p->test2_2, TEST_XTALK_PATTERN, 0, TE_NO_UI_SHIFT);
6619 		}
6620 		else
6621 		{
6622 #if ENABLE_K_WITH_WORST_SI_UI_SHIFT
6623 			DramcEngine2Init(p, p->test2_1, p->test2_2, p->test_pattern | 0x80, 0, TE_UI_SHIFT);//UI_SHIFT + LEN1
6624 #else
6625 			DramcEngine2Init(p, p->test2_1, p->test2_2, p->test_pattern, 0, TE_NO_UI_SHIFT);
6626 #endif
6627 		}
6628 	}
6629 	else
6630 	{
6631 		vSetCalibrationResult(p, DRAM_CALIBRATION_RX_RDDQC, DRAM_FAIL);
6632 		DramcRxWinRDDQCInit(p);
6633 	}
6634 
6635 	// Initialize: disable RX Vref
6636 	u2VrefBegin = 0;
6637 	u2VrefEnd = 0;
6638 	u2VrefStep = 1;
6639 
6640 	if (u1UseTestEngine == PATTERN_TEST_ENGINE)
6641 	{
6642 	#if (FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
6643 		if ((p->rank==RANK_0) || (p->frequency >= RX_VREF_DUAL_RANK_K_FREQ) || (u1RXEyeScanEnable==1))
6644 			u1VrefScanEnable =1;
6645 	#else
6646 			u1VrefScanEnable =0;
6647 	#endif
6648 	}
6649 
6650 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
6651 	if (p->femmc_Ready == 1 && ((p->Bypass_RDDQC && u1UseTestEngine == PATTERN_RDDQC) || (p->Bypass_RXWINDOW && u1UseTestEngine == PATTERN_TEST_ENGINE)))
6652 	{
6653 		msg("[FAST_K] Bypass RX Calibration\n");
6654 	}
6655 	else
6656 #endif
6657 	{
6658 #if VENDER_JV_LOG
6659 #if 0 //BU don't want customer knows our RX's ability
6660 		if (u1UseTestEngine == 1)
6661 			vPrintCalibrationBasicInfo_ForJV(p);
6662 #endif
6663 #else
6664 		vPrintCalibrationBasicInfo(p);
6665 #endif
6666 		msg2("Start DQ dly to find pass range UseTestEngine =%d\n", u1UseTestEngine);
6667 	}
6668 
6669 	msg2("UseTestEngine: %d\n", u1UseTestEngine);
6670 	msg("RX Vref Scan: %d\n", u1VrefScanEnable);
6671 
6672 	if (u1VrefScanEnable)
6673 	{
6674 		if ((Get_Vref_Calibration_OnOff(p) == VREF_CALI_OFF) && (u1RXEyeScanEnable == 0))
6675 		{
6676 			u2VrefBegin = 0;
6677 			u2VrefEnd = 0;
6678 			u1KnownVref[0] = gFinalRXVrefDQForSpeedUp[p->channel][p->rank][p->odt_onoff][0];// byte 0
6679 			u1KnownVref[1] = gFinalRXVrefDQForSpeedUp[p->channel][p->rank][p->odt_onoff][1];// byte 1
6680 
6681 			if (u1UseTestEngine == PATTERN_TEST_ENGINE && ((u1KnownVref[0] == 0) || (u1KnownVref[1] == 0)))
6682 			{
6683 //				  err("\nWrong frequency K order= %d\n");
6684 				#if __ETT__
6685 				while (1);
6686 				#endif
6687 			}
6688 		}
6689 		else if (u1AssignedVref != NULL)  // need to specify RX Vref and don't scan RX Vref.
6690 		{
6691 			u2VrefBegin = 0;
6692 			u2VrefEnd = 0;
6693 			u1KnownVref[0] = u1AssignedVref[0];  // byte 0
6694 			u1KnownVref[1] = u1AssignedVref[1];  // byte 1
6695 		}
6696 		else
6697 		{
6698 			#if (SW_CHANGE_FOR_SIMULATION || FOR_DV_SIMULATION_USED)
6699 			u2VrefBegin = RX_VREF_RANGE_BEGIN;
6700 			#else
6701 			if (u1RXEyeScanEnable == 0)
6702 			{
6703 				if (p->odt_onoff)
6704 				{
6705 					u2VrefBegin = RX_VREF_RANGE_BEGIN_ODT_ON;
6706 				}
6707 				else
6708 				{
6709 					u2VrefBegin = RX_VREF_RANGE_BEGIN_ODT_OFF;
6710 				}
6711 				u2VrefEnd = RX_VREF_RANGE_END-1;
6712 				msg("\nSet Vref Range= %d -> %d\n",u2VrefBegin,u2VrefEnd);
6713 			}
6714 			else
6715 			{
6716 				u2VrefBegin = 0;//Lewis@20160817: Enlarge RX Vref range for eye scan
6717 				u2VrefEnd = EYESCAN_RX_VREF_RANGE_END-1;
6718 				msg("\nSet Eyescan Vref Range= %d -> %d\n",u2VrefBegin,u2VrefEnd);
6719 			}
6720 		#endif
6721 		}
6722 
6723 		if (u1RXEyeScanEnable == 0)
6724 		{
6725 			u2VrefStep = RX_VREF_RANGE_STEP;
6726 		}
6727 		else
6728 		{
6729 			u2VrefStep = EYESCAN_GRAPH_RX_VREF_STEP;
6730 		}
6731 
6732 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5), 1, B0_DQ5_RG_RX_ARDQ_VREF_EN_B0);
6733 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5), 1, B1_DQ5_RG_RX_ARDQ_VREF_EN_B1);
6734 	}
6735 	else // Disable RX Vref
6736 	{
6737 		u2VrefBegin = 0;
6738 		u2VrefEnd = 0;
6739 		u2VrefStep = 1;
6740 	}
6741 	(void)u2VrefStep;
6742 
6743 
6744 #if SUPPORT_SAVE_TIME_FOR_CALIBRATION
6745 	if (p->femmc_Ready == 1 && ((p->Bypass_RDDQC && u1UseTestEngine == PATTERN_RDDQC) || (p->Bypass_RXWINDOW && u1UseTestEngine == PATTERN_TEST_ENGINE)))
6746 	{
6747 		// load RX DQS and DQM delay from eMMC
6748 		for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
6749 		{
6750 			if (u1VrefScanEnable)
6751 			{
6752 				// load RX Vref from eMMC
6753 			#if ( SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_VREF_CAL)
6754 				u2FinalVref[u1ByteIdx] = p->pSavetimeData->u1RxWinPerbitVref_Save[p->channel][p->rank][u1ByteIdx];
6755 			#endif
6756 			}
6757 
6758 			iDQSDlyPerbyte[u1ByteIdx] = p->pSavetimeData->u1RxWinPerbit_DQS[p->channel][p->rank][u1ByteIdx];
6759 			iDQMDlyPerbyte[u1ByteIdx] = p->pSavetimeData->u1RxWinPerbit_DQM[p->channel][p->rank][u1ByteIdx];
6760 		}
6761 
6762 		// load RX DQ delay from eMMC
6763 		for (u1BitIdx = 0; u1BitIdx < 16; u1BitIdx++)
6764 		{
6765 			FinalWinPerBit[u1BitIdx].best_dqdly = p->pSavetimeData->u1RxWinPerbit_DQ[p->channel][p->rank][u1BitIdx];
6766 		}
6767 
6768 		if (u1UseTestEngine == PATTERN_TEST_ENGINE)
6769 			vSetCalibrationResult(p, DRAM_CALIBRATION_RX_PERBIT, DRAM_FAST_K);
6770 		else
6771 			vSetCalibrationResult(p, DRAM_CALIBRATION_RX_RDDQC, DRAM_FAST_K);
6772 	}
6773 #endif
6774 
6775 	if (u1VrefScanEnable == TRUE)
6776 	{
6777 		// When RX Vref is calibrated only for Rank 0, apply the same value to Rank 1.
6778 		for (rank_i = p->rank; rank_i < p->support_rank_num; rank_i++)
6779 		{
6780 			vSetRank(p, rank_i);
6781 
6782 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL),
6783 					P_Fld(u2FinalVref[0], SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B0) |
6784 					P_Fld(u2FinalVref[0], SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B0));
6785 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_PHY_VREF_SEL),
6786 					P_Fld(u2FinalVref[1], SHU_B1_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B1) |
6787 					P_Fld(u2FinalVref[1], SHU_B1_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B1));
6788 
6789 			for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
6790 			{
6791 				msg("\nFinal RX Vref Byte %d = %d to rank%d", u1ByteIdx, u2FinalVref[u1ByteIdx], rank_i);
6792 				reg_msg("\nFinal RX Vref Byte %d = %d to rank%d", u1ByteIdx, u2FinalVref[u1ByteIdx], rank_i);
6793 
6794 				gFinalRXVrefDQ[p->channel][rank_i][u1ByteIdx] = (U8) u2FinalVref[u1ByteIdx];
6795 				gFinalRXVrefDQForSpeedUp[p->channel][rank_i][p->odt_onoff][u1ByteIdx] = (U8) u2FinalVref[u1ByteIdx];
6796 			}
6797 		}
6798 		vSetRank(p, backup_rank);
6799 	}
6800 
6801 #if DUMP_TA2_WINDOW_SIZE_RX_TX
6802 	//RX
6803 	if (u1UseTestEngine == PATTERN_TEST_ENGINE)
6804 	{
6805 		U32 u4B0Tatal =0;
6806 		U32 u4B1Tatal =0;
6807 		msg("RX window per bit CH[%d] Rank[%d] window size\n", p->channel, p->rank);
6808 		for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
6809 		{
6810 			msg("DQ[%d] size = %d\n", u1BitIdx, gFinalRXPerbitWin[p->channel][p->rank][u1BitIdx]);
6811 			if(u1BitIdx < 8)
6812 			{
6813 				u4B0Tatal += gFinalRXPerbitWin[p->channel][p->rank][u1BitIdx];
6814 			}
6815 			else
6816 			{
6817 				u4B1Tatal += gFinalRXPerbitWin[p->channel][p->rank][u1BitIdx];
6818 			}
6819 		}
6820 		msg("total rx window size B0: %d B1: %d\n", u4B0Tatal, u4B1Tatal);
6821 	}
6822 #endif
6823 
6824 	// set dqs delay, (dqm delay)
6825 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY5),
6826 			P_Fld((U32)iDQSDlyPerbyte[0], SHU_R0_B0_RXDLY5_RX_ARDQS0_R_DLY_B0));
6827 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY4),
6828 			P_Fld((U32)iDQMDlyPerbyte[0], SHU_R0_B0_RXDLY4_RX_ARDQM0_R_DLY_B0));
6829 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY5),
6830 			P_Fld((U32)iDQSDlyPerbyte[1], SHU_R0_B1_RXDLY5_RX_ARDQS0_R_DLY_B1));
6831 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY4),
6832 			P_Fld((U32)iDQMDlyPerbyte[1], SHU_R0_B1_RXDLY4_RX_ARDQM0_R_DLY_B1));
6833 
6834 	// set dq delay
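	// Every two DQ bits share one RXDLY register; with u1BitIdx advancing by 2, the address
	// offset u1BitIdx * 2 steps one register (assumed 4-byte spacing) at a time.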
6835 	for (u1BitIdx = 0; u1BitIdx < DQS_BIT_NUMBER; u1BitIdx += 2)
6836 	{
6837 		 vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY0 + u1BitIdx * 2),
6838 										P_Fld(((U32)FinalWinPerBit[u1BitIdx].best_dqdly), SHU_R0_B0_RXDLY0_RX_ARDQ0_R_DLY_B0) |
6839 										P_Fld(((U32)FinalWinPerBit[u1BitIdx + 1].best_dqdly), SHU_R0_B0_RXDLY0_RX_ARDQ1_R_DLY_B0));
6840 
6841 		 vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY0 + u1BitIdx * 2),
6842 										P_Fld((U32)FinalWinPerBit[u1BitIdx + 8].best_dqdly, SHU_R0_B1_RXDLY0_RX_ARDQ0_R_DLY_B1) |
6843 										P_Fld((U32)FinalWinPerBit[u1BitIdx + 9].best_dqdly, SHU_R0_B1_RXDLY0_RX_ARDQ1_R_DLY_B1));
6844 
6845 		//msg("u1BitId %d  Addr 0x%2x = %2d %2d %2d %2d \n", u1BitIdx, DDRPHY_RXDQ1+u1BitIdx*2,
6846 		//				  FinalWinPerBit[u1BitIdx].best_dqdly, FinalWinPerBit[u1BitIdx+1].best_dqdly,  FinalWinPerBit[u1BitIdx+8].best_dqdly, FinalWinPerBit[u1BitIdx+9].best_dqdly);
6847 	}
6848 
6849 	DramPhyReset(p);
6850 
6851 #if RDDQC_PINMUX_WORKAROUND
6852 	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
6853 #endif
6854 
6855 	vSetRank(p, backup_rank);
6856 
6857 	vPrintCalibrationBasicInfo(p);
6858 
6859 #ifdef ETT_PRINT_FORMAT
6860 	msg("DQS Delay:\nDQS0 = %d, DQS1 = %d\n"
6861 					"DQM Delay:\nDQM0 = %d, DQM1 = %d\n",
6862 						iDQSDlyPerbyte[0], iDQSDlyPerbyte[1],
6863 						iDQMDlyPerbyte[0], iDQMDlyPerbyte[1]);
6864 	reg_msg("DQS Delay:\nDQS0 = %d, DQS1 = %d\n"
6865 					"DQM Delay:\nDQM0 = %d, DQM1 = %d\n",
6866 						iDQSDlyPerbyte[0], iDQSDlyPerbyte[1],
6867 						iDQMDlyPerbyte[0], iDQMDlyPerbyte[1]);
6868 #else
6869 	msg("DQS Delay:\nDQS0 = %2d, DQS1 = %2d\n"
6870 					"DQM Delay:\nDQM0 = %2d, DQM1 = %2d\n",
6871 						iDQSDlyPerbyte[0], iDQSDlyPerbyte[1],
6872 						iDQMDlyPerbyte[0], iDQMDlyPerbyte[1]);
6873 	reg_msg("DQS Delay:\nDQS0 = %2d, DQS1 = %2d\n"
6874 					"DQM Delay:\nDQM0 = %2d, DQM1 = %2d\n",
6875 						iDQSDlyPerbyte[0], iDQSDlyPerbyte[1],
6876 						iDQMDlyPerbyte[0], iDQMDlyPerbyte[1]);
6877 #endif
6878 	msg("DQ Delay:\n");
6879 	reg_msg("DQ Delay:\n");
6880 
6881 	for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx = u1BitIdx + 4)
6882 	{
6883 #ifdef ETT_PRINT_FORMAT
6884 		msg("DQ%d =%d, DQ%d =%d, DQ%d =%d, DQ%d =%d\n", u1BitIdx, FinalWinPerBit[u1BitIdx].best_dqdly, u1BitIdx+1, FinalWinPerBit[u1BitIdx+1].best_dqdly, u1BitIdx+2, FinalWinPerBit[u1BitIdx+2].best_dqdly, u1BitIdx+3, FinalWinPerBit[u1BitIdx+3].best_dqdly);
6885 		reg_msg("DQ%d =%d, DQ%d =%d, DQ%d =%d, DQ%d =%d\n", u1BitIdx, FinalWinPerBit[u1BitIdx].best_dqdly, u1BitIdx+1, FinalWinPerBit[u1BitIdx+1].best_dqdly, u1BitIdx+2, FinalWinPerBit[u1BitIdx+2].best_dqdly, u1BitIdx+3, FinalWinPerBit[u1BitIdx+3].best_dqdly);
6886 #else
6887 		msg("DQ%2d =%2d, DQ%2d =%2d, DQ%2d =%2d, DQ%2d =%2d\n", u1BitIdx, FinalWinPerBit[u1BitIdx].best_dqdly, u1BitIdx+1, FinalWinPerBit[u1BitIdx+1].best_dqdly, u1BitIdx+2, FinalWinPerBit[u1BitIdx+2].best_dqdly, u1BitIdx+3, FinalWinPerBit[u1BitIdx+3].best_dqdly);
6888 		reg_msg("DQ%2d =%2d, DQ%2d =%2d, DQ%2d =%2d, DQ%2d =%2d\n", u1BitIdx, FinalWinPerBit[u1BitIdx].best_dqdly, u1BitIdx+1, FinalWinPerBit[u1BitIdx+1].best_dqdly, u1BitIdx+2, FinalWinPerBit[u1BitIdx+2].best_dqdly, u1BitIdx+3, FinalWinPerBit[u1BitIdx+3].best_dqdly);
6889 #endif
6890 	}
6891 	msg("\n\n");
6892 	msg3("[DramcRxWindowPerbitCal] Done\n");
6893 
6894 	#if LP5_DDR4266_RDBI_WORKAROUND
6895 	if((is_lp5_family(p)) && (p->frequency >= 2133))
6896 	{
6897 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ7), 1, SHU_B0_DQ7_R_DMDQMDBI_SHU_B0);
6898 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ7), 1, SHU_B1_DQ7_R_DMDQMDBI_SHU_B1);
6899 	}
6900 	#endif
6901 
6902 	return DRAM_OK;
6903 
6904 	// Log example	==> Need to update
6905 	/*
6906 ------------------------------------------------------
6907 Start calculate dq time and dqs time /
6908 Find max DQS delay per byte / Adjust DQ delay to align DQS...
6909 ------------------------------------------------------
6910 bit# 0 : dq time=11 dqs time= 8
6911 bit# 1 : dq time=11 dqs time= 8
6912 bit# 2 : dq time=11 dqs time= 6
6913 bit# 3 : dq time=10 dqs time= 8
6914 bit# 4 : dq time=11 dqs time= 8
6915 bit# 5 : dq time=10 dqs time= 8
6916 bit# 6 : dq time=11 dqs time= 8
6917 bit# 7 : dq time= 9 dqs time= 6
6918 ----seperate line----
6919 bit# 8 : dq time=12 dqs time= 7
6920 bit# 9 : dq time=10 dqs time= 8
6921 bit#10 : dq time=11 dqs time= 8
6922 bit#11 : dq time=10 dqs time= 8
6923 bit#12 : dq time=11 dqs time= 8
6924 bit#13 : dq time=11 dqs time= 8
6925 bit#14 : dq time=11 dqs time= 8
6926 bit#15 : dq time=12 dqs time= 8
6927 ----seperate line----
6928 bit#16 : dq time=11 dqs time= 7
6929 bit#17 : dq time=10 dqs time= 8
6930 bit#18 : dq time=11 dqs time= 7
6931 bit#19 : dq time=11 dqs time= 6
6932 bit#20 : dq time=10 dqs time= 9
6933 bit#21 : dq time=11 dqs time=10
6934 bit#22 : dq time=11 dqs time=10
6935 bit#23 : dq time= 9 dqs time= 9
6936 ----seperate line----
6937 bit#24 : dq time=12 dqs time= 6
6938 bit#25 : dq time=13 dqs time= 6
6939 bit#26 : dq time=13 dqs time= 7
6940 bit#27 : dq time=11 dqs time= 7
6941 bit#28 : dq time=12 dqs time= 8
6942 bit#29 : dq time=10 dqs time= 8
6943 bit#30 : dq time=13 dqs time= 7
6944 bit#31 : dq time=11 dqs time= 8
6945 ----seperate line----
6946 ==================================================
6947 	dramc_rxdqs_perbit_swcal_v2
6948 	channel=2(2:cha, 3:chb) apply = 1
6949 ==================================================
6950 DQS Delay :
6951  DQS0 = 0 DQS1 = 0 DQS2 = 0 DQS3 = 0
6952 DQ Delay :
6953 DQ 0 =	1 DQ 1 =  1 DQ 2 =	2 DQ 3 =  1
6954 DQ 4 =	1 DQ 5 =  1 DQ 6 =	1 DQ 7 =  1
6955 DQ 8 =	2 DQ 9 =  1 DQ10 =	1 DQ11 =  1
6956 DQ12 =	1 DQ13 =  1 DQ14 =	1 DQ15 =  2
6957 DQ16 =	2 DQ17 =  1 DQ18 =	2 DQ19 =  2
6958 DQ20 =	0 DQ21 =  0 DQ22 =	0 DQ23 =  0
6959 DQ24 =	3 DQ25 =  3 DQ26 =	3 DQ27 =  2
6960 DQ28 =	2 DQ29 =  1 DQ30 =	3 DQ31 =  1
6961 _______________________________________________________________
6962    */
6963 }
6964 
6965 #if SIMULATION_RX_DVS
6966 static U8 DramcRxDVSCal(DRAMC_CTX_T *p, U8 u1byte)
6967 {
6968 	U8 u1rising_lead, u1falling_lead, u1rising_lag, u1falling_lag, u1lead_lag;
6969 
6970 	if (u1byte == 0)
6971 	{
6972 		u1rising_lead = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS0), MISC_FT_STATUS0_AD_RX_ARDQ_DVS_R_LEAD_B0);
6973 		u1falling_lead = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS1), MISC_FT_STATUS1_AD_RX_ARDQ_DVS_F_LEAD_B0);
6974 		u1rising_lag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS0), MISC_FT_STATUS0_AD_RX_ARDQ_DVS_R_LAG_B0);
6975 		u1falling_lag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS1), MISC_FT_STATUS1_AD_RX_ARDQ_DVS_F_LAG_B0);
6976 	}
6977 	else //byte1
6978 	{
6979 		u1rising_lead = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS0), MISC_FT_STATUS0_AD_RX_ARDQ_DVS_R_LEAD_B1);
6980 		u1falling_lead = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS1), MISC_FT_STATUS1_AD_RX_ARDQ_DVS_F_LEAD_B1);
6981 		u1rising_lag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS0), MISC_FT_STATUS0_AD_RX_ARDQ_DVS_R_LAG_B1);
6982 		u1falling_lag = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_FT_STATUS1), MISC_FT_STATUS1_AD_RX_ARDQ_DVS_F_LAG_B1);
6983 	}
6984 
6985 	msg2("Byte%d | LEAD(%d %d) | LAG(%d %d)\n", u1byte, u1rising_lead, u1falling_lead, u1rising_lag, u1falling_lag);
6986 
6987 	u1lead_lag = (u1rising_lead | u1falling_lead | u1rising_lag | u1falling_lag);
6988 
6989 	return u1lead_lag;
6990 }
6991 
6992 DRAM_STATUS_T DramcRxDVSWindowCal(DRAMC_CTX_T *p)
6993 {
6994 	U8 ii, u1ByteIdx;
6995 	S16 iDelay = 0, S16DelayBegin = 0;
6996 	U16 u16DelayEnd = 0, u16DelayStep = 1;
6997 	U32 u4err_value;
6998 
6999 	U8 u1lead_lag, u1DVS_first_flag[DQS_NUMBER_LP4]={0}, u1DVS_first_pass[DQS_NUMBER_LP4]={0}, u1DVS_pass_window[DQS_NUMBER_LP4]={0}, u1finish_flag[DQS_NUMBER_LP4]={0};
7000 	U32 u4RegBackupAddress[] =
7001 	{
7002 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11)),
7003 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11)),
7004 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY0)),
7005 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY0)),
7006 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY5)),
7007 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY5)),
7008 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY4)),
7009 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY4)),
7010 	};
7011 
7012 	// error handling
7013 	if (!p)
7014 	{
7015 		err("context NULL\n");
7016 		return DRAM_FAIL;
7017 	}
7018 
7019 	msg("\\\RX DVS calibration\\\\n");
7020 
7021 	//When doing RxWindowPerbitCal, make sure that auto refresh is disabled
7022 	vAutoRefreshSwitch(p, DISABLE);
7023 	//CKEFixOnOff(p, p->rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL);
7024 
7025 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), 1, SHU_B0_DQ11_RG_RX_ARDQ_DVS_EN_B0);
7026 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), 1, SHU_B1_DQ11_RG_RX_ARDQ_DVS_EN_B1);
7027 
7028 	//Default: set result to fail. When a window is found, update the result to OK
7029 #if ENABLE_K_WITH_WORST_SI_UI_SHIFT
7030 	DramcEngine2Init(p, p->test2_1, p->test2_2, p->test_pattern | 0x80, 0, TE_UI_SHIFT);//PIC Need to check if need to use UI_SHIFT;//UI_SHIFT + LEN1
7031 #else
7032 	DramcEngine2Init(p, p->test2_1, p->test2_2, TEST_XTALK_PATTERN, 0, TE_NO_UI_SHIFT);
7033 #endif
7034 
7035 
7036 #if (__LP5_COMBO__ == TRUE)
7037 	if (is_lp5_family(p))
7038 	{
7039 		// 1 step = 1/4 delay cell
7040 		// Adjust step = 1/2/4(precision adjustment) by data-rate
7041 		if (p->frequency <= GetFreqBySel(p,LP5_DDR3200))
7042 			u16DelayStep = 4;
7043 		else if (p->frequency <= GetFreqBySel(p,LP5_DDR4800)) // 3733, 4266, 4800
7044 			u16DelayStep = 2;
7045 		else // 5500, 6000, 6400
7046 			u16DelayStep = 1;
7047 	}
7048 #endif
7049 	else
7050 	{
7051 		u16DelayStep = 4;
7052 	}
7053 	// Just for DV SIM test
7054 	S16DelayBegin = -80;
7055 	u16DelayEnd = 100;
7056 
7057 	msg("\nRX Delay %d -> %d, step: %d\n", S16DelayBegin, u16DelayEnd, u16DelayStep);
7058 
7059 	{
7060 		// Adjust DQM output delay to 0
7061 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_RXDLY4),
7062 				P_Fld(0, SHU_R0_B0_RXDLY4_RX_ARDQM0_R_DLY_B0) |
7063 				P_Fld(0, SHU_R0_B0_RXDLY4_RX_ARDQM0_F_DLY_B0));
7064 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_RXDLY4),
7065 				P_Fld(0, SHU_R0_B1_RXDLY4_RX_ARDQM0_R_DLY_B1) |
7066 				P_Fld(0, SHU_R0_B1_RXDLY4_RX_ARDQM0_F_DLY_B1));
7067 
7068 		// Adjust DQ output delay to 0
7069 		//every 2bit dq have the same delay register address
7070 		for (ii = 0; ii < 4; ii++)
7071 			SetRxDqDelay(p, ii, 0);
7072 		{
7073 			// non-autok flow
7074 			for (iDelay = S16DelayBegin; iDelay <= u16DelayEnd; iDelay += u16DelayStep)
7075 			{
7076 				SetRxDqDqsDelay(p, iDelay);
7077 
7078 				u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, p->test_pattern);
7079 
7080 				msg2("iDelay= %4d, err_value: 0x%x", iDelay, u4err_value);
7081 
7082 				for(u1ByteIdx=0; u1ByteIdx<(p->data_width/DQS_BIT_NUMBER); u1ByteIdx++)
7083 				{
7084 					u1lead_lag = DramcRxDVSCal(p, u1ByteIdx);
7085 
7086 					if ((u1lead_lag == 0) && (u1DVS_first_flag[u1ByteIdx] == 0) && (((u4err_value >> (u1ByteIdx<<3)) & 0xff) == 0))
7087 					{
7088 						u1DVS_first_pass[u1ByteIdx] = iDelay;
7089 						u1DVS_first_flag[u1ByteIdx] = 1;
7090 						msg("Byte%d find first pass delay\n", u1ByteIdx);
7091 					}
7092 					else if (((u1lead_lag == 1) || (((u4err_value >> (u1ByteIdx<<3)) & 0xff) != 0)) && (u1DVS_first_flag[u1ByteIdx] == 1) && (u1finish_flag[u1ByteIdx] == 0))
7093 					{
7094 						u1DVS_pass_window[u1ByteIdx] = iDelay - u1DVS_first_pass[u1ByteIdx] - u16DelayStep;
7095 
7096 						if (u1DVS_pass_window[u1ByteIdx] < 7) //a window smaller than 7 is treated as fake; 7 or larger counts as a real pass window.
7097 						{
7098 							u1DVS_pass_window[u1ByteIdx] = 0;
7099 							u1DVS_first_flag[u1ByteIdx] = 0;
7100 							msg("Byte%d find fake window\n", u1ByteIdx);
7101 						}
7102 						else
7103 						{
7104 							 u1finish_flag[u1ByteIdx] = 1;
7105 							 msg("Byte%d find pass window\n", u1ByteIdx);
7106 						}
7107 					}
7108 				}
7109 
7110 				if ((u1finish_flag[0]==1) && (u1finish_flag[1]==1))
7111 				{
7112 					msg("Two byte DVS window find, early break!\n");
7113 					break;
7114 				}
7115 			}
7116 		}
7117 	}
7118 
7119 	DramcEngine2End(p);
7120 
7121 	for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
7122 	{
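		// One DVS delay step is assumed to cover 8 scan steps: no increase unless the measured
		// window exceeds 8; when it does, increase = (window - 8) / 8 (integer).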
7123 		u1DVS_increase[p->rank][u1ByteIdx] = (u1DVS_pass_window[u1ByteIdx] > 8)? ((u1DVS_pass_window[u1ByteIdx] - 8) >> 3): 0;
7124 		msg("\nByte %d final DVS window size(M) %d, DVS increase %d\n", u1ByteIdx, u1DVS_pass_window[u1ByteIdx], u1DVS_increase[p->rank][u1ByteIdx]);
7125 	}
7126 
7127 	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
7128 
7129 	vAutoRefreshSwitch(p, ENABLE);
7130 
7131 	DramPhyReset(p);
7132 
7133 	vPrintCalibrationBasicInfo(p);
7134 
7135 	msg("\n\n");
7136 	msg3("[DramcRxDVSWindowCal] Done\n");
7137 
7138 	return DRAM_OK;
7139 }
7140 
7141 void DramcDramcRxDVSCalPostProcess(DRAMC_CTX_T *p)
7142 {
7143 	U8 rank_i, u1ByteIdx, u1DVS_increase_final, u1DVS_dly_final[DQS_NUMBER_LP4]={0};
7144 	U8 backup_rank = p->rank;
7145 
7146 	for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
7147 	{
7148 		if (p->support_rank_num == RANK_DUAL)
7149 			u1DVS_increase_final = (u1DVS_increase[RANK_0][u1ByteIdx] < u1DVS_increase[RANK_1][u1ByteIdx])? u1DVS_increase[RANK_0][u1ByteIdx] : u1DVS_increase[RANK_1][u1ByteIdx];
7150 		else
7151 			u1DVS_increase_final = u1DVS_increase[p->rank][u1ByteIdx];
7152 
7153 		if (u1ByteIdx == 0)
7154 		{
7155 			u1DVS_dly_final[u1ByteIdx] = u1DVS_increase_final + (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), SHU_B0_DQ11_RG_RX_ARDQ_DVS_DLY_B0));
7156 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), u1DVS_dly_final[u1ByteIdx], SHU_B0_DQ11_RG_RX_ARDQ_DVS_DLY_B0);
7157 		}
7158 		else //byte1
7159 		{
7160 			u1DVS_dly_final[u1ByteIdx] = u1DVS_increase_final + (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), SHU_B1_DQ11_RG_RX_ARDQ_DVS_DLY_B1));
7161 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), u1DVS_dly_final[u1ByteIdx], SHU_B1_DQ11_RG_RX_ARDQ_DVS_DLY_B1);
7162 		}
7163 		msg("Byte%d final DVS delay: %d\n", u1ByteIdx, u1DVS_dly_final[u1ByteIdx]);
7164 	}
7165 
7166 	for(rank_i=RANK_0; rank_i< p->support_rank_num; rank_i++)
7167 	{
7168 		vSetRank(p, rank_i);
7169 		DramcRxWindowPerbitCal(p, PATTERN_TEST_ENGINE, DVS_CAL_KEEP_VREF, AUTOK_OFF);
7170 	}
7171 
7172 	if ((DramcRxDVSCal(p, 0) == 1) || (DramcRxDVSCal(p, 1) == 1)) //Prevent setting a wrong DVS delay
7173 	{
7174 		err("Final DVS delay is out of RX window\n");
7175 		for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
7176 		{
7177 			if (u1DVS_dly_final[u1ByteIdx] > 0)
7178 			{
7179 				u1DVS_dly_final[u1ByteIdx] -= 1;
7180 				if (u1ByteIdx == 0)
7181 				{
7182 					vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), u1DVS_dly_final[u1ByteIdx], SHU_B0_DQ11_RG_RX_ARDQ_DVS_DLY_B0);
7183 				}
7184 				else //byte1
7185 				{
7186 					vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), u1DVS_dly_final[u1ByteIdx], SHU_B1_DQ11_RG_RX_ARDQ_DVS_DLY_B1);
7187 				}
7188 			}
7189 			for(rank_i=RANK_0; rank_i< p->support_rank_num; rank_i++)
7190 			{
7191 				vSetRank(p, rank_i);
7192 				DramcRxWindowPerbitCal(p, PATTERN_TEST_ENGINE, DVS_CAL_KEEP_VREF, AUTOK_OFF);
7193 			}
7194 		}
7195 	}
7196 
7197 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), 1, SHU_B0_DQ11_RG_RX_ARDQ_DVS_EN_B0);
7198 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), 1, SHU_B1_DQ11_RG_RX_ARDQ_DVS_EN_B1);
7199 
7200 	vSetRank(p, backup_rank);
7201 }
7202 #endif
7203 
7204 #if SIMULATION_DATLAT
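// dle_factor_handler programs the DATLAT (read data latency) factor: the raw DATLAT value, its
// pipelined variant DATLAT_DSEL (DATLAT - 1 unless the RX pipe is bypassed), and the read
// clock-gating extend options that widen the DLE window as DATLAT grows.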
7205 static void dle_factor_handler(DRAMC_CTX_T *p, U8 curr_val)
7206 {
7207 	U8 u1DATLAT_DSEL = 0;
7208 	U8 u1DLECG_OptionEXT1 = 0;
7209 	U8 u1DLECG_OptionEXT2 = 0;
7210 	U8 u1DLECG_OptionEXT3 = 0;
7211 
7212 	// If (RX_PIPE_BYPASS_ENABLE == 1) bypass RX PIPE, so RG_DATLAT_DSEL = RG_DATLAT
7213 	// else RG_DATLAT_DSEL = RG_DATLAT - 1
7214 	if (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_RX_PIPE_CTRL), SHU_MISC_RX_PIPE_CTRL_RX_PIPE_BYPASS_EN))
7215 	{
7216 		u1DATLAT_DSEL = curr_val;
7217 	}
7218 	else
7219 	{
7220 		if (curr_val < 1)
7221 			u1DATLAT_DSEL = curr_val;
7222 		else
7223 			u1DATLAT_DSEL = curr_val - 1;
7224 	}
7225 
7226 //	  msg("DATLAT: %d, u1DATLAT_DSEL: %d\n", curr_val, u1DATLAT_DSEL);
7227 
7228 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RDAT),
7229 			P_Fld(curr_val, MISC_SHU_RDAT_DATLAT) |
7230 			P_Fld(u1DATLAT_DSEL, MISC_SHU_RDAT_DATLAT_DSEL) |
7231 			P_Fld(u1DATLAT_DSEL, MISC_SHU_RDAT_DATLAT_DSEL_PHY));
7232 
7233 	// Had been adjusted for 868 already.
7234 	//(>=8 & <14) set EXT1 =1, EXT2=0, EXT3=0
7235 	//(>= 14 & <19) set EXT1=1, EXT2=1, EXT3=0
7236 	//(>=19) set EXT1=1, EXT2=1, EXT3=1
7237 	u1DLECG_OptionEXT1 = (curr_val >= 8)? (1): (0);
7238 	u1DLECG_OptionEXT2 = (curr_val >= 14)? (1): (0);
7239 	u1DLECG_OptionEXT3 = (curr_val >= 19)? (1): (0);
7240 //	  msg("u1DLECG_OptionEXT1: %d, 2 for %d, 3 for %d\n", u1DLECG_OptionEXT1, u1DLECG_OptionEXT2, u1DLECG_OptionEXT3);
7241 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_RX_CG_SET0),
7242 			P_Fld(u1DLECG_OptionEXT1, SHU_RX_CG_SET0_READ_START_EXTEND1) |
7243 			P_Fld(u1DLECG_OptionEXT1, SHU_RX_CG_SET0_DLE_LAST_EXTEND1) |
7244 			P_Fld((u1DLECG_OptionEXT2), SHU_RX_CG_SET0_READ_START_EXTEND2) |
7245 			P_Fld((u1DLECG_OptionEXT2), SHU_RX_CG_SET0_DLE_LAST_EXTEND2) |
7246 			P_Fld((u1DLECG_OptionEXT3), SHU_RX_CG_SET0_READ_START_EXTEND3) |
7247 			P_Fld((u1DLECG_OptionEXT3), SHU_RX_CG_SET0_DLE_LAST_EXTEND3));
7248 
7249 	DramPhyReset(p);
7250 
7251 }
7252 
7253 static U8 aru1RxDatlatResult[RANK_MAX];
7254 DRAM_STATUS_T DramcRxdatlatCal(DRAMC_CTX_T *p)
7255 {
7256 	U8  ucbest_step;
7257 
7258 	// error handling
7259 	if (!p)
7260 	{
7261 		err("context NULL\n");
7262 		return DRAM_FAIL;
7263 	}
7264 
7265 	msg("\n[DATLAT]\n"
7266 					"Freq=%d, CH%d RK%d\n\n", p->frequency, p->channel, p->rank);
7267 
7268 	// pre-save
7269 	// 0x07c[6:4]	DATLAT bit2-bit0
7270 	u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RDAT));
7271 
7272 
7273 	//default set FAIL
7274 	vSetCalibrationResult(p, DRAM_CALIBRATION_DATLAT, DRAM_FAIL);
7275 
7276 	// init best_step to default
7277 	ucbest_step = (U8) u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_RDAT), MISC_SHU_RDAT_DATLAT);
7278 	msg("DATLAT Default: 0x%x\n", ucbest_step);
7279 	reg_msg("DATLAT Default: 0x%x\n", ucbest_step);
7280 
7281 	// 1.set DATLAT 0-15 (0-21 for MT6595)
7282 	// 2.enable engine1 or engine2
7283 	// 3.check result  ,3~4 taps pass
7284 	// 4.set DATLAT 2nd value for optimal
7285 
7286 	// Initialize
7287 
7288 	DramcEngine2Init(p, p->test2_1, p->test2_2, p->test_pattern | 0x80, 0, TE_UI_SHIFT);//UI_SHIFT + LEN1
7289 
7290 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_DATLAT)
7291 	if (p->femmc_Ready == 1)
7292 	{
7293 			ucbest_step = p->pSavetimeData->u1RxDatlat_Save[p->channel][p->rank];
7294 	}
7295 #endif
7296 
7297 	aru1RxDatlatResult[p->rank] = ucbest_step;
7298 
7299 	msg("best_step = %d\n\n", ucbest_step);
7300 	reg_msg("best_step=%d\n\n", ucbest_step);
7301 
7302 #if __A60868_TO_BE_PORTING__
7303 #if __ETT__
7304 	U8 _init_Datlat_value = vDramcACTimingGetDatLat(p);
7305 	if ((_init_Datlat_value > (ucbest_step + 1)) || (_init_Datlat_value < (ucbest_step - 1)))
7306 	{
7307 		msg("[WARNING!!] Datlat initial value(%d) = best_step(%d) %c %d, out of range!\n\n",
7308 						   _init_Datlat_value,
7309 						   ucbest_step,
7310 						   (ucbest_step > _init_Datlat_value)? '-': '+',
7311 						   abs(ucbest_step - _init_Datlat_value));
7312 		while (1);
7313 	}
7314 #endif
7315 #endif
7316 
7317 #if defined(FOR_HQA_TEST_USED) && defined(FOR_HQA_REPORT_USED)
7318 	HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT2, "DATLAT", "", 0, ucbest_step, NULL);
7319 #endif
7320 
7321 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_DATLAT)
7322 	if (p->femmc_Ready == 1)
7323 	{
7324 		dle_factor_handler(p, ucbest_step);
7325 		vSetCalibrationResult(p, DRAM_CALIBRATION_DATLAT, DRAM_FAST_K);
7326 	}
7327 #endif
7328 
7329 	msg3("[DramcRxdatlatCal] Done\n");
7330 	return DRAM_OK;
7331 }
7332 
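// For dual-rank parts, apply the larger of the two per-rank DATLAT results (plus one extra
// cycle when read DBI is enabled) so that both ranks meet the read-data latch timing.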
7333 DRAM_STATUS_T DramcDualRankRxdatlatCal(DRAMC_CTX_T *p)
7334 {
7335 	U8 u1FinalDatlat, u1Datlat0, u1Datlat1;
7336 
7337 	u1Datlat0 = aru1RxDatlatResult[0];
7338 	u1Datlat1 = aru1RxDatlatResult[1];
7339 
7340 	if (p->support_rank_num == RANK_DUAL)
7341 	{
7342 		if (u1Datlat0 > u1Datlat1)
7343 		{
7344 			u1FinalDatlat = u1Datlat0;
7345 		}
7346 		else
7347 		{
7348 			u1FinalDatlat = u1Datlat1;
7349 		}
7350 	}
7351 	else
7352 	{
7353 		u1FinalDatlat = u1Datlat0;
7354 	}
7355 
7356 #if ENABLE_READ_DBI
7357 	if (p->DBI_R_onoff[p->dram_fsp])
7358 	{
7359 	  u1FinalDatlat++;
7360 	}
7361 #endif
7362 
7363 	dle_factor_handler(p, u1FinalDatlat);
7364 
7365 #if RDSEL_TRACKING_EN
7366 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_RDSEL_TRACK), u1FinalDatlat, SHU_MISC_RDSEL_TRACK_DMDATLAT_I);
7367 #endif
7368 
7369 	msg("[DualRankRxdatlatCal] RK0: %d, RK1: %d, Final_Datlat %d\n", u1Datlat0, u1Datlat1, u1FinalDatlat);
7370 
7371 	return DRAM_OK;
7372 
7373 }
7374 #endif // SIMULATION_DATLAT
7375 
7376 #if SIMULATION_TX_PERBIT
7377 
7378 //=============================================================
7379 ///// DramC TX per-bit calibration ----------Begin--------------
7380 //=============================================================
7381 //-------------------------------------------------------------------------
7382 /** DramcTxWindowPerbitCal (v2)
7383  *	TX DQS per bit SW calibration.
7384  *	@param p				Pointer of context created by DramcCtxCreate.
7385  *	@param	apply			(U8): 0: don't apply the register settings; 1: apply them (default: don't apply).
7386  *	@retval status			(DRAM_STATUS_T): DRAM_OK or DRAM_FAIL
7387  */
7388 //-------------------------------------------------------------------------
7389 #if (SW_CHANGE_FOR_SIMULATION || FOR_DV_SIMULATION_USED)
7390 #define TX_VREF_RANGE_BEGIN 	  0
7391 #define TX_VREF_RANGE_END			2
7392 #define TX_VREF_RANGE_STEP		   2
7393 #else
7394 #define TX_VREF_RANGE_BEGIN 	  16
7395 #define TX_VREF_RANGE_END			50 // binary 110010
7396 #define TX_VREF_RANGE_STEP		   2
7397 #endif
7398 
7399 #define TX_DQ_UI_TO_PI_TAP		   64 // 1 PI = tCK/64, total 128 PI, 1UI = 32 PI
7400 #define TX_PHASE_DQ_UI_TO_PI_TAP		 32 // 1 PI = tCK/64, total 128 PI, 1UI = 32 PI for DDR800 semi open loop mode
7401 #define LP4_TX_VREF_DATA_NUM 50
7402 #define LP4_TX_VREF_PASS_CONDITION 0
7403 #define TX_PASS_WIN_CRITERIA	7
7404 #define LP4_TX_VREF_BOUNDARY_NOT_READY 0xff
7405 
7406 typedef struct _PASS_WIN_DATA_BY_VREF_T
7407 {
7408 	U16 u2VrefUsed;
7409 	U16 u2WinSum_byVref;
7410 	U8 u1WorseBitWinSize_byVref;
7411 	U8 u1WorseBitIdx_byVref;
7412 } PASS_WIN_DATA_BY_VREF_T;
7413 
7414 static void TxWinTransferDelayToUIPI(DRAMC_CTX_T *p, U16 uiDelay, U8 u1AdjustPIToCenter, U8* pu1UILarge_DQ, U8* pu1UISmall_DQ, U8* pu1PI, U8* pu1UILarge_DQOE, U8* pu1UISmall_DQOE)
7415 {
7416 	U8 u1Small_ui_to_large, u1PI = 0, u164PIto1UI, u1TxDQOEShift = 0;
7417 	U16 u2TmpValue, u2DQOE_shift;
7418 	DDR800_MODE_T eDdr800Mode = vGet_DDR_Loop_Mode(p);
7419 	U8 u1PiTap = (u1IsPhaseMode(p) == TRUE) ? TX_PHASE_DQ_UI_TO_PI_TAP : TX_DQ_UI_TO_PI_TAP;
7420 
7421 	u1Small_ui_to_large = u1MCK2UI_DivShift(p);
7422 
7423 	#if ENABLE_WDQS_MODE_2
7424 	u1TxDQOEShift = WDQSMode2AcTxOEShift(p);
7425 	#else
7426 	u1TxDQOEShift = TX_DQ_OE_SHIFT_LP4;
7427 	#endif
7428 
7429 	if(pu1PI != NULL)
7430 	{
7431 		u1PI = uiDelay & (u1PiTap-1);
7432 		*pu1PI =u1PI;
7433 	}
7434 
7435 	if (u1IsLP4Div4DDR800(p) /*DDR800 close loop mode*/ || u1IsPhaseMode(p))
7436 		u164PIto1UI = 0;
7437 	else
7438 		u164PIto1UI = 1;
7439 
7440 	u2TmpValue = (uiDelay /u1PiTap)<<u164PIto1UI; // 1:8 mode for 2UI carry, DDR800 1:4 mode for 1UI carry
7441 
7442 	if (u1AdjustPIToCenter && (pu1PI != NULL) && (eDdr800Mode == CLOSE_LOOP_MODE))
7443 	{
7444 		if (u1PI < 10)
7445 		{
7446 			u1PI += (u1PiTap) >> 1;
7447 			u2TmpValue --;
7448 		}
7449 		else if (u1PI > u1PiTap - 10)
7450 		{
7451 			u1PI -= (u1PiTap) >> 1;
7452 			u2TmpValue ++;
7453 		}
7454 
7455 		*pu1PI = u1PI;
7456 	}
7457 
7458 	#if 0
7459 	*pu1UISmall_DQ = u2TmpValue % u1Small_ui_to_large;
7460 	*pu1UILarge_DQ = u2TmpValue / u1Small_ui_to_large;
7461 	#else
7462 	*pu1UISmall_DQ = u2TmpValue - ((u2TmpValue >> u1Small_ui_to_large) << u1Small_ui_to_large);
7463 	*pu1UILarge_DQ = (u2TmpValue >> u1Small_ui_to_large);
7464 	#endif
7465 	// calculate DQ OE according to DQ UI
7466 	#if (__LP5_COMBO__ == TRUE)
7467 	if (TRUE == is_lp5_family(p))
7468 	{
7469 		u2TmpValue -= TX_DQ_OE_SHIFT_LP5;
7470 	}
7471 	else
7472 	#endif
7473 	{
7474 		u2TmpValue -= u1TxDQOEShift;
7475 	}
7476 
7477 	if(((u1MR03Value[p->dram_fsp]&0x80)>>7)==1) //if WDBI is on, OE_DLY does not need to shift 1 MCK along with DLY
7478 	{
7479 		if (vGet_Div_Mode(p) == DIV4_MODE)
7480 			u2DQOE_shift = 4; //OE_shift = OE_shift - 3(original OE position) + 4 (MCK)
7481 		else
7482 			u2DQOE_shift = 8; //OE_shift = OE_shift - 3(original OE position) + 8 (MCK)
7483 
7484 		u2TmpValue += u2DQOE_shift;
7485 	}
7486 
7487 	*pu1UISmall_DQOE = u2TmpValue - ((u2TmpValue >> u1Small_ui_to_large) << u1Small_ui_to_large);
7488 	*pu1UILarge_DQOE = (u2TmpValue >> u1Small_ui_to_large);
7489 }
7490 
7491 static void TXPerbitCalibrationInit(DRAMC_CTX_T *p, U8 calType)
7492 {
7493 	//Set TX delay chain to 0
7494 	if (calType != TX_DQ_DQS_MOVE_DQM_ONLY)
7495 	{
7496 	#if 1
7497 	#if PINMUX_AUTO_TEST_PER_BIT_TX
7498 		if(gTX_check_per_bit_flag == 1)
7499 		{
7500 			//not reset delay cell
7501 		}
7502 		else
7503 	#endif
7504 		{
7505 			vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY0), 0);
7506 			vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY1), 0);
7507 			vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY0), 0);
7508 			vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY1), 0);
7509 		}
7510 	#else
7511 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_SHU_R0_B0_DQ0), P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ7_DLY_B0)
7512 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ6_DLY_B0)
7513 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ5_DLY_B0)
7514 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ4_DLY_B0)
7515 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ3_DLY_B0)
7516 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ2_DLY_B0)
7517 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ1_DLY_B0)
7518 			| P_Fld(0x0, SHU_R0_B0_DQ0_RK0_TX_ARDQ0_DLY_B0));
7519 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_SHU_R0_B1_DQ0), P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ7_DLY_B1)
7520 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ6_DLY_B1)
7521 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ5_DLY_B1)
7522 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ4_DLY_B1)
7523 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ3_DLY_B1)
7524 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ2_DLY_B1)
7525 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ1_DLY_B1)
7526 			| P_Fld(0x0, SHU_R0_B1_DQ0_RK0_TX_ARDQ0_DLY_B1));
7527 	#endif
7528 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY3), 0x0, SHU_R0_B0_TXDLY3_TX_ARDQM0_DLY_B0);
7529 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY3), 0x0, SHU_R0_B1_TXDLY3_TX_ARDQM0_DLY_B1);
7530 	}
7531 
7532 
7533 	//Use HW TX tracking value
7534 	//R_DMARPIDQ_SW :drphy_conf (0x170[7])(default set 1)
7535 	//	 0: DQS2DQ PI setting controlled by HW
7536 	//R_DMARUIDQ_SW : Dramc_conf(0x156[15])(default set 1)
7537 	//	  0: DQS2DQ UI setting controlled by HW
7538 	///TODO: need backup original setting?
7539 	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_MISC_CTRL1), 1, MISC_CTRL1_R_DMARPIDQ_SW);
7540 	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DQSOSCR), 1, DQSOSCR_ARUIDQ_SW);
7541 
7542 }
7543 
7544 #define TX_TDQS2DQ_PRE_CAL 0
7545 #if TX_TDQS2DQ_PRE_CAL
7546 //	(1) DDR800 1:4 mode
7547 //	(2) DDR1200/1600 1:4 mode
7548 //	(3) 1:8 mode
7549 // The 3 condition have different MCK2UI/UI2PI. Therefore, TX DQS2DQ should be record separately.
7550 // Here, we record (2) and (3).  DDR800 1:4 skip recording DQS2DQ.
7551 U16 u2DQS2DQ_Pre_Cal[CHANNEL_NUM][RANK_MAX][2/*DIV_Mode*/] = {0};
7552 #endif
7553 
7554 static void TXScanRange_PI(DRAMC_CTX_T *p, DRAM_TX_PER_BIT_CALIBRATION_TYTE_T calType, U16 *pu2Begin, U16 *pu2End)
7555 {
7556 	U8 u1MCK2UI, u1UI2PI, u1ByteIdx;
7557 	U32 u4RegValue_TXDLY, u4RegValue_dly;
7558 	U8 ucdq_ui_large_bak[DQS_NUMBER], ucdq_ui_small_bak[DQS_NUMBER];
7559 	U16 u2TempVirtualDelay, u2SmallestVirtualDelay = 0xffff;
7560 	U16 u2DQDelayBegin = 0, u2DQDelayEnd = 0;
7561 
7562 #if (__LP5_COMBO__ == TRUE)
7563 	if (TRUE == is_lp5_family(p))
7564 	{
7565 		u4RegValue_TXDLY = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_WCK_WR_MCK));
7566 		u4RegValue_dly = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_WCK_WR_UI));
7567 	}
7568 	else
7569 #endif
7570 	{
7571 		u4RegValue_TXDLY = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS0));
7572 		u4RegValue_dly = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS1));
7573 	}
7574 
7575 	u1MCK2UI = u1MCK2UI_DivShift(p);
7576 
7577 	if (vGet_DDR_Loop_Mode(p) == DDR800_CLOSE_LOOP)
7578 		u1UI2PI = 6;
7579 	else
7580 		u1UI2PI = 5;
7581 
7582 
7583 	// find smallest DQS delay
7584 	for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
7585 	{
7586 		ucdq_ui_large_bak[u1ByteIdx] = (u4RegValue_TXDLY >> (u1ByteIdx << 2)) & 0x7;// MCK
7587 		ucdq_ui_small_bak[u1ByteIdx] = (u4RegValue_dly >> (u1ByteIdx << 2)) & 0x7;// UI
7588 		//wrlevel_dqs_final_delay[p->rank][u1ByteIdx]  ==> PI
7589 
7590 		//LP4 : Virtual Delay = 256 * MCK + 32*UI + PI;
7591 		//LP3 : Virtual Delay = 128 * MCK + 32*UI + PI;
7592 		u2TempVirtualDelay = (((ucdq_ui_large_bak[u1ByteIdx] << u1MCK2UI) + ucdq_ui_small_bak[u1ByteIdx]) << u1UI2PI) + wrlevel_dqs_final_delay[p->rank][u1ByteIdx];
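		//e.g. LP4 1:8 mode (u1MCK2UI = 3, u1UI2PI = 5): MCK = 1, UI = 2, PI = 10
		//  -> (((1 << 3) + 2) << 5) + 10 = 330 = 256*1 + 32*2 + 10, matching the formula above.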
7593 
7594 		if (u2TempVirtualDelay < u2SmallestVirtualDelay)
7595 		{
7596 			u2SmallestVirtualDelay = u2TempVirtualDelay;
7597 		}
7598 	}
7599 
7600 	u2DQDelayBegin = u2SmallestVirtualDelay;
7601 
7602 	#if TX_TDQS2DQ_PRE_CAL
7603 	if (u1IsLP4Div4DDR800(p) == FALSE)
7604 	{
7605 		if (u2DQS2DQ_Pre_Cal[p->channel][p->rank][vGet_Div_Mode(p)] > 0)
7606 		{
7607 			U16 u2TmpShift;
7608 			msg("TX_TDQS2DQ_PRE_CAL : change DQ begin %d -->", u2DQDelayBegin);
7609 
7610 			u2TmpShift = (u2DQS2DQ_Pre_Cal[p->channel][p->rank][vGet_Div_Mode(p)]* p->frequency) / 1000;
7611 			if (u2TmpShift >= 15)
7612 				u2TmpShift -= 15;
7613 			else
7614 				u2TmpShift = 0;
7615 
7616 			u2DQDelayBegin += u2TmpShift;
7617 			msg("%d (+%d)\n", u2DQDelayBegin, u2TmpShift);
7618 		}
7619 	}
7620 	#endif
7621 
7622 	#if (__LP5_COMBO__)
7623 	if (is_lp5_family(p)) {
7624 		/* For DDR3200, +1.5 MCK */
7625 		if (p->frequency == 1600)
7626 			u2DQDelayBegin += (((1 << u1MCK2UI) + ((1 << u1MCK2UI) >> 1)) << u1UI2PI);
7627 		else if (p->frequency == 2133)
7628 			u2DQDelayBegin += ((1 << u1MCK2UI) << u1UI2PI);
7629 		else if (p->frequency == 2750)
7630 			u2DQDelayBegin += (9 << u1UI2PI);
7631 	}
7632 	#endif
7633 
7634 	#if TX_K_DQM_WITH_WDBI
7635 	if (calType == TX_DQ_DQS_MOVE_DQM_ONLY)
7636 	{
7637 		// DBI on, calibration range -1MCK
7638 		u2DQDelayBegin -= (1 << (u1MCK2UI + 5));
7639 	}
7640 	#endif
7641 	/* Scan range: 1MCK */
7642 	u2DQDelayEnd = u2DQDelayBegin + ((1 << u1MCK2UI) << u1UI2PI);
7643 
7644 	*pu2Begin = u2DQDelayBegin;
7645 	*pu2End = u2DQDelayEnd;
7646 
7647 	#if 0//TX_TDQS2DQ_PRE_CAL
7648 	msg("TXScanRange_PI %d~%d\n", u2DQDelayBegin, u2DQDelayEnd);
7649 	#endif
7650 }
7651 
7652 
7653 static void TXScanRange_Vref(DRAMC_CTX_T *p, U8 u1VrefScanEnable, U16* pu2Range, U16 *pu2Begin, U16 *pu2End, U16 *pu2Setp)
7654 {
7655 	U16 u2VrefBegin = 0, u2VrefEnd = 0;
7656 
7657 	if (u1VrefScanEnable)
7658 	{
7659 	#if (SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_VREF_CAL)
7660 		if (p->femmc_Ready == 1)
7661 		{
7662 			// if fast K, use TX Vref that saved.
7663 			u2VrefBegin = p->pSavetimeData->u1TxWindowPerbitVref_Save[p->channel][p->rank];
7664 			u2VrefEnd = u2VrefBegin + 1;
7665 		}
7666 	#endif
7667 	}
7668 	else //LPDDR3: the for loop will only execute u2VrefLevel=TX_VREF_RANGE_END/2.
7669 	{
7670 		u2VrefBegin = 0;
7671 		u2VrefEnd = 0;
7672 	}
7673 
7674 	*pu2Range = (!p->odt_onoff);
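	// MR14 VREF(DQ) range select: Range 1 when ODT is off (unterminated), Range 0 when ODT is on.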
7675 	*pu2Begin = u2VrefBegin;
7676 	*pu2End = u2VrefEnd;
7677 	*pu2Setp = TX_VREF_RANGE_STEP;
7678 
7679 }
7680 
7681 static U16 TxChooseVref(DRAMC_CTX_T *p, PASS_WIN_DATA_BY_VREF_T pVrefInfo[], U8 u1VrefNum)
7682 {
7683 	U8 u1VrefIdx, u1WorseBitIdx = 0, u1WinSizeOfWorseBit = 0;
7684 	U16 u2MaxWinSum = 0;
7685 	U16 u2FinalVref = 0;
7686 
7687 	for (u1VrefIdx = 0; u1VrefIdx < u1VrefNum; u1VrefIdx++)
7688 	{
7689 		msg("TX Vref=%d, minBit %d, minWin=%d, winSum=%d\n",
7690 			pVrefInfo[u1VrefIdx].u2VrefUsed,
7691 			pVrefInfo[u1VrefIdx].u1WorseBitIdx_byVref,
7692 			pVrefInfo[u1VrefIdx].u1WorseBitWinSize_byVref,
7693 			pVrefInfo[u1VrefIdx].u2WinSum_byVref);
7694 
7695 		#if LP4_TX_VREF_PASS_CONDITION
7696 		if ((pVrefInfo[u1VrefIdx].u1WorseBitWinSize_byVref > LP4_TX_VREF_PASS_CONDITION))
7697 		{
7698 			if (u1VrefPassBegin == LP4_TX_VREF_BOUNDARY_NOT_READY)
7699 			{
7700 				u1VrefPassBegin = pVrefInfo[u1VrefIdx].u2VrefUsed;
7701 				u1TempPassNum = 1;
7702 			}
7703 			else
7704 				u1TempPassNum ++;
7705 
7706 			if (u1VrefIdx == u1VrefNum - 1)
7707 			{
7708 				u1VrefPassEnd = pVrefInfo[u1VrefIdx].u2VrefUsed;
7709 				if (u1TempPassNum > u1MaxVerfPassNum)
7710 				{
7711 					u1VrefPassBegin_Final = u1VrefPassBegin;
7712 					u1VrefPassEnd_Final = u1VrefPassEnd;
7713 					u1MaxVerfPassNum = u1TempPassNum;
7714 				}
7715 			}
7716 		}
7717 		else
7718 		{
7719 			if ((u1VrefPassBegin != LP4_TX_VREF_BOUNDARY_NOT_READY) && (u1VrefPassEnd == LP4_TX_VREF_BOUNDARY_NOT_READY))
7720 			{
7721 				u1VrefPassEnd = pVrefInfo[u1VrefIdx].u2VrefUsed - TX_VREF_RANGE_STEP;
7722 				if (u1TempPassNum > u1MaxVerfPassNum)
7723 				{
7724 					u1VrefPassBegin_Final = u1VrefPassBegin;
7725 					u1VrefPassEnd_Final = u1VrefPassEnd;
7726 					u1MaxVerfPassNum = u1TempPassNum;
7727 				}
7728 				u1VrefPassBegin = 0xff;
7729 				u1VrefPassEnd = 0xff;
7730 				u1TempPassNum = 0;
7731 			}
7732 		}
7733 		#endif
7734 	}
7735 
7736 	#if LP4_TX_VREF_PASS_CONDITION
7737 	//if((u1VrefPassBegin_Final !=LP4_TX_VREF_BOUNDARY_NOT_READY) && (u1VrefPassEnd_Final!=LP4_TX_VREF_BOUNDARY_NOT_READY))
7738 	if (u1MaxVerfPassNum > 0)
7739 	{
7740 		// vref pass window found
7741 		u2FinalVref = (u1VrefPassBegin_Final + u1VrefPassEnd_Final) >> 1;
7742 		msg("[TxChooseVref] Window > %d, Vref (%d~%d), Final Vref %d\n", LP4_TX_VREF_PASS_CONDITION, u1VrefPassBegin_Final, u1VrefPassEnd_Final, u2FinalVref);
7743 	}
7744 	else
7745 	#endif
7746 	{
7747 		// no vref pass window found
7748 		for (u1VrefIdx = 0; u1VrefIdx < u1VrefNum; u1VrefIdx++)
7749 		{
7750 			if ((pVrefInfo[u1VrefIdx].u1WorseBitWinSize_byVref > u1WinSizeOfWorseBit) ||
7751 				((pVrefInfo[u1VrefIdx].u1WorseBitWinSize_byVref == u1WinSizeOfWorseBit) && (pVrefInfo[u1VrefIdx].u2WinSum_byVref > u2MaxWinSum)))
7752 			{
7753 				u1WinSizeOfWorseBit = pVrefInfo[u1VrefIdx].u1WorseBitWinSize_byVref;
7754 				u1WorseBitIdx = pVrefInfo[u1VrefIdx].u1WorseBitIdx_byVref;
7755 				u2MaxWinSum = pVrefInfo[u1VrefIdx].u2WinSum_byVref;
7756 				u2FinalVref = pVrefInfo[u1VrefIdx].u2VrefUsed;
7757 			}
7758 		}
7759 
7760 		msg("[TxChooseVref] Worse bit %d, Min win %d, Win sum %d, Final Vref %d\n", u1WorseBitIdx, u1WinSizeOfWorseBit, u2MaxWinSum, u2FinalVref);
7761 	}
7762 
7763 	return u2FinalVref;
7764 }
7765 
7766 
7767 static void DramcTXSetVref(DRAMC_CTX_T *p, U8 u1VrefRange, U8 u1VrefValue)
7768 {
7769 	U8 u1TempOPValue;
7770 
7771 #ifdef __LP5_COMBO__
7772 	if (is_lp5_family(p))
7773 		u1TempOPValue = ((u1VrefValue & 0x7f));
7774 	else
7775 #endif
7776 		u1TempOPValue = ((u1VrefValue & 0x3f) | (u1VrefRange << 6));
7777 
7778 	u1MR14Value[p->channel][p->rank][p->dram_fsp] = u1TempOPValue;
7779 	//For TX VREF of different byte
7780 
7781 	DramcModeRegWriteByRank(p, p->rank, 14, u1TempOPValue);
7782 #ifdef __LP5_COMBO__
7783 	if (is_lp5_family(p))
7784 		DramcModeRegWriteByRank(p, p->rank, 15, u1TempOPValue);
7785 #endif
7786 
7787 	#if CALIBRATION_SPEED_UP_DEBUG
7788 	msg("Yulia TX Vref : CH%d Rank%d, TX Range %d Vref %d\n\n", p->channel, p->rank, u1VrefRange, (u1VrefValue & 0x3f));
7789 	#endif
7790 }
7791 
7792 
7793 static void TXSetFinalVref(DRAMC_CTX_T *p, U16 u2FinalRange, U16 u2FinalVref)
7794 {
7795 	DramcTXSetVref(p, u2FinalRange, u2FinalVref);
7796 
7797 #ifdef FOR_HQA_TEST_USED
7798 	gFinalTXVrefDQ[p->channel][p->rank] = (U8) u2FinalVref;
7799 #endif
7800 
7801 #if VENDER_JV_LOG
7802 	msg5("\nFinal TX Range %d Vref %d\n\n", u2FinalRange, u2FinalVref);
7803 #else
7804 	msg("\nFinal TX Range %d Vref %d\n\n", u2FinalRange, u2FinalVref);
7805 #endif
7806 
7807 	#if CALIBRATION_SPEED_UP_DEBUG
7808 	msg("Yulia TX Vref Final: CH%d Rank%d, TX Range %d Vref %d\n\n", p->channel, p->rank, u2FinalRange, u2FinalVref);
7809 	#endif
7810 }
7811 
7812 
7813 #if ENABLE_TX_TRACKING
7814 #if !BYPASS_CALIBRATION
7815 static
7816 #endif
7817 void TXUpdateTXTracking(DRAMC_CTX_T *p, DRAM_TX_PER_BIT_CALIBRATION_TYTE_T calType, U8 ucdq_pi[], U8 ucdqm_pi[])
7818 {
7819 	 if (calType == TX_DQ_DQS_MOVE_DQ_ONLY || calType == TX_DQ_DQS_MOVE_DQM_ONLY)
7820 	 {
7821 		 //make a copy to the dramc reg for TX DQ tracking use
7822 		 if (calType == TX_DQ_DQS_MOVE_DQ_ONLY)
7823 		 {
7824 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_PI),
7825 							P_Fld(ucdq_pi[0], SHURK_PI_RK0_ARPI_DQ_B0) | P_Fld(ucdq_pi[1], SHURK_PI_RK0_ARPI_DQ_B1));
7826 
7827 			// Source DQ
7828 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_DQS2DQ_CAL1),
7829 													P_Fld(ucdq_pi[1], SHURK_DQS2DQ_CAL1_BOOT_ORIG_UI_RK0_DQ1) |
7830 													P_Fld(ucdq_pi[0], SHURK_DQS2DQ_CAL1_BOOT_ORIG_UI_RK0_DQ0));
7831 			// Target DQ
7832 			 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_DQS2DQ_CAL2),
7833 													 P_Fld(ucdq_pi[1], SHURK_DQS2DQ_CAL2_BOOT_TARG_UI_RK0_DQ1) |
7834 													 P_Fld(ucdq_pi[0], SHURK_DQS2DQ_CAL2_BOOT_TARG_UI_RK0_DQ0));
7835 		 }
7836 
7837 		 //if(calType ==TX_DQ_DQS_MOVE_DQM_ONLY || (calType ==TX_DQ_DQS_MOVE_DQ_ONLY))
7838 		 {
7839 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_PI),
7840 							P_Fld(ucdqm_pi[0], SHURK_PI_RK0_ARPI_DQM_B0) | P_Fld(ucdqm_pi[1], SHURK_PI_RK0_ARPI_DQM_B1));
7841 
7842 			// Target DQM
7843 			 vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_DQS2DQ_CAL5),
7844 												 P_Fld(ucdqm_pi[1], SHURK_DQS2DQ_CAL5_BOOT_TARG_UI_RK0_DQM1) |
7845 												 P_Fld(ucdqm_pi[0], SHURK_DQS2DQ_CAL5_BOOT_TARG_UI_RK0_DQM0));
7846 		 }
7847 	 }
7848 
7849 
7850 #if 0// for LP3 , TX tracking will be disable, don't need to set DQ delay in DramC.
7851 	 ///TODO: check LP3 byte mapping of dramC
7852 	 vIO32WriteFldMulti(DRAMC_REG_SHURK0_PI + (CHANNEL_A << POS_BANK_NUM), \
7853 							  P_Fld(ucdq_final_pi[0], SHURK0_PI_RK0_ARPI_DQ_B0) | P_Fld(ucdq_final_pi[1], SHURK0_PI_RK0_ARPI_DQ_B1));
7854 
7855 	 vIO32WriteFldMulti(DRAMC_REG_SHURK0_PI + SHIFT_TO_CHB_ADDR, \
7856 							  P_Fld(ucdq_final_pi[2], SHURK0_PI_RK0_ARPI_DQ_B0) | P_Fld(ucdq_final_pi[3], SHURK0_PI_RK0_ARPI_DQ_B1));
7857 #endif
7858 
7859 }
7860 #endif //End ENABLE_TX_TRACKING
7861 
7862 #if !BYPASS_CALIBRATION
7863 static
7864 #endif
7865 void TXSetDelayReg_DQ(DRAMC_CTX_T *p, U8 u1UpdateRegUI, U8 ucdq_ui_large[], U8 ucdq_oen_ui_large[], U8 ucdq_ui_small[], U8 ucdq_oen_ui_small[], U8 ucdql_pi[])
7866 {
7867 	if (u1UpdateRegUI)
7868 	{
7869 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), \
7870 									 P_Fld(ucdq_ui_large[0], SHURK_SELPH_DQ0_TXDLY_DQ0) |
7871 									 P_Fld(ucdq_ui_large[1], SHURK_SELPH_DQ0_TXDLY_DQ1) |
7872 									 P_Fld(ucdq_ui_large[2], SHURK_SELPH_DQ0_TXDLY_DQ2) |
7873 									 P_Fld(ucdq_ui_large[3], SHURK_SELPH_DQ0_TXDLY_DQ3) |
7874 									 P_Fld(ucdq_oen_ui_large[0], SHURK_SELPH_DQ0_TXDLY_OEN_DQ0) |
7875 									 P_Fld(ucdq_oen_ui_large[1], SHURK_SELPH_DQ0_TXDLY_OEN_DQ1) |
7876 									 P_Fld(ucdq_oen_ui_large[2], SHURK_SELPH_DQ0_TXDLY_OEN_DQ2) |
7877 									 P_Fld(ucdq_oen_ui_large[3], SHURK_SELPH_DQ0_TXDLY_OEN_DQ3));
7878 
7879 		// DLY_DQ[2:0]
7880 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), \
7881 									 P_Fld(ucdq_ui_small[0], SHURK_SELPH_DQ2_DLY_DQ0) |
7882 									 P_Fld(ucdq_ui_small[1], SHURK_SELPH_DQ2_DLY_DQ1) |
7883 									 P_Fld(ucdq_ui_small[2], SHURK_SELPH_DQ2_DLY_DQ2) |
7884 									 P_Fld(ucdq_ui_small[3], SHURK_SELPH_DQ2_DLY_DQ3) |
7885 									 P_Fld(ucdq_oen_ui_small[0], SHURK_SELPH_DQ2_DLY_OEN_DQ0) |
7886 									 P_Fld(ucdq_oen_ui_small[1], SHURK_SELPH_DQ2_DLY_OEN_DQ1) |
7887 									 P_Fld(ucdq_oen_ui_small[2], SHURK_SELPH_DQ2_DLY_OEN_DQ2) |
7888 									 P_Fld(ucdq_oen_ui_small[3], SHURK_SELPH_DQ2_DLY_OEN_DQ3));
7889 	}
7890 
7891 
7892 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), ucdql_pi[0], SHU_R0_B0_DQ0_SW_ARPI_DQ_B0);
7893 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), ucdql_pi[1], SHU_R0_B1_DQ0_SW_ARPI_DQ_B1);
7894 }
7895 
7896 #if !BYPASS_CALIBRATION
7897 static
7898 #endif
7899 void TXSetDelayReg_DQM(DRAMC_CTX_T *p, U8 u1UpdateRegUI, U8 ucdqm_ui_large[], U8 ucdqm_oen_ui_large[], U8 ucdqm_ui_small[], U8 ucdqm_oen_ui_small[], U8 ucdqm_pi[])
7900 {
7901 	if (u1UpdateRegUI)
7902 	{
7903 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1),
7904 									 P_Fld(ucdqm_ui_large[0], SHURK_SELPH_DQ1_TXDLY_DQM0) |
7905 									 P_Fld(ucdqm_ui_large[1], SHURK_SELPH_DQ1_TXDLY_DQM1) |
7906 									 P_Fld(ucdqm_ui_large[2], SHURK_SELPH_DQ1_TXDLY_DQM2) |
7907 									 P_Fld(ucdqm_ui_large[3], SHURK_SELPH_DQ1_TXDLY_DQM3) |
7908 									 P_Fld(ucdqm_oen_ui_large[0], SHURK_SELPH_DQ1_TXDLY_OEN_DQM0) |
7909 									 P_Fld(ucdqm_oen_ui_large[1], SHURK_SELPH_DQ1_TXDLY_OEN_DQM1) |
7910 									 P_Fld(ucdqm_oen_ui_large[2], SHURK_SELPH_DQ1_TXDLY_OEN_DQM2) |
7911 									 P_Fld(ucdqm_oen_ui_large[3], SHURK_SELPH_DQ1_TXDLY_OEN_DQM3));
7912 
7913 		 // DLY_DQM[2:0]
7914 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3),
7915 									 P_Fld(ucdqm_ui_small[0], SHURK_SELPH_DQ3_DLY_DQM0) |
7916 									 P_Fld(ucdqm_ui_small[1], SHURK_SELPH_DQ3_DLY_DQM1) |
7917 									 P_Fld(ucdqm_ui_small[2], SHURK_SELPH_DQ3_DLY_DQM2) |
7918 									 P_Fld(ucdqm_ui_small[3], SHURK_SELPH_DQ3_DLY_DQM3) |
7919 									 P_Fld(ucdqm_oen_ui_small[0], SHURK_SELPH_DQ3_DLY_OEN_DQM0) |
7920 									 P_Fld(ucdqm_oen_ui_small[1], SHURK_SELPH_DQ3_DLY_OEN_DQM1) |
7921 									 P_Fld(ucdqm_oen_ui_small[2], SHURK_SELPH_DQ3_DLY_OEN_DQM2) |
7922 									 P_Fld(ucdqm_oen_ui_small[3], SHURK_SELPH_DQ3_DLY_OEN_DQM3));
7923 	}
7924 
7925 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), ucdqm_pi[0], SHU_R0_B0_DQ0_SW_ARPI_DQM_B0);
7926 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), ucdqm_pi[1], SHU_R0_B1_DQ0_SW_ARPI_DQM_B1);
7927 }
7928 
7929 #if TX_AUTO_K_ENABLE
7930 static void Tx_Auto_K_Init(DRAMC_CTX_T *p, DRAM_TX_PER_BIT_CALIBRATION_TYTE_T calType, U8 ucdq_pi, U8 u1PI_Len)
7931 {
7932 	u8 pi_thrd = 0xa;
7933 
7934 #if FOR_DV_SIMULATION_USED == 1
7935 	cal_sv_rand_args_t *psra = get_psra();
7936 
7937 	if (psra) {
7938 		pi_thrd = psra->tx_atk_pass_pi_thrd & 0xFF;
7939 		early_break = psra->tx_atk_early_break & 0xFF;
7940 	}
7941 #endif
7942 
7943 #if ENABLE_PA_IMPRO_FOR_TX_AUTOK
7944 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DCM_SUB_CTRL), 0x1, DCM_SUB_CTRL_SUBCLK_CTRL_TX_AUTOK);
7945 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_CG_SET0), 0x1, TX_CG_SET0_TX_ATK_CLKRUN);
7946 #endif
7947 
7948 	if (calType == TX_DQ_DQS_MOVE_DQ_DQM)
7949 	{
7950 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1),
7951 									 P_Fld(0x1, TX_ATK_SET1_TX_ATK_DQ_PI_EN) |	  //enable TX DQ auto K
7952 									 P_Fld(0x1, TX_ATK_SET1_TX_ATK_DQM_PI_EN));   //enable TX DQM auto K
7953 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET0),
7954 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQ_B0_PI_INIT) |    //Set begin position of DQ B0
7955 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQ_B1_PI_INIT) |    //Set begin position of DQ B1
7956 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQM_B0_PI_INIT) |   //Set begin position of DQM B0
7957 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQM_B1_PI_INIT));   //Set begin position of DQM B1
7958 	}
7959 	else if (calType == TX_DQ_DQS_MOVE_DQM_ONLY)
7960 	{
7961 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x1, TX_ATK_SET1_TX_ATK_DQM_PI_EN);
7962 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET0),
7963 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQM_B0_PI_INIT) |
7964 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQM_B1_PI_INIT));
7965 	}
7966 	else
7967 	{
7968 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x1, TX_ATK_SET1_TX_ATK_DQ_PI_EN);
7969 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET0),
7970 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQ_B0_PI_INIT) |
7971 									 P_Fld(ucdq_pi, TX_ATK_SET0_TX_ATK_DQ_B1_PI_INIT));
7972 	}
7973 
7974 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0, MISC_CTRL1_R_DMARPIDQ_SW); //Switch PI SW mode to HW mode (control by DRAMC not APHY)
7975 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1),
7976 			P_Fld(u1PI_Len, TX_ATK_SET1_TX_ATK_PI_LEN) |	//enable TX auto k len
7977 			P_Fld(pi_thrd, TX_ATK_SET1_TX_ATK_PASS_PI_THRD));  //Set threshold of PI pass window
7978 #if (fcFOR_CHIP_ID == fcIPM) //Fix at Mar_gaux
7979 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), early_break, TX_ATK_SET1_TX_ATK_EARLY_BREAK); //Enable early break
7980 #endif
7981 
7982 #if (__LP5_COMBO__ == TRUE)
7983 	if (TRUE == is_lp5_family(p))
7984 	{
7985 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_TX_SET0),
7986 				P_Fld(0x5, SHU_TX_SET0_TXOEN_AUTOSET_OFFSET) |
7987 				P_Fld(0x1, SHU_TX_SET0_TXOEN_AUTOSET_EN));	 //Enable OE auto adjust
7988 	}
7989 	else
7990 #endif
7991 	{
7992 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_TX_SET0),
7993 				P_Fld(0x3, SHU_TX_SET0_TXOEN_AUTOSET_OFFSET) |
7994 				P_Fld(0x1, SHU_TX_SET0_TXOEN_AUTOSET_EN));	 //Enable OE auto adjust
7995 	}
7996 
7997 #if TX_AUTO_K_DEBUG_ENABLE
7998 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x1, TX_ATK_SET1_TX_ATK_DBG_EN);
7999 #endif
8000 }
8001 
8002 static void Tx_Auto_K_complete_check(DRAMC_CTX_T *p)
8003 {
8004 	U32 u4loop_count = 0;
8005 
8006 	while ((u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_RESULT8), TX_ATK_RESULT8_TX_ATK_DONE) != 0x1))
8007 	{
8008 		mcDELAY_US(1);
8009 		u4loop_count++;
8010 		//msg("Wait! TX auto K is not done!\n");
8011 		if (u4loop_count > 100000)
8012 		{
8013 			err("Error! TX auto K is not done!\n");
8014 			break;
8015 		}
8016 	}
8017 
8018 	if ((u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_RESULT8), TX_ATK_RESULT8_TX_ATK_FIND_PW) == 0x1))
8019 	{
8020 		vSetCalibrationResult(p, DRAM_CALIBRATION_TX_PERBIT, DRAM_OK);
8021 		msg("Tx auto K, all bits found a pass window\n");
8022 	}
8023 	else
8024 	{
8025 		err("Error! TX auto K failed!\n");
8026 	}
8027 }
8028 
8029 static void Tx_Auto_K_Clear(DRAMC_CTX_T *p)
8030 {
8031 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x0, TX_ATK_SET1_TX_ATK_TRIG); //Disable Tx auto K
8032 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_TX_SET0), 0x0, SHU_TX_SET0_TXOEN_AUTOSET_EN);
8033 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0x1, MISC_CTRL1_R_DMARPIDQ_SW);
8034 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x0, TX_ATK_SET1_TX_ATK_DBG_EN);
8035 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x1, TX_ATK_SET1_TX_ATK_CLR); //Clear state machine
8036 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x0, TX_ATK_SET1_TX_ATK_CLR);
8037 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1),
8038 								 P_Fld(0x0, TX_ATK_SET1_TX_ATK_PI_LEN) |
8039 								 P_Fld(0x0, TX_ATK_SET1_TX_ATK_DQ_PI_EN) |
8040 								 P_Fld(0x0, TX_ATK_SET1_TX_ATK_DQM_PI_EN));
8041 #if ENABLE_PA_IMPRO_FOR_TX_AUTOK
8042 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_CG_SET0), 0x0, TX_CG_SET0_TX_ATK_CLKRUN);
8043 #endif
8044 }
8045 
8046 #if TX_AUTO_K_WORKAROUND
8047 static void Tx_Auto_K_DQM_Workaround(DRAMC_CTX_T *p)
8048 {
8049 	//Set RK1 DQM DLY to RK0
8050 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1), u4DQM_MCK_RK1_backup);
8051 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3), u4DQM_UI_RK1_backup);
8052 	vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), u4DQM_PI_RK1_backup[0]);
8053 	vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), u4DQM_PI_RK1_backup[1]);
8054 }
8055 static void Tx_Auto_K_DQ_Workaround(DRAMC_CTX_T *p)
8056 {
8057 	//Set RK1 DQ DLY to RK0
8058 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), u4DQ_MCK_RK1_backup);
8059 	vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), u4DQ_UI_RK1_backup);
8060 	vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), u4DQ_PI_RK1_backup[0]);
8061 	vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), u4DQ_PI_RK1_backup[1]);
8062 }
8063 #endif
8064 
8065 #if TX_AUTO_K_DEBUG_ENABLE
8066 static void Tx_Auto_K_Debug_Message(DRAMC_CTX_T *p, U8 u1PI_Len)
8067 {
8068 	U8 u1bit_num = 0, u1BitIdx;
8069 	U16 u2Length = 0, u2Length_max = 0;
8070 	U32 u4status;
8071 	U32 u4status_bit[4][DQ_DATA_WIDTH_LP4];
8072 
8073 	if (u1PI_Len == 0)
8074 		u2Length_max = 48;
8075 	else
8076 		u2Length_max = 32 * (1 + u1PI_Len);
8077 
8078 	for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
8079 	{
8080 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), u1BitIdx, TX_ATK_SET1_TX_ATK_DBG_BIT_SEL);
8081 
8082 		u4status_bit[0][u1BitIdx] = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_DBG_BIT_STATUS1));
8083 		u4status_bit[1][u1BitIdx] = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_DBG_BIT_STATUS2));
8084 		u4status_bit[2][u1BitIdx] = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_DBG_BIT_STATUS3));
8085 		u4status_bit[3][u1BitIdx] = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_DBG_BIT_STATUS4));
8086 	}
8087 
8088 	msg2("Debug TX DQ PASS/FAIL status:\n");
8089 
8090 	for (u2Length = 0; u2Length < u2Length_max; u2Length++)
8091 	{
8092 		msg2("Delay=%3d ", u2Length);
8093 
8094 		for (u1bit_num = 0; u1bit_num < p->data_width; u1bit_num++)
8095 		{
8096 			u4status = ((u4status_bit[u2Length / 32][u1bit_num] >> (u2Length % 32)) & 0x1);
8097 
8098 			if (u4status == 0)
8099 			{
8100 				msg2("x");
8101 			}
8102 			else
8103 			{
8104 				msg2("o");
8105 			}
8106 
8107 			if (u1bit_num == (p->data_width - 1))
8108 			{
8109 				msg2(" \n");
8110 			}
8111 		}
8112 	}
8113 
8114 	//msg("Debug DQ PASS(1)/FAIL(0) bit: %d, STATUS1: 0x%x, STATUS2: 0x%x, STATUS3: 0x%x, STATUS4: 0x%x,\n",u1BitIdx,u4status_bit[0][u1BitIdx],u4status_bit[1][u1BitIdx],u4status_bit[2][u1BitIdx],u4status_bit[3][u1BitIdx]);
8115 }
8116 #endif
8117 #endif
8118 
8119 #if TX_K_DQM_WITH_WDBI
8120 void vSwitchWriteDBISettings(DRAMC_CTX_T *p, U8 u1OnOff)
8121 {
8122 	S8 u1TXShiftMCK;
8123 
8124 	u1TXShiftMCK = (u1OnOff)? -1: 1;
8125 	DramcWriteShiftMCKForWriteDBI(p, u1TXShiftMCK); //Tx DQ/DQM -1 MCK for write DBI ON
8126 
8127 	SetDramModeRegForWriteDBIOnOff(p, p->dram_fsp, u1OnOff);
8128 	DramcWriteDBIOnOff(p, u1OnOff);
8129 
8130 	#if (TX_AUTO_K_ENABLE && TX_AUTO_K_WORKAROUND)
8131 	if (p->rank == RANK_1)
8132 	{
8133 		u4DQ_MCK_RK1_backup = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0));
8134 		u4DQ_UI_RK1_backup = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2));
8135 		u4DQ_PI_RK1_backup[0] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0));
8136 		u4DQ_PI_RK1_backup[1] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0));
8137 	}
8138 	#endif
8139 }
8140 #endif
8141 
8142 PASS_WIN_DATA_T WinPerBit[DQ_DATA_WIDTH], VrefWinPerBit[DQ_DATA_WIDTH], FinalWinPerBit[DQ_DATA_WIDTH];
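/*
 * Flow summary (of the function below): derive the TX PI scan range and, if u1VrefScanEnable
 * is set, the Vref scan range; for each Vref step sweep the DQ/DQM delay (TA2 write/read
 * compares, or the TX auto-K hardware when isAutoK is set) and collect per-bit pass windows.
 * A Vref-scan invocation ends by picking the final Vref via TxChooseVref(); a time-domain
 * invocation instead programs the per-byte MCK/UI/PI delays (plus per-bit delay cells when
 * supported) for the remaining ranks and refreshes the TX-tracking registers.
 */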
8143 DRAM_STATUS_T DramcTxWindowPerbitCal(DRAMC_CTX_T *p, DRAM_TX_PER_BIT_CALIBRATION_TYTE_T calType, U8 u1VrefScanEnable, u8 isAutoK)
8144 {
8145 	U8 u1BitTemp, u1BitIdx, u1ByteIdx, u1RankIdx, backup_rank;
8146 	U32 uiFinishCount;
8147 
8148 	U16 uiDelay, u2DQDelayBegin, u2DQDelayEnd, u2DQDelayStep = 1;
8149 
8150 	U8 ucdq_pi, ucdq_ui_small, ucdq_ui_large, ucdq_oen_ui_small, ucdq_oen_ui_large;
8151 	U8 ucdq_ui_small_reg_value, u1UpdateRegUI;	// for UI and TXDLY change check, if different , set reg.
8152 
8153 	U8 ucdq_reg_pi[DQS_NUMBER], ucdq_reg_ui_large[DQS_NUMBER], ucdq_reg_ui_small[DQS_NUMBER];
8154 	U8 ucdq_reg_oen_ui_large[DQS_NUMBER], ucdq_reg_oen_ui_small[DQS_NUMBER];
8155 
8156 	U8 ucdq_reg_dqm_pi[DQS_NUMBER] = {0}, ucdq_reg_dqm_ui_large[DQS_NUMBER] = {0}, ucdq_reg_dqm_ui_small[DQS_NUMBER] = {0};
8157 	U8 ucdq_reg_dqm_oen_ui_large[DQS_NUMBER] = {0}, ucdq_reg_dqm_oen_ui_small[DQS_NUMBER] = {0};
8158 
8159 	#if 1//TX_DQM_CALC_MAX_MIN_CENTER
8160 	U16 u2DQM_Delay;  // LP4 only
8161 	U16 u2Center_min[DQS_NUMBER] = {0}, u2Center_max[DQS_NUMBER] = {0};
8162 	#endif
8163 	U8 u1EnableDelayCell = 0;
8164 	U16 u2DelayCellOfst[DQ_DATA_WIDTH] = {0};
8165 	U32 u4err_value, u4fail_bit;
8166 	U16 u2FinalRange = 0, u2FinalVref;
8167 	U16 u2VrefLevel, u2VrefBegin = 0, u2VrefEnd = 0, u2VrefStep;
8168 	U16 u2TempWinSum, u2MaxWindowSum = 0;//, u2tx_window_sum[LP4_TX_VREF_DATA_NUM]={0};
8169 	U8 u1min_bit, u1min_winsize = 0;
8170 	U8 u1VrefIdx = 0;
8171 	U8 u1PIDiff;
8172 	PASS_WIN_DATA_BY_VREF_T VrefInfo[LP4_TX_VREF_DATA_NUM];
8173 
8174 	if (!p)
8175 	{
8176 		err("context NULL\n");
8177 		return DRAM_FAIL;
8178 	}
8179 
8180 	#if TX_AUTO_K_ENABLE
8181 	U8 u1PI_Len, u1dq_shift;
8182 	U32 PwMaxInitReg[4] = {DRAMC_REG_TX_ATK_RESULT0, DRAMC_REG_TX_ATK_RESULT1, DRAMC_REG_TX_ATK_RESULT2, DRAMC_REG_TX_ATK_RESULT3};
8183 	U32 PwMaxLenReg[4] = {DRAMC_REG_TX_ATK_RESULT4, DRAMC_REG_TX_ATK_RESULT5, DRAMC_REG_TX_ATK_RESULT6, DRAMC_REG_TX_ATK_RESULT7};
8184 	U32 u4Length = 0;
8185 	#if TX_AUTO_K_WORKAROUND
8186 	U8 u1backup_Rank = 0;
8187 	#endif
8188 	#if TX_AUTO_K_WORKAROUND
8189 	U32 u4RegBackupAddress[] =
8190 	{
8191 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0)),
8192 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2)),
8193 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0)),
8194 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0)),
8195 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1)),
8196 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3)),
8197 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0)),
8198 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0)),
8199 	};
8200 	#endif
8201 	#endif
8202 
8203 #if VENDER_JV_LOG
8204 	if (calType == TX_DQ_DQS_MOVE_DQ_ONLY)
8205 		vPrintCalibrationBasicInfo_ForJV(p);
8206 #else
8207 		vPrintCalibrationBasicInfo(p);
8208 #endif
8209 
8210 	backup_rank = u1GetRank(p);
8211 
8212 	TXPerbitCalibrationInit(p, calType);
8213 	TXScanRange_PI(p, calType, &u2DQDelayBegin, &u2DQDelayEnd);
8214 	TXScanRange_Vref(p, u1VrefScanEnable, &u2FinalRange, &u2VrefBegin, &u2VrefEnd, &u2VrefStep);
8215 
8216 	//default set FAIL
8217 	vSetCalibrationResult(p, DRAM_CALIBRATION_TX_PERBIT, DRAM_FAIL);
8218 
8219 	if (isAutoK)
8220 	{
8221 	#if TX_AUTO_K_ENABLE
8222 	//CKEFixOnOff(p, p->rank, CKE_FIXON, CKE_WRITE_TO_ONE_CHANNEL); //Let CLK always on
8223 
8224 	//Set base address of TX MCK and UI
8225 	u1UpdateRegUI = 1;
8226 	uiDelay = u2DQDelayBegin;
8227 	u1PI_Len = 3;
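	// Note (illustrative): Tx_Auto_K_Debug_Message() treats the sweep length as 32 * (1 + u1PI_Len)
	// PI (or 48 when u1PI_Len is 0), so u1PI_Len = 3 asks the hardware to cover 128 PI, which lines
	// up with the 1-MCK software scan range from TXScanRange_PI() when one MCK corresponds to 4 UI.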
8228 	TxWinTransferDelayToUIPI(p, uiDelay, 0, &ucdq_ui_large, &ucdq_ui_small, &ucdq_pi, &ucdq_oen_ui_large, &ucdq_oen_ui_small);
8229 
8230 	for (u1ByteIdx = 0; u1ByteIdx < DQS_NUMBER; u1ByteIdx++)
8231 	{
8232 		if (u1UpdateRegUI)
8233 		{
8234 			ucdq_reg_ui_large[u1ByteIdx] = ucdq_ui_large;
8235 			ucdq_reg_ui_small[u1ByteIdx] = ucdq_ui_small;
8236 			ucdq_reg_oen_ui_large[u1ByteIdx] = ucdq_oen_ui_large;
8237 			ucdq_reg_oen_ui_small[u1ByteIdx] = ucdq_oen_ui_small;
8238 
8239 			ucdq_reg_dqm_ui_large[u1ByteIdx] = ucdq_ui_large;
8240 			ucdq_reg_dqm_ui_small[u1ByteIdx] = ucdq_ui_small;
8241 			ucdq_reg_dqm_oen_ui_large[u1ByteIdx] = ucdq_oen_ui_large;
8242 			ucdq_reg_dqm_oen_ui_small[u1ByteIdx] = ucdq_oen_ui_small;
8243 		}
8244 
8245 			ucdq_reg_pi[u1ByteIdx] = ucdq_pi;
8246 			ucdq_reg_dqm_pi[u1ByteIdx] = ucdq_pi;
8247 	}
8248 
8249 	#if TX_AUTO_K_WORKAROUND
8250 	if (p->rank == 1)
8251 	{
8252 		u1backup_Rank = 1;
8253 		p->rank = 0;
8254 		DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
8255 	}
8256 	#endif
8257 
8258 	if (calType == TX_DQ_DQS_MOVE_DQ_ONLY || calType == TX_DQ_DQS_MOVE_DQ_DQM)
8259 	{
8260 		TXSetDelayReg_DQ(p, u1UpdateRegUI, ucdq_reg_ui_large, ucdq_reg_oen_ui_large, ucdq_reg_ui_small, ucdq_reg_oen_ui_small, ucdq_reg_pi);
8261 		msg("TX Auto-K set begin delay DQ MCK: %d, UI: %d, PI: %d\n", ucdq_reg_ui_large[0], ucdq_reg_ui_small[0], ucdq_reg_pi[0]);
8262 
8263 		#if TX_AUTO_K_WORKAROUND
8264 		if ((calType == TX_DQ_DQS_MOVE_DQ_ONLY) && (u1backup_Rank == 1))
8265 			Tx_Auto_K_DQM_Workaround(p); //Set best DLY value of RK1 DQM to RK0 DQM
8266 		#endif
8267 	}
8268 	if (calType == TX_DQ_DQS_MOVE_DQM_ONLY || calType == TX_DQ_DQS_MOVE_DQ_DQM)
8269 	{
8270 		TXSetDelayReg_DQM(p, u1UpdateRegUI, ucdq_reg_dqm_ui_large, ucdq_reg_dqm_oen_ui_large, ucdq_reg_dqm_ui_small, ucdq_reg_dqm_oen_ui_small, ucdq_reg_dqm_pi);
8271 		msg("TX Auto-K set begin delay DQM MCK: %d, UI: %d, PI: %d\n", ucdq_reg_dqm_ui_large[0], ucdq_reg_dqm_ui_small[0], ucdq_reg_dqm_pi[0]);
8272 
8273 		#if TX_AUTO_K_WORKAROUND
8274 		if ((calType == TX_DQ_DQS_MOVE_DQM_ONLY) && (u1backup_Rank == 1))
8275 			Tx_Auto_K_DQ_Workaround(p); //Set best DLY value of RK1 DQ to RK0 DQ
8276 		#endif
8277 	}
8278 
8279 	#if TX_AUTO_K_WORKAROUND
8280 	if (u1backup_Rank == 1)
8281 		p->rank = 1;
8282 	#endif
8283 
8284 	//Tx_Auto_K_Init(p, calType, ucdq_pi, u1PI_Len); //u1PI_Len = 1 means that PI len is 64 PI
8285 	#endif
8286 	}
8287 	else
8288 	{
8289 	if (vGet_DDR_Loop_Mode(p) == SEMI_OPEN_LOOP_MODE)
8290 		u2DQDelayStep = (1 << 3);
8291 	else if (vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE)
8292 		u2DQDelayStep = (1 << 4);
8293 	else if (calType == TX_DQ_DQS_MOVE_DQ_DQM)
8294 		u2DQDelayStep = 2;
8295 	else
8296 		u2DQDelayStep = 1;
8297 	if (is_lp5_family(p))
8298 		u2DQDelayStep = 4; /* To speed up simulation */
8299 	#if (FOR_DV_SIMULATION_USED == 1)
8300 		u2DQDelayStep = (vGet_DDR_Loop_Mode(p) == OPEN_LOOP_MODE) ? 16 : 8;
8301 	#endif
8302 	}
8303 
8304 #if 0
8305 	msg("[TxWindowPerbitCal] calType=%d, VrefScanEnable %d (Range %d,  VrefBegin %d, u2VrefEnd %d)\n"
8306 					"\nBegin, DQ Scan Range %d~%d\n",
8307 					calType, u1VrefScanEnable, u2FinalRange, u2VrefBegin, u2VrefEnd, u2DQDelayBegin, u2DQDelayEnd);
8308 #endif
8309 
8310 	#if SUPPORT_SAVE_TIME_FOR_CALIBRATION
8311 	if (p->femmc_Ready == 1 && (p->Bypass_TXWINDOW))
8312 	{
8313 		for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
8314 		{
8315 			u2Center_min[u1ByteIdx] = p->pSavetimeData->u1TxCenter_min_Save[p->channel][p->rank][u1ByteIdx];
8316 			u2Center_max[u1ByteIdx] = p->pSavetimeData->u1TxCenter_max_Save[p->channel][p->rank][u1ByteIdx];
8317 
8318 			for (u1BitIdx = 0; u1BitIdx < DQS_BIT_NUMBER; u1BitIdx++)
8319 			{
8320 				u1BitTemp = u1ByteIdx * DQS_BIT_NUMBER + u1BitIdx;
8321 				FinalWinPerBit[u1BitTemp].win_center = p->pSavetimeData->u1Txwin_center_Save[p->channel][p->rank][u1BitTemp];
8322 			}
8323 		}
8324 		vSetCalibrationResult(p, DRAM_CALIBRATION_TX_PERBIT, DRAM_FAST_K);
8325 	}
8326 	else
8327 	#endif
8328 	{
8329 #if ENABLE_K_WITH_WORST_SI_UI_SHIFT
8330 		DramcEngine2Init(p, p->test2_1, p->test2_2, p->test_pattern | 0x80, 0, TE_UI_SHIFT);//UI_SHIFT + LEN1
8331 #else
8332 		DramcEngine2Init(p, p->test2_1, p->test2_2, TEST_XTALK_PATTERN, 0, TE_NO_UI_SHIFT);
8333 #endif
8334 
8335 		for (u2VrefLevel = u2VrefBegin; u2VrefLevel <= u2VrefEnd; u2VrefLevel += u2VrefStep)
8336 		{
8337 			// SET tx Vref (DQ) here, LP3 no need to set this.
8338 			if (u1VrefScanEnable)
8339 			{
8340 				#if (!REDUCE_LOG_FOR_PRELOADER)
8341 				msg("\n\n\tLP4 TX VrefRange %d, VrefLevel=%d\n", u2FinalRange, u2VrefLevel);
8342 				#endif
8343 
8344 				#if VENDER_JV_LOG
8345 				if (calType == TX_DQ_DQS_MOVE_DQ_ONLY)
8346 				{
8347 					msg5("\n\tLP4 TX VrefRange %d, VrefLevel=%d\n", u2FinalRange, u2VrefLevel);
8348 				}
8349 				#endif
8350 
8351 				DramcTXSetVref(p, u2FinalRange, u2VrefLevel);
8352 			}
8353 			else
8354 			{
8355 				msg("\n\n\tTX Vref Scan disable\n");
8356 			}
8357 
8358 			// initialize parameters
8359 			uiFinishCount = 0;
8360 			u2TempWinSum = 0;
8361 			ucdq_ui_small_reg_value = 0xff;
8362 
8363 			for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
8364 			{
8365 				WinPerBit[u1BitIdx].first_pass = (S16)PASS_RANGE_NA;
8366 				WinPerBit[u1BitIdx].last_pass = (S16)PASS_RANGE_NA;
8367 				VrefWinPerBit[u1BitIdx].first_pass = (S16)PASS_RANGE_NA;
8368 				VrefWinPerBit[u1BitIdx].last_pass = (S16)PASS_RANGE_NA;
8369 			}
8370 
8371 			if (isAutoK)
8372 			{
8373 			#if TX_AUTO_K_ENABLE
8374 			Tx_Auto_K_Init(p, calType, ucdq_pi, u1PI_Len); //u1PI_Len = 1 means that PI len is 64 PI
8375 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET1), 0x1, TX_ATK_SET1_TX_ATK_TRIG); //TX Auto K start
8376 			#endif
8377 			}
8378 			else
8379 			{
8380 			//Move DQ delay, 1 PI = tCK/64, total 128 PI, 1 UI = 32 PI
8381 			//For data rate 3200, max tDQS2DQ is 2.56UI (82 PI)
8382 			//For data rate 4266, max tDQS2DQ is 3.41UI (109 PI)
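			// Worked example (illustrative numbers): at p->frequency = 1600 (DDR3200) one tCK is
			// 625ps, so 1 PI = tCK/64 ~ 9.8ps and the quoted 2.56 UI tDQS2DQ (~800ps) is the
			// 82 PI mentioned above; the same arithmetic at DDR4266 gives the ~109 PI figure.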
8383 			for (uiDelay = u2DQDelayBegin; uiDelay < u2DQDelayEnd; uiDelay += u2DQDelayStep)
8384 			{
8385 				TxWinTransferDelayToUIPI(p, uiDelay, 0, &ucdq_ui_large, &ucdq_ui_small, &ucdq_pi, &ucdq_oen_ui_large, &ucdq_oen_ui_small);
8386 
8387 				// Check if TX UI changed; if not changed, no need to set the reg again
8388 				if (ucdq_ui_small_reg_value != ucdq_ui_small)
8389 				{
8390 					u1UpdateRegUI = 1;
8391 					ucdq_ui_small_reg_value = ucdq_ui_small;
8392 				}
8393 				else
8394 					u1UpdateRegUI = 0;
8395 
8396 				for (u1ByteIdx = 0; u1ByteIdx < DQS_NUMBER; u1ByteIdx++)
8397 				{
8398 					if (u1UpdateRegUI)
8399 					{
8400 						ucdq_reg_ui_large[u1ByteIdx] = ucdq_ui_large;
8401 						ucdq_reg_ui_small[u1ByteIdx] = ucdq_ui_small;
8402 						ucdq_reg_oen_ui_large[u1ByteIdx] = ucdq_oen_ui_large;
8403 						ucdq_reg_oen_ui_small[u1ByteIdx] = ucdq_oen_ui_small;
8404 
8405 						ucdq_reg_dqm_ui_large[u1ByteIdx] = ucdq_ui_large;
8406 						ucdq_reg_dqm_ui_small[u1ByteIdx] = ucdq_ui_small;
8407 						ucdq_reg_dqm_oen_ui_large[u1ByteIdx] = ucdq_oen_ui_large;
8408 						ucdq_reg_dqm_oen_ui_small[u1ByteIdx] = ucdq_oen_ui_small;
8409 					}
8410 
8411 					ucdq_reg_pi[u1ByteIdx] = ucdq_pi;
8412 					ucdq_reg_dqm_pi[u1ByteIdx] = ucdq_pi;
8413 				}
8414 
8415 				if (calType == TX_DQ_DQS_MOVE_DQ_ONLY || calType == TX_DQ_DQS_MOVE_DQ_DQM)
8416 				{
8417 					TXSetDelayReg_DQ(p, u1UpdateRegUI, ucdq_reg_ui_large, ucdq_reg_oen_ui_large, ucdq_reg_ui_small, ucdq_reg_oen_ui_small, ucdq_reg_pi);
8418 				}
8419 
8420 				if (calType == TX_DQ_DQS_MOVE_DQM_ONLY || calType == TX_DQ_DQS_MOVE_DQ_DQM)
8421 				{
8422 					TXSetDelayReg_DQM(p, u1UpdateRegUI, ucdq_reg_dqm_ui_large, ucdq_reg_dqm_oen_ui_large, ucdq_reg_dqm_ui_small, ucdq_reg_dqm_oen_ui_small, ucdq_reg_dqm_pi);
8423 				}
8424 
8425 				u4err_value = 0;
8426 #if ENABLE_K_WITH_WORST_SI_UI_SHIFT
8427 				//DramcEngine2SetPat(p, p->test_pattern, 0, 0, TE_UI_SHIFT);
8428 				u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, p->test_pattern);
8429 #else
8430 				//audio + xtalk pattern
8431 				DramcEngine2SetPat(p, TEST_AUDIO_PATTERN, 0, 0, TE_NO_UI_SHIFT);
8432 				u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_AUDIO_PATTERN);
8433 				DramcEngine2SetPat(p, TEST_XTALK_PATTERN, 0, 1, TE_NO_UI_SHIFT);
8434 				u4err_value |= DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_XTALK_PATTERN);
8435 #endif
8436 				//audio + xtalk pattern
8437 				//u4err_value = 0;
8438 				//DramcEngine2SetPat(p, TEST_AUDIO_PATTERN, 0, 0);
8439 				//u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_AUDIO_PATTERN);
8440 				//DramcEngine2SetPat(p, TEST_XTALK_PATTERN, 0, 1);
8441 				//u4err_value |= DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_XTALK_PATTERN);
8442 
8443 				if (u1VrefScanEnable == 0 && (calType != TX_DQ_DQS_MOVE_DQM_ONLY))
8444 				{
8445 					//msg("Delay=%3d |%2d %2d %3d| %2d %2d| 0x%8x [0]",uiDelay, ucdq_ui_large,ucdq_ui_small, ucdq_pi, ucdq_oen_ui_large,ucdq_oen_ui_small, u4err_value);
8446 					#ifdef ETT_PRINT_FORMAT
8447 					if (u4err_value != 0)
8448 					{
8449 						msg2("%d |%d %d %d|[0]", uiDelay, ucdq_ui_large, ucdq_ui_small, ucdq_pi);
8450 					}
8451 					#else
8452 					msg2("Delay=%3d |%2d %2d %3d| 0x%8x [0]", uiDelay, ucdq_ui_large, ucdq_ui_small, ucdq_pi, u4err_value);
8453 					#endif
8454 				}
8455 
8456 				// check fail bit ,0 ok ,others fail
8457 				for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
8458 				{
8459 					u4fail_bit = u4err_value & ((U32)1 << u1BitIdx);
8460 
8461 					if (u1VrefScanEnable == 0 && (calType != TX_DQ_DQS_MOVE_DQM_ONLY))
8462 					{
8463 						if(u4err_value != 0)
8464 						{
8465 							if (u1BitIdx % DQS_BIT_NUMBER == 0)
8466 							{
8467 								msg2(" ");
8468 							}
8469 
8470 							if (u4fail_bit == 0)
8471 							{
8472 								 msg2("o");
8473 							}
8474 							else
8475 							{
8476 								msg2("x");
8477 							}
8478 						}
8479 					}
8480 
8481 					if (WinPerBit[u1BitIdx].first_pass == PASS_RANGE_NA)
8482 					{
8483 						if (u4fail_bit == 0) //compare correct: pass
8484 						{
8485 							WinPerBit[u1BitIdx].first_pass = uiDelay;
8486 
8487 							#if TX_TDQS2DQ_PRE_CAL
8488 							if ((u1IsLP4Div4DDR800(p) == FALSE) && (calType == TX_DQ_DQS_MOVE_DQ_ONLY) && (u1VrefScanEnable == FALSE))
8489 							{
8490 								if (u2DQS2DQ_Pre_Cal[p->channel][p->rank][vGet_Div_Mode(p)] == 0)
8491 								{
8492 									u2DQS2DQ_Pre_Cal[p->channel][p->rank][vGet_Div_Mode(p)] = ((uiDelay - u2DQDelayBegin)* 1000) / p->frequency;
8493 								}
8494 
8495 								if (uiDelay == u2DQDelayBegin)
8496 								{
8497 									err("TX_TDQS2DQ_PRE_CAL: Warning, possibly missed TX window boundary\n");
8498 									#if __ETT__
8499 									while (1);
8500 									#endif
8501 								}
8502 							}
8503 							#endif
8504 						}
8505 					}
8506 					else if (WinPerBit[u1BitIdx].last_pass == PASS_RANGE_NA)
8507 					{
8508 						if (u4fail_bit != 0) //compare error : fail
8509 						{
8510 							WinPerBit[u1BitIdx].last_pass = uiDelay - u2DQDelayStep;
8511 						}
8512 						else if (uiDelay > (u2DQDelayEnd - u2DQDelayStep))
8513 						{
8514 							WinPerBit[u1BitIdx].last_pass = uiDelay;
8515 						}
8516 
8517 						if (WinPerBit[u1BitIdx].last_pass != PASS_RANGE_NA)
8518 						{
8519 							if ((WinPerBit[u1BitIdx].last_pass - WinPerBit[u1BitIdx].first_pass) >= (VrefWinPerBit[u1BitIdx].last_pass - VrefWinPerBit[u1BitIdx].first_pass))
8520 							{
8521 								if ((VrefWinPerBit[u1BitIdx].last_pass != PASS_RANGE_NA) && (VrefWinPerBit[u1BitIdx].last_pass - VrefWinPerBit[u1BitIdx].first_pass) > 0)
8522 								{
8523 									msg2("Bit[%d] Bigger window update %d > %d, window broken?\n", u1BitIdx, \
8524 										(WinPerBit[u1BitIdx].last_pass - WinPerBit[u1BitIdx].first_pass), (VrefWinPerBit[u1BitIdx].last_pass - VrefWinPerBit[u1BitIdx].first_pass));
8525 								}
8526 
8527 								//if window size is bigger than TX_PASS_WIN_CRITERIA, consider it a real pass window. If not, don't update the finish count and don't do early break;
8528 								if ((WinPerBit[u1BitIdx].last_pass - WinPerBit[u1BitIdx].first_pass) > TX_PASS_WIN_CRITERIA)
8529 									uiFinishCount |= (1 << u1BitIdx);
8530 
8531 								//update bigger window size
8532 								VrefWinPerBit[u1BitIdx].first_pass = WinPerBit[u1BitIdx].first_pass;
8533 								VrefWinPerBit[u1BitIdx].last_pass = WinPerBit[u1BitIdx].last_pass;
8534 							}
8535 
8536 							//reset tmp window
8537 							WinPerBit[u1BitIdx].first_pass = PASS_RANGE_NA;
8538 							WinPerBit[u1BitIdx].last_pass = PASS_RANGE_NA;
8539 						}
8540 					 }
8541 				}
8542 
8543 				if(u1VrefScanEnable==0 && (calType != TX_DQ_DQS_MOVE_DQM_ONLY))
8544 				{
8545 					if(u4err_value != 0)
8546 					{
8547 						msg2(" [MSB]\n");
8548 					}
8549 				}
8550 
8551 				//if all bit windows are found and all bits turn to fail again, early break;
8552 				if (uiFinishCount == 0xffff)
8553 				{
8554 					vSetCalibrationResult(p, DRAM_CALIBRATION_TX_PERBIT, DRAM_OK);
8555 					#if !REDUCE_LOG_FOR_PRELOADER
8556 					#ifdef ETT_PRINT_FORMAT
8557 					msg2("TX calibration finding left boundary early break. PI DQ delay=0x%B\n", uiDelay);
8558 					#else
8559 					msg2("TX calibration finding left boundary early break. PI DQ delay=0x%2x\n", uiDelay);
8560 					#endif
8561 					#endif
8562 					break;	//early break
8563 				}
8564 			}
8565 			}
8566 
8567 			if (isAutoK)
8568 			{
8569 			#if TX_AUTO_K_ENABLE
8570 				Tx_Auto_K_complete_check(p);
8571 				#if TX_AUTO_K_DEBUG_ENABLE
8572 				Tx_Auto_K_Debug_Message(p, u1PI_Len);
8573 				#endif
8574 			#endif
8575 			}
8576 
8577 			// (1) calculate per bit window size
8578 			// (2) find out min win of all DQ bits
8579 			// (3) calculate perbit window center
8580 			u1min_winsize = 0xff;
8581 			u1min_bit = 0xff;
8582 			for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
8583 			{
8584 				if (isAutoK)
8585 				{
8586 				#if TX_AUTO_K_ENABLE
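				// The decode below assumes each TX_ATK_RESULTn register packs four bits' results as
				// 8-bit fields: PwMaxInitReg[] holds the pass-window start (relative to the PI init
				// value) and PwMaxLenReg[] the window length, so data bit N lives at
				// bits [8*(N%4)+7 : 8*(N%4)] of register index N/4.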
8587 				u1dq_shift = ((u1BitIdx % 4) * 8);
8588 				VrefWinPerBit[u1BitIdx].first_pass = u2DQDelayBegin - ucdq_pi + ((u4IO32Read4B(DRAMC_REG_ADDR(PwMaxInitReg[u1BitIdx / 4])) & (0xff << u1dq_shift)) >> u1dq_shift);
8589 				VrefWinPerBit[u1BitIdx].last_pass = ((u4IO32Read4B(DRAMC_REG_ADDR(PwMaxLenReg[u1BitIdx / 4])) & (0xff << u1dq_shift)) >> u1dq_shift) + VrefWinPerBit[u1BitIdx].first_pass;
8590 				VrefWinPerBit[u1BitIdx].win_size = ((u4IO32Read4B(DRAMC_REG_ADDR(PwMaxLenReg[u1BitIdx / 4])) & (0xff << u1dq_shift)) >> u1dq_shift);
8591 
8592 				if (u1PI_Len == 0)
8593 					u4Length = 48;
8594 				else
8595 					u4Length = 32 * (1 + u1PI_Len);
8596 
8597 				if ((VrefWinPerBit[u1BitIdx].first_pass == (int)(u2DQDelayBegin - ucdq_pi)) || (VrefWinPerBit[u1BitIdx].last_pass == (int)(u2DQDelayBegin + u4Length)))
8598 				{
8599 					err("Error! Probably missed pass window!\n");
8600 				}
8601 
8602 				msg("TX DQ bit %d, first pass: %d, last pass: %d\n", u1BitIdx, VrefWinPerBit[u1BitIdx].first_pass, VrefWinPerBit[u1BitIdx].last_pass);
8603 				#else
8604 				//if(VrefWinPerBit[u1BitIdx].last_pass == VrefWinPerBit[u1BitIdx].first_pass)
8605 				if (VrefWinPerBit[u1BitIdx].first_pass == PASS_RANGE_NA)
8606 					VrefWinPerBit[u1BitIdx].win_size = 0;
8607 				else
8608 					VrefWinPerBit[u1BitIdx].win_size = VrefWinPerBit[u1BitIdx].last_pass - VrefWinPerBit[u1BitIdx].first_pass + u2DQDelayStep;
8609 				#endif
8610 				}
8611 				else
8612 				{
8613 					if (VrefWinPerBit[u1BitIdx].first_pass == PASS_RANGE_NA)
8614 						VrefWinPerBit[u1BitIdx].win_size = 0;
8615 					else
8616 						VrefWinPerBit[u1BitIdx].win_size = VrefWinPerBit[u1BitIdx].last_pass - VrefWinPerBit[u1BitIdx].first_pass + u2DQDelayStep;
8617 				}
8618 
8619 				if (VrefWinPerBit[u1BitIdx].win_size < u1min_winsize)
8620 				{
8621 					u1min_bit = u1BitIdx;
8622 					u1min_winsize = VrefWinPerBit[u1BitIdx].win_size;
8623 				}
8624 
8625 				u2TempWinSum += VrefWinPerBit[u1BitIdx].win_size;  //Sum of DQ windows for vref selection
8626 
8627 				#if VENDER_JV_LOG
8628 				if (calType == TX_DQ_DQS_MOVE_DQ_ONLY)
8629 				{
8630 					msg5("TX Bit%d, %d%%\n", u1BitIdx, (VrefWinPerBit[u1BitIdx].win_size * 100 + 31) / 32);
8631 				}
8632 				#endif
8633 
8634 
8635 				// calculate per bit window position and print
8636 				VrefWinPerBit[u1BitIdx].win_center = (VrefWinPerBit[u1BitIdx].first_pass + VrefWinPerBit[u1BitIdx].last_pass) >> 1;
8637 				#if PINMUX_AUTO_TEST_PER_BIT_TX
8638 				gFinalTXPerbitFirstPass[p->channel][u1BitIdx] = VrefWinPerBit[u1BitIdx].first_pass;
8639 				#endif
8640 			}
8641 
8642 
8643 			#if __ETT__
8644 			if (u1VrefScanEnable == 0)
8645 			{
8646 				//msg("\n\tCH=%d, VrefRange= %d, VrefLevel = %d\n", p->channel, u2FinalRange, u2VrefLevel);
8647 				TxPrintWidnowInfo(p, VrefWinPerBit);
8648 			}
8649 			#endif
8650 
8651 			if (u1VrefScanEnable == 1)
8652 			{
8653 				if (u2TempWinSum > u2MaxWindowSum)
8654 					u2MaxWindowSum = u2TempWinSum;
8655 
8656 				VrefInfo[u1VrefIdx].u2VrefUsed = u2VrefLevel;
8657 				VrefInfo[u1VrefIdx].u1WorseBitWinSize_byVref = u1min_winsize;
8658 				VrefInfo[u1VrefIdx].u1WorseBitIdx_byVref = u1min_bit;
8659 				VrefInfo[u1VrefIdx].u2WinSum_byVref = u2TempWinSum;
8660 				u1VrefIdx ++;
8661 			}
8662 
8663 			#if TX_AUTO_K_ENABLE
8664 			if (isAutoK)
8665 				Tx_Auto_K_Clear(p);
8666 			#endif
8667 
8668 			#if LP4_TX_VREF_PASS_CONDITION
8669 			if (u1VrefScanEnable && (u2TempWinSum < (u2MaxWindowSum * 95 / 100)) && (u1min_winsize < LP4_TX_VREF_PASS_CONDITION))
8670 			#else
8671 			if (u1VrefScanEnable && (u2TempWinSum < (u2MaxWindowSum * 95 / 100)) && (u1min_winsize > TX_PASS_WIN_CRITERIA))
8672 			#endif
8673 			{
8674 				msg("\nTX Vref early break, calculate TX vref\n");
8675 				break;
8676 			}
8677 
8678 			#if TX_AUTO_K_ENABLE
8679 			Tx_Auto_K_Clear(p);
8680 			#endif
8681 		}
8682 
8683 		DramcEngine2End(p);
8684 
8685 		#if (TX_AUTO_K_ENABLE && TX_AUTO_K_WORKAROUND)
8686 		if ((isAutoK) && (p->rank == RANK_1))
8687 		{
8688 			vSetRank(p, RANK_0);
8689 			DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
8690 			vSetRank(p, backup_rank);
8691 		}
8692 		#endif
8693 
8694 		if (u1VrefScanEnable == 0) // if time domain (not vref scan), calculate window center of all bits.
8695 		{
8696 			// Calculate the center of DQ pass window
8697 			// Record center sum of each byte
8698 			for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
8699 			{
8700 				#if 1//TX_DQM_CALC_MAX_MIN_CENTER
8701 				u2Center_min[u1ByteIdx] = 0xffff;
8702 				u2Center_max[u1ByteIdx] = 0;
8703 				#endif
8704 
8705 				for (u1BitIdx = 0; u1BitIdx < DQS_BIT_NUMBER; u1BitIdx++)
8706 				{
8707 					u1BitTemp = u1ByteIdx * DQS_BIT_NUMBER + u1BitIdx;
8708 					memcpy(FinalWinPerBit, VrefWinPerBit, sizeof(PASS_WIN_DATA_T) * DQ_DATA_WIDTH);
8709 
8710 					if (FinalWinPerBit[u1BitTemp].win_center < u2Center_min[u1ByteIdx])
8711 						u2Center_min[u1ByteIdx] = FinalWinPerBit[u1BitTemp].win_center;
8712 
8713 					if (FinalWinPerBit[u1BitTemp].win_center > u2Center_max[u1ByteIdx])
8714 						u2Center_max[u1ByteIdx] = FinalWinPerBit[u1BitTemp].win_center;
8715 
8716 					#ifdef FOR_HQA_TEST_USED
8717 					if ((calType == TX_DQ_DQS_MOVE_DQ_ONLY) && (u1VrefScanEnable == 0))
8718 					{
8719 						gFinalTXPerbitWin[p->channel][p->rank][u1BitTemp] = FinalWinPerBit[u1BitTemp].win_size;
8720 					}
8721 					#endif
8722 				}
8723 			}
8724 		}
8725 	}
8726 
8727 	// SET tx Vref (DQ) = u2FinalVref, LP3 no need to set this.
8728 	if (u1VrefScanEnable)
8729 	{
8730 		#if SUPPORT_SAVE_TIME_FOR_CALIBRATION && BYPASS_VREF_CAL
8731 		if (p->femmc_Ready == 1 && (p->Bypass_TXWINDOW))
8732 		{
8733 			u2FinalVref = p->pSavetimeData->u1TxWindowPerbitVref_Save[p->channel][p->rank];
8734 		}
8735 		else
8736 		#endif
8737 		{
8738 			u2FinalVref = TxChooseVref(p, VrefInfo, u1VrefIdx);
8739 		}
8740 
8741 		TXSetFinalVref(p, u2FinalRange, u2FinalVref);
8742 		return DRAM_OK;
8743 	}
8744 
8745 #ifdef FOR_HQA_TEST_USED
8746 	// LP4 DQ time domain || LP3 DQ_DQM time domain
8747 	if (calType == TX_DQ_DQS_MOVE_DQ_ONLY)
8748 	{
8749 		gFinalTXPerbitWin_min_max[p->channel][p->rank] = u1min_winsize;
8750 		if(u1min_winsize<16)
8751 		{
8752 			err("[WARNING] Smaller TX win !!\n");
8753 			#if CHECK_HQA_CRITERIA
8754 			while(1);
8755 			#endif
8756 		}
8757 	}
8758 #endif
8759 
8760 	// LP3 only uses the "TX_DQ_DQS_MOVE_DQ_DQM" scan
8761 	// The first freq 800 (LP4-1600) doesn't support the jitter meter (data < 1T); therefore, don't use delay cells
8762 	if ((calType == TX_DQ_DQS_MOVE_DQ_ONLY) && (p->frequency >= 1333) && (p->u2DelayCellTimex100 != 0))
8763 	{
8764 		u1EnableDelayCell = 1;
8765 		msg("[TX_PER_BIT_DELAY_CELL] DelayCellTimex100 =%d/100 ps\n", p->u2DelayCellTimex100);
8766 	}
8767 
8768 	//Calculate the center of DQ pass window
8769 	//average the center delay
8770 	for (u1ByteIdx = 0; u1ByteIdx < (p->data_width / DQS_BIT_NUMBER); u1ByteIdx++)
8771 	{
8772 		msg(" == TX Byte %d ==\n", u1ByteIdx);
8773 		u2DQM_Delay = ((u2Center_min[u1ByteIdx] + u2Center_max[u1ByteIdx]) >> 1); //(max +min)/2
8774 
8775 		if (u1EnableDelayCell == 0)
8776 		{
8777 			uiDelay = u2DQM_Delay;
8778 		}
8779 		else// if(calType == TX_DQ_DQS_MOVE_DQ_ONLY)
8780 		{
8781 			uiDelay = u2Center_min[u1ByteIdx];	// for DQ PI delay, will adjust with delay cells
8782 
8783 			// calculate delay cell perbit
8784 			for (u1BitIdx = 0; u1BitIdx < DQS_BIT_NUMBER; u1BitIdx++)
8785 			{
8786 				u1BitTemp = u1ByteIdx * DQS_BIT_NUMBER + u1BitIdx;
8787 				u1PIDiff = FinalWinPerBit[u1BitTemp].win_center - u2Center_min[u1ByteIdx];
8788 				if (p->u2DelayCellTimex100 != 0)
8789 				{
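					// Unit check for the conversion below (assuming p->frequency is the DQ clock in
					// MHz and u2DelayCellTimex100 is one delay cell in 0.01ps units): 1 PI = tCK/64
					// = 10^8 / (freq * 64) in 0.01ps, so cells = PIdiff * 100000000 / (freq << 6)
					// / cellTimex100, which is exactly the expression used here.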
8790 					u2DelayCellOfst[u1BitTemp] = (u1PIDiff * 100000000 / (p->frequency << 6)) / p->u2DelayCellTimex100;
8791 
8792 					msg("u2DelayCellOfst[%d]=%d cells (%d PI)\n", u1BitTemp, u2DelayCellOfst[u1BitTemp], u1PIDiff);
8793 
8794 					if(u2DelayCellOfst[u1BitTemp]>255)
8795 					{
8796 						msg("[WARNING] TX DQ%d delay cell %d > 255, adjust to 255 cells\n", u1BitIdx, u2DelayCellOfst[u1BitTemp]);
8797 						u2DelayCellOfst[u1BitTemp] =255;
8798 					}
8799 				}
8800 				else
8801 				{
8802 					err("Error: Cell time (p->u2DelayCellTimex100) is 0 \n");
8803 					break;
8804 				}
8805 			}
8806 
8807 		}
8808 
8809 		TxWinTransferDelayToUIPI(p, uiDelay, 1, &ucdq_reg_ui_large[u1ByteIdx], &ucdq_reg_ui_small[u1ByteIdx], &ucdq_reg_pi[u1ByteIdx], \
8810 								&ucdq_reg_oen_ui_large[u1ByteIdx], &ucdq_reg_oen_ui_small[u1ByteIdx]);
8811 
8812 		TxWinTransferDelayToUIPI(p, u2DQM_Delay, 1, &ucdq_reg_dqm_ui_large[u1ByteIdx], &ucdq_reg_dqm_ui_small[u1ByteIdx], &ucdq_reg_dqm_pi[u1ByteIdx], \
8813 								&ucdq_reg_dqm_oen_ui_large[u1ByteIdx], &ucdq_reg_dqm_oen_ui_small[u1ByteIdx]);
8814 
8815 		if (calType == TX_DQ_DQS_MOVE_DQ_ONLY || calType == TX_DQ_DQS_MOVE_DQ_DQM)
8816 		{
8817 			msg("Update DQ	dly =%d (%d ,%d, %d)  DQ  OEN =(%d ,%d)\n",
8818 							uiDelay, ucdq_reg_ui_large[u1ByteIdx], ucdq_reg_ui_small[u1ByteIdx], ucdq_reg_pi[u1ByteIdx], \
8819 							ucdq_reg_oen_ui_large[u1ByteIdx], ucdq_reg_oen_ui_small[u1ByteIdx]);
8820 		}
8821 
8822 		//if(calType ==TX_DQ_DQS_MOVE_DQM_ONLY || calType== TX_DQ_DQS_MOVE_DQ_DQM)
8823 		{
8824 			msg("Update DQM dly =%d (%d ,%d, %d)  DQM OEN =(%d ,%d)",
8825 					u2DQM_Delay, ucdq_reg_dqm_ui_large[u1ByteIdx], ucdq_reg_dqm_ui_small[u1ByteIdx], ucdq_reg_dqm_pi[u1ByteIdx], \
8826 					ucdq_reg_dqm_oen_ui_large[u1ByteIdx], ucdq_reg_dqm_oen_ui_small[u1ByteIdx]);
8827 		}
8828 		msg("\n");
8829 
8830 #ifdef FOR_HQA_REPORT_USED
8831 		if (calType == TX_DQ_DQS_MOVE_DQ_ONLY)
8832 		{
8833 			for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
8834 			{
8835 				HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT1, "TX_Window_Center_", "DQ", u1BitIdx, FinalWinPerBit[u1BitIdx].win_center, NULL);
8836 			}
8837 		}
8838 
8839 		if (calType == TX_DQ_DQS_MOVE_DQM_ONLY)
8840 		{
8841 			HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT0, "TX_Window_Center_", "DQM", u1ByteIdx, u2DQM_Delay, NULL);
8842 		}
8843 #if 0
8844 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT1, "TX_Window_Center_", "LargeUI", u1ByteIdx, ucdq_reg_ui_large[u1ByteIdx], NULL);
8845 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT0, "TX_Window_Center_", "SmallUI", u1ByteIdx, ucdq_reg_ui_small[u1ByteIdx], NULL);
8846 		HQA_Log_Message_for_Report(p, p->channel, p->rank, HQA_REPORT_FORMAT0, "TX_Window_Center_", "PI", u1ByteIdx, ucdq_reg_pi[u1ByteIdx], NULL);
8847 #endif
8848 #endif
8849 
8850 	}
8851 
8852 
8853 #if REG_ACCESS_PORTING_DGB
8854 	RegLogEnable = 1;
8855 #endif
8856 
8857 		/* If p->rank == RANK_0, save to both Rank0 and Rank1 registers; if p->rank == RANK_1, save to Rank1 only */
8858 		for (u1RankIdx = p->rank; u1RankIdx < RANK_MAX; u1RankIdx++)
8859 		{
8860 			vSetRank(p, u1RankIdx);
8861 
8862 			if (calType == TX_DQ_DQS_MOVE_DQ_ONLY || calType == TX_DQ_DQS_MOVE_DQ_DQM)
8863 			{
8864 				TXSetDelayReg_DQ(p, TRUE, ucdq_reg_ui_large, ucdq_reg_oen_ui_large, ucdq_reg_ui_small, ucdq_reg_oen_ui_small, ucdq_reg_pi);
8865 			}
8866 
8867 			TXSetDelayReg_DQM(p, TRUE, ucdq_reg_dqm_ui_large, ucdq_reg_dqm_oen_ui_large, ucdq_reg_dqm_ui_small, ucdq_reg_dqm_oen_ui_small, ucdq_reg_dqm_pi);
8868 
8869 			 if (u1EnableDelayCell)
8870 			 {
8871 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY0),
8872 						   P_Fld(u2DelayCellOfst[3], SHU_R0_B0_TXDLY0_TX_ARDQ3_DLY_B0)
8873 						 | P_Fld(u2DelayCellOfst[2], SHU_R0_B0_TXDLY0_TX_ARDQ2_DLY_B0)
8874 						 | P_Fld(u2DelayCellOfst[1], SHU_R0_B0_TXDLY0_TX_ARDQ1_DLY_B0)
8875 						 | P_Fld(u2DelayCellOfst[0], SHU_R0_B0_TXDLY0_TX_ARDQ0_DLY_B0));
8876 				 vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_TXDLY1),
8877 						   P_Fld(u2DelayCellOfst[7], SHU_R0_B0_TXDLY1_TX_ARDQ7_DLY_B0)
8878 						 | P_Fld(u2DelayCellOfst[6], SHU_R0_B0_TXDLY1_TX_ARDQ6_DLY_B0)
8879 						 | P_Fld(u2DelayCellOfst[5], SHU_R0_B0_TXDLY1_TX_ARDQ5_DLY_B0)
8880 						 | P_Fld(u2DelayCellOfst[4], SHU_R0_B0_TXDLY1_TX_ARDQ4_DLY_B0));
8881 				 vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY0),
8882 						   P_Fld(u2DelayCellOfst[11], SHU_R0_B1_TXDLY0_TX_ARDQ3_DLY_B1)
8883 						 | P_Fld(u2DelayCellOfst[10], SHU_R0_B1_TXDLY0_TX_ARDQ2_DLY_B1)
8884 						 | P_Fld(u2DelayCellOfst[9], SHU_R0_B1_TXDLY0_TX_ARDQ1_DLY_B1)
8885 						 | P_Fld(u2DelayCellOfst[8], SHU_R0_B1_TXDLY0_TX_ARDQ0_DLY_B1));
8886 				 vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_TXDLY1),
8887 						   P_Fld(u2DelayCellOfst[15], SHU_R0_B1_TXDLY1_TX_ARDQ7_DLY_B1)
8888 						 | P_Fld(u2DelayCellOfst[14], SHU_R0_B1_TXDLY1_TX_ARDQ6_DLY_B1)
8889 						 | P_Fld(u2DelayCellOfst[13], SHU_R0_B1_TXDLY1_TX_ARDQ5_DLY_B1)
8890 						 | P_Fld(u2DelayCellOfst[12], SHU_R0_B1_TXDLY1_TX_ARDQ4_DLY_B1));
8891 			 }
8892 
8893 			#if ENABLE_TX_TRACKING
8894 			TXUpdateTXTracking(p, calType, ucdq_reg_pi, ucdq_reg_dqm_pi);
8895 			#endif
8896 		}
8897 
8898 		vSetRank(p, backup_rank);
8899 
8900 		if (isAutoK)
8901 		{
8902 		#if TX_AUTO_K_ENABLE
8903 			#if TX_AUTO_K_WORKAROUND
8904 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_TX_ATK_SET0),
8905 								 P_Fld(ucdq_reg_pi[0], TX_ATK_SET0_TX_ATK_DQ_B0_PI_INIT) |
8906 								 P_Fld(ucdq_reg_pi[1], TX_ATK_SET0_TX_ATK_DQ_B1_PI_INIT) |
8907 								 P_Fld(ucdq_reg_dqm_pi[0], TX_ATK_SET0_TX_ATK_DQM_B0_PI_INIT) |
8908 								 P_Fld(ucdq_reg_dqm_pi[1], TX_ATK_SET0_TX_ATK_DQM_B1_PI_INIT)); //If TX auto-k is enable, TX_PI will be switch to PI_INIT
8909 			#endif
8910 		#endif
8911 		}
8912 
8913 #if REG_ACCESS_PORTING_DGB
8914 	RegLogEnable = 0;
8915 #endif
8916 
8917 #if (TX_AUTO_K_ENABLE && TX_AUTO_K_WORKAROUND)
8918 	if ((isAutoK) && (p->rank == RANK_1) && (calType == TX_DQ_DQS_MOVE_DQ_DQM))
8919 	{
8920 		u4DQM_MCK_RK1_backup = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1));
8921 		u4DQM_UI_RK1_backup = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3));
8922 		u4DQM_PI_RK1_backup[0] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0));
8923 		u4DQM_PI_RK1_backup[1] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0));
8924 		u4DQ_MCK_RK1_backup = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0));
8925 		u4DQ_UI_RK1_backup = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2));
8926 		u4DQ_PI_RK1_backup[0] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0));
8927 		u4DQ_PI_RK1_backup[1] = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0));
8928 	}
8929 #endif
8930 
8931 	msg3("[TxWindowPerbitCal] Done\n\n");
8932 
8933 	#if 0
8934 	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_PADCTL4), 1, PADCTL4_CKEFIXON);  // test only
8935 	#endif
8936 
8937 	return DRAM_OK;
8938 }
8939 
8940 #endif //SIMULATION_TX_PERBIT
8941 
8942 #if ENABLE_EYESCAN_GRAPH
8943 void Dramc_K_TX_EyeScan_Log(DRAMC_CTX_T *p)
8944 {
8945 	U8 ucindex, u1BitIdx, u1ByteIdx;
8946 	U8 ii, backup_rank, u1PrintWinData, u1vrefidx;
8947 	PASS_WIN_DATA_T WinPerBit[DQ_DATA_WIDTH], VrefWinPerBit[DQ_DATA_WIDTH], FinalWinPerBit[DQ_DATA_WIDTH];
8948 	U16 tx_pi_delay[4], tx_dqm_pi_delay[4];
8949 	U16 u2DQDelayBegin, uiDelay;
8950 	U16 u2VrefLevel, u2VrefBegin, u2VrefEnd, u2VrefStep, u2VrefRange;
8951 	U8 ucdq_pi, ucdq_ui_small, ucdq_ui_large,ucdq_oen_ui_small, ucdq_oen_ui_large;
8952 	U32 uiFinishCount;
8953 	U16 u2TempWinSum, u2tx_window_sum=0;
8954 	U32 u4err_value, u4fail_bit;
8955 	#if 1//TX_DQM_CALC_MAX_MIN_CENTER
8956 	U16 u2Center_min[DQS_NUMBER],u2Center_max[DQS_NUMBER];
8957 	#endif
8958 
8959 	U16 TXPerbitWin_min_max = 0;
8960 	U32 min_bit, min_winsize;
8961 
8962 	U16 u2FinalVref=0xd;
8963 	U16 u2FinalRange=0;
8964 
8965 	U8 EyeScan_index[DQ_DATA_WIDTH];
8966 
8967 	U16 backup_u1MR14Value;
8968 	U8 u1pass_in_this_vref_flag[DQ_DATA_WIDTH];
8969 
8970 	U8 u1MCK2UI, u1UI2PI;
8971 
8972 	U32 u4RegBackupAddress[] =
8973 	{
8974 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0)),
8975 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2)),
8976 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1)),
8977 		(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3)),
8978 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0)),
8979 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0)),
8980 	};
8981 
8982 	if (GetEyeScanEnable(p, 2)==DISABLE) return;
8983 
8984 	//if (gTX_EYE_Scan_only_higheset_freq_flag==1 && p->frequency != u2DFSGetHighestFreq(p)) return;
8985 
8986 	//backup register value
8987 	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress)/sizeof(U32));
8988 
8989 	backup_u1MR14Value = u1MR14Value[p->channel][p->rank][p->dram_fsp];
8990 	//Jimmy Temp
8991 	DramcModeRegReadByRank(p, p->rank, 14, &backup_u1MR14Value);
8992 
8993 	if (gFinalTXVrefDQ[p->channel][p->rank] ==0) //Set final TX Vref as default value
8994 		gFinalTXVrefDQ[p->channel][p->rank] = u1MR14Value[p->channel][p->rank][p->dram_fsp];
8995 
8996 	//set initial values
8997 	for(u1vrefidx=0; u1vrefidx<=VREF_VOLTAGE_TABLE_NUM_LP5-1;u1vrefidx++)
8998 	{
8999 		for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
9000 		{
9001 			for(ii=0; ii<EYESCAN_BROKEN_NUM; ii++)
9002 			{
9003 				gEyeScan_Min[u1vrefidx][u1BitIdx][ii] = EYESCAN_DATA_INVALID;
9004 				gEyeScan_Max[u1vrefidx][u1BitIdx][ii] = EYESCAN_DATA_INVALID;
9005 			}
9006 			gEyeScan_ContinueVrefHeight[u1BitIdx] = 0;
9007 			gEyeScan_TotalPassCount[u1BitIdx] = 0;
9008 		}
9009 	}
9010 
9011 
9012 	u1MCK2UI = u1MCK2UI_DivShift(p);
9013 
9014 	//if (vGet_DDR800_Mode(p) == DDR800_CLOSE_LOOP)
9015 	//	  u1UI2PI = 6;
9016 	//else
9017 		u1UI2PI = 5;
9018 
9019 
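	// The composite delays gathered below are in PI: total = MCK_field * 2^(u1MCK2UI + u1UI2PI)
	// + UI_field * 2^u1UI2PI + PI_field (u1UI2PI = 5, i.e. 32 PI per UI; the ARPI field is scaled
	// by 8 in phase mode). A minimal sketch of the reverse decomposition, not the actual
	// TxWinTransferDelayToUIPI() implementation:
	//
	//   pi  = total & ((1 << u1UI2PI) - 1);                 // fine phase, 0..31
	//   ui  = (total >> u1UI2PI) & ((1 << u1MCK2UI) - 1);   // UI within one MCK
	//   mck = total >> (u1UI2PI + u1MCK2UI);                // whole MCKs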
9020 	for(u1ByteIdx=0; u1ByteIdx < p->data_width/DQS_BIT_NUMBER; u1ByteIdx++)
9021 	{
9022 		if (u1ByteIdx == 0)
9023 		{
9024 			tx_pi_delay[u1ByteIdx] = (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_DQ0)<<(u1MCK2UI+u1UI2PI)) +
9025 						  (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), SHURK_SELPH_DQ2_DLY_DQ0)<<u1UI2PI) +
9026 						  u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), SHU_R0_B0_DQ0_SW_ARPI_DQ_B0)*(u1IsPhaseMode(p)==TRUE ? 8 : 1);
9027 
9028 			tx_dqm_pi_delay[u1ByteIdx] = (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1), SHURK_SELPH_DQ1_TXDLY_DQM0)<<(u1MCK2UI+u1UI2PI)) +
9029 							  (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3), SHURK_SELPH_DQ3_DLY_DQM0)<<u1UI2PI) +
9030 							  u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), SHU_R0_B0_DQ0_SW_ARPI_DQM_B0)*(u1IsPhaseMode(p)==TRUE ? 8 : 1);
9031 		}
9032 		else
9033 		{
9034 			tx_pi_delay[u1ByteIdx] = (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_DQ1)<<(u1MCK2UI+u1UI2PI)) +
9035 						  (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), SHURK_SELPH_DQ2_DLY_DQ1)<<u1UI2PI) +
9036 						  u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), SHU_R0_B1_DQ0_SW_ARPI_DQ_B1)*(u1IsPhaseMode(p)==TRUE ? 8 : 1);
9037 
9038 			tx_dqm_pi_delay[u1ByteIdx] = (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1), SHURK_SELPH_DQ1_TXDLY_DQM1)<<(u1MCK2UI+u1UI2PI)) +
9039 							  (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3), SHURK_SELPH_DQ3_DLY_DQM1)<<u1UI2PI) +
9040 							  u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), SHU_R0_B1_DQ0_SW_ARPI_DQM_B1)*(u1IsPhaseMode(p)==TRUE ? 8 : 1);
9041 		}
9042 	}
9043 
9044 	if (tx_pi_delay[0] < tx_pi_delay[1])
9045 	{
9046 		u2DQDelayBegin = tx_pi_delay[0]-32;
9047 	}
9048 	else
9049 	{
9050 		u2DQDelayBegin = tx_pi_delay[1]-32;
9051 	}
9052 
9053 	u2VrefRange = 0;
9054 	u2VrefBegin = 0;
9055 	u2VrefEnd = (p->dram_type==TYPE_LPDDR5?VREF_VOLTAGE_TABLE_NUM_LP5:VREF_VOLTAGE_TABLE_NUM_LP4)-1;
9056 	u2VrefStep = EYESCAN_GRAPH_CATX_VREF_STEP;
9057 	msg3("\nTX Vref %d -> %d, step: %d\n", u2VrefBegin, u2VrefEnd, u2VrefStep);
9058 
9059 #if ENABLE_K_WITH_WORST_SI_UI_SHIFT
9060 	DramcEngine2Init(p, p->test2_1, p->test2_2, p->test_pattern | 0x80, 0, TE_UI_SHIFT);//UI_SHIFT + LEN1
9061 #else
9062 	DramcEngine2Init(p, p->test2_1, p->test2_2, TEST_XTALK_PATTERN, 0, TE_NO_UI_SHIFT);
9063 #endif
9064 
9065 
9066 	for(u2VrefLevel = u2VrefBegin; u2VrefLevel <= u2VrefEnd; u2VrefLevel += u2VrefStep)
9067 	{
9068 		//set vref
9069 //fra		 u1MR14Value[p->channel][p->rank][p->dram_fsp] = (u2VrefLevel | (u2VrefRange<<6));
9070 		DramcTXSetVref(p, u2VrefRange, u2VrefLevel);
9071 		msg3("\n\n Set TX VrefRange %d, VrefLevel=%d\n", u2VrefRange, u2VrefLevel);
9072 
9073 		// initialize parameters
9074 		uiFinishCount = 0;
9075 		u2TempWinSum =0;
9076 
9077 		for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
9078 		{
9079 			WinPerBit[u1BitIdx].first_pass = (S16)PASS_RANGE_NA;
9080 			WinPerBit[u1BitIdx].last_pass = (S16)PASS_RANGE_NA;
9081 			VrefWinPerBit[u1BitIdx].first_pass = (S16)PASS_RANGE_NA;
9082 			VrefWinPerBit[u1BitIdx].last_pass = (S16)PASS_RANGE_NA;
9083 
9084 			gEyeScan_DelayCellPI[u1BitIdx] = 0;
9085 
9086 			EyeScan_index[u1BitIdx] = 0;
9087 			u1pass_in_this_vref_flag[u1BitIdx] = 0;
9088 		}
9089 
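		/*
		 * Sketch of the sweep below (a restatement of the existing flow, no new
		 * behavior): for the current Vref the DQ/DQM output delay of each byte
		 * is shifted from -32 PI to +31 PI around its calibrated tx_pi_delay,
		 * in steps of 1 PI (or 8 PI in phase mode).  At every step the test
		 * engine runs write/read checks and the per-bit pass/fail results are
		 * accumulated into WinPerBit[] so pass windows can be derived later.
		 */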
9090 		for (uiDelay=0; uiDelay<64; uiDelay+=(u1IsPhaseMode(p)==TRUE ? 8 : 1))
9091 		{
9092 			TxWinTransferDelayToUIPI(p, tx_pi_delay[0]+uiDelay-32, 0, &ucdq_ui_large, &ucdq_ui_small, &ucdq_pi, &ucdq_oen_ui_large, &ucdq_oen_ui_small);
9093 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), \
9094 											 P_Fld(ucdq_ui_large, SHURK_SELPH_DQ0_TXDLY_DQ0) | \
9095 											 P_Fld(ucdq_oen_ui_large, SHURK_SELPH_DQ0_TXDLY_OEN_DQ0));
9096 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), \
9097 											 P_Fld(ucdq_ui_small, SHURK_SELPH_DQ2_DLY_DQ0) | \
9098 											 P_Fld(ucdq_oen_ui_small, SHURK_SELPH_DQ2_DLY_OEN_DQ0));
9099 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), ucdq_pi, SHU_R0_B0_DQ0_SW_ARPI_DQ_B0);
9100 
9101 			TxWinTransferDelayToUIPI(p, tx_pi_delay[1]+uiDelay-32, 0, &ucdq_ui_large, &ucdq_ui_small, &ucdq_pi, &ucdq_oen_ui_large, &ucdq_oen_ui_small);
9102 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), \
9103 											 P_Fld(ucdq_ui_large, SHURK_SELPH_DQ0_TXDLY_DQ1) | \
9104 											 P_Fld(ucdq_oen_ui_large, SHURK_SELPH_DQ0_TXDLY_OEN_DQ1));
9105 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), \
9106 											 P_Fld(ucdq_ui_small, SHURK_SELPH_DQ2_DLY_DQ1) | \
9107 											 P_Fld(ucdq_oen_ui_small, SHURK_SELPH_DQ2_DLY_OEN_DQ1));
9108 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), ucdq_pi, SHU_R0_B1_DQ0_SW_ARPI_DQ_B1);
9109 
9110 			TxWinTransferDelayToUIPI(p, tx_dqm_pi_delay[0]+uiDelay-32, 0, &ucdq_ui_large, &ucdq_ui_small, &ucdq_pi, &ucdq_oen_ui_large, &ucdq_oen_ui_small);
9111 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1), \
9112 											P_Fld(ucdq_ui_large, SHURK_SELPH_DQ1_TXDLY_DQM0) | \
9113 											P_Fld(ucdq_oen_ui_large, SHURK_SELPH_DQ1_TXDLY_OEN_DQM0));
9114 		   vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3), \
9115 											P_Fld(ucdq_ui_small, SHURK_SELPH_DQ3_DLY_DQM0) | \
9116 											P_Fld(ucdq_oen_ui_small, SHURK_SELPH_DQ3_DLY_OEN_DQM0));
9117 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B0_DQ0), ucdq_pi,  SHU_R0_B0_DQ0_SW_ARPI_DQM_B0);
9118 
9119 			TxWinTransferDelayToUIPI(p, tx_dqm_pi_delay[1]+uiDelay-32, 0, &ucdq_ui_large, &ucdq_ui_small, &ucdq_pi, &ucdq_oen_ui_large, &ucdq_oen_ui_small);
9120 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1), \
9121 											 P_Fld(ucdq_ui_large, SHURK_SELPH_DQ1_TXDLY_DQM1) | \
9122 											 P_Fld(ucdq_oen_ui_large, SHURK_SELPH_DQ1_TXDLY_OEN_DQM1));
9123 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3), \
9124 											 P_Fld(ucdq_ui_small, SHURK_SELPH_DQ3_DLY_DQM1) | \
9125 											 P_Fld(ucdq_oen_ui_small, SHURK_SELPH_DQ3_DLY_OEN_DQM1));
9126 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_R0_B1_DQ0), ucdq_pi, SHU_R0_B1_DQ0_SW_ARPI_DQM_B1);
9127 
9128 			u4err_value=0;
9129 #if ENABLE_K_WITH_WORST_SI_UI_SHIFT
9130 			//DramcEngine2SetPat(p, p->test_pattern, 0, 0, TE_UI_SHIFT);
9131 			u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, p->test_pattern);
9132 #else
9133 			//audio + xtalk pattern
9134 			DramcEngine2SetPat(p, TEST_AUDIO_PATTERN, 0, 0, TE_NO_UI_SHIFT);
9135 			u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_AUDIO_PATTERN);
9136 			DramcEngine2SetPat(p, TEST_XTALK_PATTERN, 0, 1, TE_NO_UI_SHIFT);
9137 			u4err_value |= DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_XTALK_PATTERN);
9138 #endif
9139 			// audio + xtalk pattern
9140 			//u4err_value=0;
9141 			//DramcEngine2SetPat(p,TEST_AUDIO_PATTERN, 0, 0, TE_NO_UI_SHIFT);
9142 			//u4err_value = DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_AUDIO_PATTERN);
9143 			//DramcEngine2SetPat(p,TEST_XTALK_PATTERN, 0, 1, TE_NO_UI_SHIFT);
9144 			//u4err_value |= DramcEngine2Run(p, TE_OP_WRITE_READ_CHECK, TEST_XTALK_PATTERN);
9145 
9146 			// check fail bit: 0 means pass, non-zero means fail
9147 			for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
9148 			{
9149 				u4fail_bit = u4err_value&((U32)1<<u1BitIdx);
9150 
9151 				if (u4fail_bit == 0)
9152 				{
9153 					gEyeScan_TotalPassCount[u1BitIdx]+=EYESCAN_GRAPH_CATX_VREF_STEP;
9154 				}
9155 
9156 				if(WinPerBit[u1BitIdx].first_pass== PASS_RANGE_NA)
9157 				{
9158 					if(u4fail_bit==0) //compare correct: pass
9159 					{
9160 						WinPerBit[u1BitIdx].first_pass = uiDelay;
9161 						u1pass_in_this_vref_flag[u1BitIdx] = 1;
9162 					}
9163 				}
9164 				else if(WinPerBit[u1BitIdx].last_pass == PASS_RANGE_NA)
9165 				{
9166 					if(u4fail_bit !=0) //compare error : fail
9167 					{
9168 						WinPerBit[u1BitIdx].last_pass  = (uiDelay-1);
9169 					}
9170 					else if (uiDelay>=63)
9171 					{
9172 						WinPerBit[u1BitIdx].last_pass  = 63;
9173 					}
9174 
9175 					if(WinPerBit[u1BitIdx].last_pass  !=PASS_RANGE_NA)
9176 					{
9177 						if((WinPerBit[u1BitIdx].last_pass -WinPerBit[u1BitIdx].first_pass) >= (VrefWinPerBit[u1BitIdx].last_pass -VrefWinPerBit[u1BitIdx].first_pass))
9178 						{
9179 							//if the window size is bigger than 7, treat it as a real pass window; otherwise don't update the finish counter and don't allow an early break
9180 							if((WinPerBit[u1BitIdx].last_pass -WinPerBit[u1BitIdx].first_pass) >7)
9181 								uiFinishCount |= (1<<u1BitIdx);
9182 
9183 							//update bigger window size
9184 							VrefWinPerBit[u1BitIdx].first_pass = WinPerBit[u1BitIdx].first_pass;
9185 							VrefWinPerBit[u1BitIdx].last_pass = WinPerBit[u1BitIdx].last_pass;
9186 						}
9187 
9188 
9189 							if (EyeScan_index[u1BitIdx] < EYESCAN_BROKEN_NUM)
9190 							{
9191 #if VENDER_JV_LOG || defined(RELEASE)
9192 								gEyeScan_Min[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]] = WinPerBit[u1BitIdx].first_pass;
9193 								gEyeScan_Max[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]] = WinPerBit[u1BitIdx].last_pass;
9194 #else
9195 //fra								 gEyeScan_Min[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]] = WinPerBit[u1BitIdx].first_pass + tx_pi_delay[u1BitIdx/8]-32;
9196 //fra								 gEyeScan_Max[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]] = WinPerBit[u1BitIdx].last_pass + tx_pi_delay[u1BitIdx/8]-32;
9197 								gEyeScan_Min[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]] = (S8) WinPerBit[u1BitIdx].first_pass;
9198 								gEyeScan_Max[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]] = (S8) WinPerBit[u1BitIdx].last_pass;
9199 								msg3("VrefRange %d, VrefLevel=%d, u1BitIdx=%d, index=%d (%d, %d)==\n",u2VrefRange,u2VrefLevel, u1BitIdx, EyeScan_index[u1BitIdx], gEyeScan_Min[u2VrefLevel/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]], gEyeScan_Max[u2VrefLevel/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx][EyeScan_index[u1BitIdx]]);
9200 								gEyeScan_MinMax_store_delay[u1BitIdx/8] =  tx_pi_delay[u1BitIdx/8]-32; /* save this information for HQA pass/fail judgement */
9201 #endif
9202 								EyeScan_index[u1BitIdx]=EyeScan_index[u1BitIdx]+1;
9203 							}
9204 
9205 
9206 						//reset tmp window
9207 						WinPerBit[u1BitIdx].first_pass = PASS_RANGE_NA;
9208 						WinPerBit[u1BitIdx].last_pass = PASS_RANGE_NA;
9209 					}
9210 				 }
9211 			   }
9212 		}
9213 
9214 		min_winsize = 0xffff;
9215 		min_bit = 0xff;
9216 		for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
9217 		{
9218 			VrefWinPerBit[u1BitIdx].win_size = VrefWinPerBit[u1BitIdx].last_pass- VrefWinPerBit[u1BitIdx].first_pass +(VrefWinPerBit[u1BitIdx].last_pass==VrefWinPerBit[u1BitIdx].first_pass?0:1);
9219 
9220 			if (VrefWinPerBit[u1BitIdx].win_size < min_winsize)
9221 			{
9222 				min_bit = u1BitIdx;
9223 				min_winsize = VrefWinPerBit[u1BitIdx].win_size;
9224 			}
9225 
9226 			u2TempWinSum += VrefWinPerBit[u1BitIdx].win_size;  //Sum of DQ windows for Vref selection
9227 
9228 			gEyeScan_WinSize[(u2VrefLevel+u2VrefRange*30)/EYESCAN_GRAPH_CATX_VREF_STEP][u1BitIdx] = VrefWinPerBit[u1BitIdx].win_size;
9229 
9230 #ifdef FOR_HQA_TEST_USED
9231 			if((((backup_u1MR14Value>>6)&1) == u2VrefRange) && ((backup_u1MR14Value&0x3f)==u2VrefLevel))
9232 			{
9233 				gFinalTXPerbitWin[p->channel][p->rank][u1BitIdx] = VrefWinPerBit[u1BitIdx].win_size;
9234 			}
9235 #endif
9236 
9237 		}
9238 
9239 		if ((min_winsize > TXPerbitWin_min_max) || ((min_winsize == TXPerbitWin_min_max) && (u2TempWinSum >u2tx_window_sum)))
9240 		{
9241 			TXPerbitWin_min_max = min_winsize;
9242 			u2tx_window_sum =u2TempWinSum;
9243 			u2FinalRange = u2VrefRange;
9244 			u2FinalVref = u2VrefLevel;
9245 
9246 			//Calculate the center of DQ pass window
9247 			// Record center sum of each byte
9248 			for (u1ByteIdx=0; u1ByteIdx<(p->data_width/DQS_BIT_NUMBER); u1ByteIdx++)
9249 			{
9250 		#if 1//TX_DQM_CALC_MAX_MIN_CENTER
9251 				u2Center_min[u1ByteIdx] = 0xffff;
9252 				u2Center_max[u1ByteIdx] = 0;
9253 		#endif
9254 
9255 				for (u1BitIdx=0; u1BitIdx<DQS_BIT_NUMBER; u1BitIdx++)
9256 				{
9257 					ucindex = u1ByteIdx * DQS_BIT_NUMBER + u1BitIdx;
9258 					FinalWinPerBit[ucindex].first_pass = VrefWinPerBit[ucindex].first_pass;
9259 					FinalWinPerBit[ucindex].last_pass =  VrefWinPerBit[ucindex].last_pass;
9260 					FinalWinPerBit[ucindex].win_size = VrefWinPerBit[ucindex].win_size;
9261 					FinalWinPerBit[ucindex].win_center = (FinalWinPerBit[ucindex].first_pass + FinalWinPerBit[ucindex].last_pass) >> 1;
9262 
9263 					if(FinalWinPerBit[ucindex].win_center < u2Center_min[u1ByteIdx])
9264 						u2Center_min[u1ByteIdx] = FinalWinPerBit[ucindex].win_center;
9265 
9266 					if(FinalWinPerBit[ucindex].win_center > u2Center_max[u1ByteIdx])
9267 						u2Center_max[u1ByteIdx] = FinalWinPerBit[ucindex].win_center;
9268 				}
9269 			}
9270 		}
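		/*
		 * Vref selection rule applied above (summary of the existing condition,
		 * with illustrative numbers): the Vref whose worst (smallest) per-bit
		 * window is largest wins; if two Vref settings tie on the worst-bit
		 * window, the one with the larger total window sum is kept.  For
		 * example, minimum windows of 9 PI versus 11 PI would select the 11 PI
		 * setting regardless of their sums.
		 */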
9271 
9272 
9273 		if(u2VrefRange==0 && u2VrefLevel ==50 && p->dram_type!=TYPE_LPDDR5)
9274 		{
9275 			u2VrefRange = 1;
9276 			u2VrefLevel = 20;
9277 		}
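		/*
		 * Note on the range switch above (LPDDR4 only, restating the existing
		 * code): MR14 range 0 and range 1 overlap, and range 1 level 20
		 * corresponds to roughly the same voltage as range 0 level 50 (hence
		 * the +u2VrefRange*30 offset used when indexing the gEyeScan_* arrays),
		 * so the sweep switches to range 1 here and continues without a gap.
		 */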
9278 
9279 		for (u1BitIdx = 0; u1BitIdx < p->data_width; u1BitIdx++)
9280 		{
9281 			if (u1pass_in_this_vref_flag[u1BitIdx]) gEyeScan_ContinueVrefHeight[u1BitIdx]+=EYESCAN_GRAPH_CATX_VREF_STEP;  //count the number of consecutive passing Vref steps
9282 		}
9283 	}
9284 
9285 	DramcEngine2End(p);
9286 
9287 	//Calculate the center of DQ pass window
9288 	//average the center delay
9289 	for (u1ByteIdx=0; u1ByteIdx<(p->data_width/DQS_BIT_NUMBER); u1ByteIdx++)
9290 	{
9291 		uiDelay = ((u2Center_min[u1ByteIdx] + u2Center_max[u1ByteIdx])>>1); //(max +min)/2
9292 
9293 #if VENDER_JV_LOG || defined(RELEASE)
9294 		gEyeScan_CaliDelay[u1ByteIdx] = uiDelay;
9295 #else
9296 		gEyeScan_CaliDelay[u1ByteIdx] = uiDelay + tx_pi_delay[u1ByteIdx]-32;
9297 #endif
9298 	}
9299 
9300 
9301 	//restore to original value
9302 	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress)/sizeof(U32));
9303 
9304 	//restore Vref
9305 	#ifdef __LP5_COMBO__
9306 	if (is_lp5_family(p))
9307 	{
9308 	   u2VrefRange = 0;
9309 	   u2VrefLevel = backup_u1MR14Value;
9310 	}
9311 	else
9312 	#endif
9313 	{
9314 	   u2VrefRange = backup_u1MR14Value>>6;
9315 	   u2VrefLevel = backup_u1MR14Value & 0x3f;
9316 	}
9317 	DramcTXSetVref(p, u2VrefRange, u2VrefLevel);
9318 	u1MR14Value[p->channel][p->rank][p->dram_fsp] = backup_u1MR14Value;
9319 
9320 }
9321 #endif
9322 
9323 #if TX_OE_CALIBATION
9324 #define TX_OE_PATTERN_USE_TA2 1
9325 #define TX_OE_SCAN_FULL_RANGE 0
9326 
9327 void DramcTxOECalibration(DRAMC_CTX_T *p)
9328 {
9329 	U8 u1ByteIdx;
9330 	//U8 ucbegin=0xff, , ucfirst, ucsum, ucbest_step;
9331 	U8 ucdq_oen_ui_large[2] = {0}, ucdq_oen_ui_small[2] = {0};
9332 	//U8 ucdq_ui_large_reg_value=0xff, ucdq_ui_small_reg_value=0xff;
9333 
9334 	#if TX_OE_PATTERN_USE_TA2
9335 	msg("\n[DramC_TX_OE_Calibration] TA2\n");
9336 	#else
9337 	msg("\n[DramC_TX_OE_Calibration] DMA\n");
9338 	#endif
9339 
9340 	//default set FAIL
9341 	vSetCalibrationResult(p, DRAM_CALIBRATION_TX_OE, DRAM_FAIL);
9342 
9343 #if (SUPPORT_SAVE_TIME_FOR_CALIBRATION)
9344 	if (p->femmc_Ready == 1)
9345 	{
9346 		for (u1ByteIdx = 0; u1ByteIdx < DQS_NUMBER_LP4; u1ByteIdx++)
9347 		{
9348 			ucdq_oen_ui_large[u1ByteIdx] = p->pSavetimeData->u1TX_OE_DQ_MCK[p->channel][p->rank][u1ByteIdx];
9349 			ucdq_oen_ui_small[u1ByteIdx] = p->pSavetimeData->u1TX_OE_DQ_UI[p->channel][p->rank][u1ByteIdx];
9350 		}
9351 	 vSetCalibrationResult(p, DRAM_CALIBRATION_TX_OE, DRAM_FAST_K);
9352 	}
9353 #endif
9354 
9355 	for (u1ByteIdx = 0; u1ByteIdx < DQS_NUMBER_LP4; u1ByteIdx++)
9356 	{
9357 		msg("Byte%d TX OE(2T, 0.5T) = (%d, %d)\n", u1ByteIdx, ucdq_oen_ui_large[u1ByteIdx], ucdq_oen_ui_small[u1ByteIdx]);
9358 	}
9359 	msg("\n\n");
9360 
9361 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), \
9362 									P_Fld(ucdq_oen_ui_large[0], SHURK_SELPH_DQ0_TXDLY_OEN_DQ0) | \
9363 									P_Fld(ucdq_oen_ui_large[1], SHURK_SELPH_DQ0_TXDLY_OEN_DQ1));
9364 
9365 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ1), \
9366 									P_Fld(ucdq_oen_ui_large[0], SHURK_SELPH_DQ1_TXDLY_OEN_DQM0) | \
9367 									P_Fld(ucdq_oen_ui_large[1], SHURK_SELPH_DQ1_TXDLY_OEN_DQM1));
9368 	// DLY_DQ[2:0]
9369 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), \
9370 									P_Fld(ucdq_oen_ui_small[0], SHURK_SELPH_DQ2_DLY_OEN_DQ0) | \
9371 									P_Fld(ucdq_oen_ui_small[1], SHURK_SELPH_DQ2_DLY_OEN_DQ1) );
9372 	 // DLY_DQM[2:0]
9373 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ3), \
9374 									 P_Fld(ucdq_oen_ui_small[0], SHURK_SELPH_DQ3_DLY_OEN_DQM0) | \
9375 									 P_Fld(ucdq_oen_ui_small[1], SHURK_SELPH_DQ3_DLY_OEN_DQM1));
9376 }
9377 #endif
9378 
9379 //-------------------------------------------------------------------------
9380 /** DramcMiockJmeter
9381  *	start MIOCK jitter meter.
9382  *	@param p				Pointer of context created by DramcCtxCreate.
9383  *	@param block_no 		(U8): block 0 or 1.
9384  *	@retval status			(DRAM_STATUS_T): DRAM_OK or DRAM_FAIL
9385  */
9386 //-------------------------------------------------------------------------
9387 
9388 #ifdef ENABLE_MIOCK_JMETER
9389 DRAM_STATUS_T DramcMiockJmeter(DRAMC_CTX_T *p)
9390 {
9391 	U16 ucsearch_state, fgcurrent_value, fginitial_value, ucstart_period = 0, ucmiddle_period = 0, ucend_period = 0;
9392 	U32 u4sample_cnt, u4ones_cnt[DQS_NUMBER];
9393 	U16 u2real_freq, u2real_period, ucdqs_dly;
9394 	U16 u2Jm_dly_start = 0, u2Jm_dly_end = 512, u2Jm_dly_step = 4;
9395 	U8 u1ShuLevel;
9396 	U32 u4PLL3_ADDR, u4B0_DQ;
9397 	U32 u4PLL5_ADDR;
9398 	U32 u4PLL8_ADDR;
9399 	U32 u4SDM_PCW;
9400 	U32 u4PREDIV;
9401 	U32 u4POSDIV;
9402 	U32 u4CKDIV4;
9403 	U32 u4VCOFreq;
9404 	U32 u4DataRate;
9405 	U8 u1RxGatingPI = 0, u1RxGatingPI_start = 0, u1RxGatingPI_end = 63;
9406 	U8 backup_rank, u1RankIdx, u1FBKSEL;
9407 
9408 	u1RxGatingPI = 0x0;
9409 
9410 	u2gdelay_cell_ps = 0;
9411 
9412 	// error handling
9413 	if (!p)
9414 	{
9415 		err("context NULL\n");
9416 		return DRAM_FAIL;
9417 	}
9418 
9419 	U32 u4RegBackupAddress[] =
9420 	{
9421 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1)),
9422 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6)),
9423 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6)),
9424 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5)),
9425 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5)),
9426 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ3)),
9427 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ3)),
9428 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1)),
9429 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL4)),
9430 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2)),
9431 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2)),
9432 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DLL_ARPI2)),
9433 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DLL_ARPI2)),
9434 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11)),
9435 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11)),
9436 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD11)),
9437 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL)),
9438 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY)), // need porting to Jmeter
9439 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY + DDRPHY_AO_RANK_OFFSET)), // need porting to Jmeter
9440 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_JMETER)),
9441 		//(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2)), // for gating on/off
9442 		//(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DVFSCTL2)), // for gating on/off
9443 		//(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL)), // for gating on/off
9444 		((DDRPHY_REG_SHU_CA_DLL1)),
9445 		((DDRPHY_REG_SHU_B0_DLL1)),
9446 		((DDRPHY_REG_SHU_B1_DLL1)),
9447 		((DDRPHY_REG_B0_DQ2)),
9448 		((DDRPHY_REG_B1_DQ2)),
9449 		((DDRPHY_REG_CA_CMD2)),
9450 		((DDRPHY_REG_SHU_B0_DQ13)),
9451 		((DDRPHY_REG_SHU_B1_DQ13)),
9452 		((DDRPHY_REG_SHU_CA_CMD13)),
9453 
9454 		((DDRPHY_REG_SHU_CA_DLL1) + SHIFT_TO_CHB_ADDR),
9455 		((DDRPHY_REG_SHU_B0_DLL1) + SHIFT_TO_CHB_ADDR),
9456 		((DDRPHY_REG_SHU_B1_DLL1) + SHIFT_TO_CHB_ADDR),
9457 		((DDRPHY_REG_B0_DQ2) + SHIFT_TO_CHB_ADDR),
9458 		((DDRPHY_REG_B1_DQ2) + SHIFT_TO_CHB_ADDR),
9459 		((DDRPHY_REG_CA_CMD2) + SHIFT_TO_CHB_ADDR),
9460 		((DDRPHY_REG_SHU_B0_DQ13) + SHIFT_TO_CHB_ADDR),
9461 		((DDRPHY_REG_SHU_B1_DQ13) + SHIFT_TO_CHB_ADDR),
9462 		((DDRPHY_REG_SHU_CA_CMD13) + SHIFT_TO_CHB_ADDR),
9463 #if (CHANNEL_NUM > 2)
9464 		((DDRPHY_REG_SHU_CA_DLL1) + SHIFT_TO_CHC_ADDR),
9465 		((DDRPHY_REG_SHU_B0_DLL1) + SHIFT_TO_CHC_ADDR),
9466 		((DDRPHY_REG_SHU_B1_DLL1) + SHIFT_TO_CHC_ADDR),
9467 		((DDRPHY_REG_B0_DQ2) + SHIFT_TO_CHC_ADDR),
9468 		((DDRPHY_REG_B1_DQ2) + SHIFT_TO_CHC_ADDR),
9469 		((DDRPHY_REG_CA_CMD2) + SHIFT_TO_CHC_ADDR),
9470 		((DDRPHY_REG_SHU_B0_DQ13) + SHIFT_TO_CHC_ADDR),
9471 		((DDRPHY_REG_SHU_B1_DQ13) + SHIFT_TO_CHC_ADDR),
9472 		((DDRPHY_REG_SHU_CA_CMD13) + SHIFT_TO_CHC_ADDR),
9473 
9474 		((DDRPHY_REG_SHU_CA_DLL1) + SHIFT_TO_CHD_ADDR),
9475 		((DDRPHY_REG_SHU_B0_DLL1) + SHIFT_TO_CHD_ADDR),
9476 		((DDRPHY_REG_SHU_B1_DLL1) + SHIFT_TO_CHD_ADDR),
9477 		((DDRPHY_REG_B0_DQ2) + SHIFT_TO_CHD_ADDR),
9478 		((DDRPHY_REG_B1_DQ2) + SHIFT_TO_CHD_ADDR),
9479 		((DDRPHY_REG_CA_CMD2) + SHIFT_TO_CHD_ADDR),
9480 		((DDRPHY_REG_SHU_B0_DQ13) + SHIFT_TO_CHD_ADDR),
9481 		((DDRPHY_REG_SHU_B1_DQ13) + SHIFT_TO_CHD_ADDR),
9482 		((DDRPHY_REG_SHU_CA_CMD13) + SHIFT_TO_CHD_ADDR),
9483 #endif
9484 	};
9485 
9486 	backup_rank = u1GetRank(p);
9487 
9488 	//backup register value
9489 	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
9490 
9491 	//OE disable - start
9492 	vIO32WriteFldMulti_All(DDRPHY_REG_B0_DQ2, P_Fld( 0 , B0_DQ2_RG_TX_ARDQS_OE_TIE_SEL_B0 ) \
9493 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARDQS_OE_TIE_EN_B0  ) \
9494 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARWCK_OE_TIE_SEL_B0 ) \
9495 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARWCK_OE_TIE_EN_B0  ) \
9496 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARWCKB_OE_TIE_SEL_B0 ) \
9497 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARWCKB_OE_TIE_EN_B0  ) \
9498 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B0 ) \
9499 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARDQM_OE_TIE_EN_B0  ) \
9500 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B0 ) \
9501 														  | P_Fld( 0xff 	  , B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0	) );
9502 
9503 	vIO32WriteFldMulti_All(DDRPHY_REG_B1_DQ2, P_Fld( 0 , B1_DQ2_RG_TX_ARDQS_OE_TIE_SEL_B1 ) \
9504 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARDQS_OE_TIE_EN_B1  ) \
9505 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARWCK_OE_TIE_SEL_B1 ) \
9506 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARWCK_OE_TIE_EN_B1  ) \
9507 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARWCKB_OE_TIE_SEL_B1 ) \
9508 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARWCKB_OE_TIE_EN_B1  ) \
9509 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B1 ) \
9510 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARDQM_OE_TIE_EN_B1  ) \
9511 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B1 ) \
9512 														  | P_Fld( 0xff 	  , B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1	) );
9513 
9514 	vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld( 0 , CA_CMD2_RG_TX_ARCLK_OE_TIE_SEL_CA ) \
9515 														  | P_Fld( 1	   , CA_CMD2_RG_TX_ARCLK_OE_TIE_EN_CA  ) \
9516 														  | P_Fld( 0 , CA_CMD2_RG_TX_ARCS_OE_TIE_SEL_CA ) \
9517 														  | P_Fld( 1	   , CA_CMD2_RG_TX_ARCS_OE_TIE_EN_CA  ) \
9518 														  | P_Fld( 0 , CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA ) \
9519 														  | P_Fld( 0xff 	  , CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA  ) );
9520 
9521 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ13 , P_Fld( 0 , SHU_B0_DQ13_RG_TX_ARDQSB_OE_TIE_SEL_B0  ) \
9522 															  | P_Fld( 1	   , SHU_B0_DQ13_RG_TX_ARDQSB_OE_TIE_EN_B0	 ));
9523 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ13 , P_Fld( 0 , SHU_B1_DQ13_RG_TX_ARDQSB_OE_TIE_SEL_B1  ) \
9524 															  | P_Fld( 1	   , SHU_B1_DQ13_RG_TX_ARDQSB_OE_TIE_EN_B1	 ));
9525 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_CMD13, P_Fld( 0 , SHU_CA_CMD13_RG_TX_ARCLKB_OE_TIE_SEL_CA ) \
9526 															  | P_Fld( 1	   , SHU_CA_CMD13_RG_TX_ARCLKB_OE_TIE_EN_CA  ));
9527 	//OE disable - end
9528 
9529 	//DramcHWGatingOnOff(p, 0); // disable gating tracking for DQS PI; moved to vApplyConfigBeforeCalibration
9530 
9531 	// @A60868 for *RANK_SEL_SER_EN* = 0 to DA_RX_ARDQ_RANK_SEL_TXD_*[0]
9532 	//				for *RANK_SEL_SER_EN* = 1 to DA_RX_ARDQ_RANK_SEL_TXD_*[7:0]
9533 	// The *RANK_SEL_SER_EN* = 0 is old mode.
9534 	// The *RANK_SEL_SER_EN* = 1 is the new mode, used when there is no background access.
9535 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), 0, SHU_B0_DQ11_RG_RX_ARDQ_RANK_SEL_SER_EN_B0);
9536 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), 0, SHU_B1_DQ11_RG_RX_ARDQ_RANK_SEL_SER_EN_B1);
9537 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD11), 0, SHU_CA_CMD11_RG_RX_ARCA_RANK_SEL_SER_EN_CA);
9538 
9539 	//@Darren, turn the DLL off to stabilize the middle transition (high-to-low or low-to-high) at high vcore
9540 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_DLL1, P_Fld(0x0, SHU_CA_DLL1_RG_ARDLL_PHDET_EN_CA)
9541 										| P_Fld(0x0, SHU_CA_DLL1_RG_ARDLL_PHDET_OUT_SEL_CA));
9542 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DLL1, P_Fld(0x0, SHU_B0_DLL1_RG_ARDLL_PHDET_EN_B0)
9543 										| P_Fld(0x0, SHU_B0_DLL1_RG_ARDLL_PHDET_OUT_SEL_B0));
9544 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DLL1, P_Fld(0x0, SHU_B1_DLL1_RG_ARDLL_PHDET_EN_B1)
9545 										| P_Fld(0x0, SHU_B1_DLL1_RG_ARDLL_PHDET_OUT_SEL_B1));
9546 
9547 	//MCK4X CG
9548 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0, MISC_CTRL1_R_DMDQSIENCG_EN);
9549 	//@A60868, DQS PI mode for JMTR
9550 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DLL_ARPI2), 0, SHU_B0_DLL_ARPI2_RG_ARPI_CG_DQSIEN_B0); // DQS PI mode
9551 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DLL_ARPI2), 0, SHU_B1_DLL_ARPI2_RG_ARPI_CG_DQSIEN_B1); // DQS PI mode
9552 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_EYE_SCAN_CG_EN); // enable toggle cnt
9553 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL4), 0, MISC_CTRL4_R_OPT2_CG_DQSIEN); // moved to Golden settings for the Jmeter clock
9554 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL), 0, MISC_STBCAL_DQSIENCG_NORMAL_EN); // @Darren need confirm for DQS*_ERR_CNT, APHY PICG freerun
9555 	//@A60868, End
9556 
9557 	// Bypass DQS glitch-free mode
9558 	// RG_RX_*RDQ_EYE_DLY_DQS_BYPASS_B**
9559 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6), 1, B0_DQ6_RG_RX_ARDQ_EYE_DLY_DQS_BYPASS_B0);
9560 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6), 1, B1_DQ6_RG_RX_ARDQ_EYE_DLY_DQS_BYPASS_B1);
9561 
9562 	//Enable DQ eye scan
9563 	//RG_*_RX_EYE_SCAN_EN
9564 	//RG_*_RX_VREF_EN
9565 	//RG_*_RX_SMT_EN
9566 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
9567 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), P_Fld(0x1, MISC_DUTYSCAN1_EYESCAN_DQS_SYNC_EN)
9568 										| P_Fld(0x1, MISC_DUTYSCAN1_EYESCAN_NEW_DQ_SYNC_EN)
9569 										| P_Fld(0x1, MISC_DUTYSCAN1_EYESCAN_DQ_SYNC_EN));
9570 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5), 1, B0_DQ5_RG_RX_ARDQ_EYE_EN_B0);
9571 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5), 1, B1_DQ5_RG_RX_ARDQ_EYE_EN_B1);
9572 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5), 1, B0_DQ5_RG_RX_ARDQ_VREF_EN_B0);
9573 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5), 1, B1_DQ5_RG_RX_ARDQ_VREF_EN_B1);
9574 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ3), 1, B0_DQ3_RG_RX_ARDQ_SMT_EN_B0);
9575 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ3), 1, B1_DQ3_RG_RX_ARDQ_SMT_EN_B1);
9576 	//@A60868, JMTR en
9577 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2), 1, B0_PHY2_RG_RX_ARDQS_JM_EN_B0);
9578 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2), 1, B1_PHY2_RG_RX_ARDQS_JM_EN_B1);
9579 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_JMETER), 1, MISC_JMETER_JMTR_EN);
9580 	//@A60868, End
9581 
9582 	//@A60868, JM_SEL = 1, JM_SEL = 0 for LPBK
9583 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2), 1, B0_PHY2_RG_RX_ARDQS_JM_SEL_B0);
9584 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2), 1, B1_PHY2_RG_RX_ARDQS_JM_SEL_B1);
9585 	//@A60868, End
9586 
9587 	//Enable MIOCK jitter meter mode ( RG_RX_MIOCK_JIT_EN=1)
9588 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_MIOCK_JIT_EN);
9589 
9590 	//Disable DQ eye scan (b'1), for counter clear
9591 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
9592 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_DQSERRCNT_DIS);
9593 
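	/*
	 * Measurement idea for the sweep below (a summary of the existing flow):
	 * at each JM delay step the sampled DQS is accumulated for a fixed time;
	 * if the ones count is below half of the total sample count the sampled
	 * level is treated as 0, otherwise as 1.  The delay values at which this
	 * level flips mark DQS edges, so the distance between the first and third
	 * flip is one full DQS period expressed in delay cells.
	 */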
9594 	for (u1RxGatingPI = u1RxGatingPI_start; u1RxGatingPI < u1RxGatingPI_end; u1RxGatingPI++)
9595 	{
9596 		msg("\n[DramcMiockJmeter] u1RxGatingPI = %d\n", u1RxGatingPI);
9597 
9598 		ucsearch_state = 0;
9599 		for (u1RankIdx = RANK_0; u1RankIdx < p->support_rank_num; u1RankIdx++)
9600 		{
9601 			vSetRank(p, u1RankIdx);
9602 			// bit [6] of SHU_RK_B0_DQSIEN_PI_DLY_DQSIEN_PI_B0 is unused (ignore)
9603 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY), u1RxGatingPI, SHU_RK_B0_DQSIEN_PI_DLY_DQSIEN_PI_B0); // for rank*_B0
9604 			//Darren---vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B1_DQSIEN_PI_DLY), u1RxGatingPI, SHU_RK_B1_DQSIEN_PI_DLY_DQSIEN_PI_B1); // for rank*_B0
9605 		}
9606 		vSetRank(p, backup_rank);
9607 
9608 		for (ucdqs_dly = u2Jm_dly_start; ucdqs_dly < u2Jm_dly_end; ucdqs_dly += u2Jm_dly_step)
9609 		{
9610 
9611 			//@A60868, Set DQS delay (RG_*_RX_DQS_EYE_DLY)
9612 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2), ucdqs_dly, B0_PHY2_RG_RX_ARDQS_JM_DLY_B0);
9613 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2), ucdqs_dly, B1_PHY2_RG_RX_ARDQS_JM_DLY_B1);
9614 			//@A60868, End
9615 			DramPhyReset(p);
9616 
9617 			//Reset eye scan counters (reg_sw_rst): 1 to 0
9618 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_REG_SW_RST);
9619 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_REG_SW_RST);
9620 
9621 			//Enable DQ eye scan (b'1)
9622 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
9623 
9624 			//2ns/sample, here we delay 1ms about 500 samples
9625 			mcDELAY_US(10);
9626 
9627 			//Disable DQ eye scan (b'1), for counter latch
9628 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
9629 
9630 			//Read the counter values from registers (toggle_cnt*, dqs_err_cnt*);
9631 			u4sample_cnt = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_TOGGLE_CNT), MISC_DUTY_TOGGLE_CNT_TOGGLE_CNT);
9632 			u4ones_cnt[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS0_ERR_CNT), MISC_DUTY_DQS0_ERR_CNT_DQS0_ERR_CNT);
9633 			//u4ones_cnt[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS1_ERR_CNT), MISC_DUTY_DQS1_ERR_CNT_DQS1_ERR_CNT);
9634 			//u4ones_cnt[2] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS2_ERR_CNT), MISC_DUTY_DQS2_ERR_CNT_DQS2_ERR_CNT);
9635 			//u4ones_cnt[3] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS3_ERR_CNT), MISC_DUTY_DQS3_ERR_CNT_DQS3_ERR_CNT);
9636 			#ifdef ETT_PRINT_FORMAT
9637 			msg("%d : %d, %d\n", ucdqs_dly, u4sample_cnt, u4ones_cnt[0]);
9638 			#else
9639 			msg("%3d : %8d, %8d\n", ucdqs_dly, u4sample_cnt, u4ones_cnt[0]);
9640 			#endif
9641 
9642 			//change to boolean value
9643 			if (u4ones_cnt[0] < (u4sample_cnt / 2))
9644 			{
9645 				fgcurrent_value = 0;
9646 			}
9647 			else
9648 			{
9649 				fgcurrent_value = 1;
9650 			}
9651 
9652 			#if 1//more than 1T data
9653 			{
9654 				if (ucsearch_state == 0)
9655 				{
9656 					//record initial value at the beginning
9657 					fginitial_value = fgcurrent_value;
9658 					ucsearch_state = 1;
9659 				}
9660 				else if (ucsearch_state == 1)
9661 				{
9662 					// check if change value
9663 					if (fgcurrent_value != fginitial_value)
9664 					{
9665 						// start of the period
9666 						fginitial_value = fgcurrent_value;
9667 						ucstart_period = ucdqs_dly;
9668 						ucsearch_state = 2;
9669 					}
9670 				}
9671 				else if (ucsearch_state == 2)
9672 				{
9673 					// check if change value
9674 					if (fgcurrent_value != fginitial_value)
9675 					{
9676 						fginitial_value = fgcurrent_value;
9677 						ucmiddle_period = ucdqs_dly;
9678 						ucsearch_state = 3;
9679 					}
9680 				}
9681 				else if (ucsearch_state == 3)
9682 				{
9683 					// check if change value
9684 					if (fgcurrent_value != fginitial_value)
9685 					{
9686 						// end of the period, break the loop
9687 						ucend_period = ucdqs_dly;
9688 						ucsearch_state = 4;
9689 						break;
9690 					}
9691 				}
9692 				else
9693 				{
9694 					//nothing
9695 				}
9696 			}
9697 			#else //only 0.5T data
9698 			{
9699 				if (ucsearch_state == 0)
9700 				{
9701 					//record initial value at the beginning
9702 					fginitial_value = fgcurrent_value;
9703 					ucsearch_state = 1;
9704 				}
9705 				else if (ucsearch_state == 1)
9706 				{
9707 					// check if change value
9708 					if (fgcurrent_value != fginitial_value)
9709 					{
9710 						// start of the period
9711 						fginitial_value = fgcurrent_value;
9712 						ucstart_period = ucdqs_dly;
9713 						ucsearch_state = 2;
9714 					}
9715 				}
9716 				else if (ucsearch_state == 2)
9717 				{
9718 					// check if change value
9719 					if (fgcurrent_value != fginitial_value)
9720 					{
9721 						// end of the period, break the loop
9722 						ucend_period = ucdqs_dly;
9723 						ucsearch_state = 4;
9724 					   break;
9725 					}
9726 				}
9727 			}
9728 			#endif
9729 		}
9730 
9731 		if ((ucsearch_state == 4) || (ucsearch_state == 3))
9732 			break;
9733 	}
9734 
9735 	//restore to original value
9736 	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
9737 
9738 	if (ucsearch_state != 4)
9739 	{
9740 		if (ucsearch_state != 3)
9741 		{
9742 			msg("\n\tMIOCK jitter meter - ch=%d\n", p->channel);
9743 			msg("\tLess than 0.5T data. Cannot calculate delay cell time\n\n");
9744 
9745 			u2g_num_dlycell_perT = 0;	//fall back to the LP3/LP4 lookup table
9746 
9747 			return DRAM_FAIL;
9748 		}
9749 		else
9750 		{
9751 			//Calculate 1 delay cell = ? ps
9752 			// 1T = ? delay cell
9753 			u2g_num_dlycell_perT = (ucmiddle_period - ucstart_period) * 2;
9754 			// 1T = ? ps
9755 		}
9756 	}
9757 	else
9758 	{
9759 		//Calculate 1 delay cell = ? ps
9760 		// 1T = ? delay cell
9761 		u2g_num_dlycell_perT = (ucend_period - ucstart_period);
9762 		// 1T = ? ps
9763 	}
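	/*
	 * Summary of the two cases above (the numbers come from the sample log at
	 * the end of this function): if a full period was observed
	 * (ucsearch_state == 4), 1T = ucend_period - ucstart_period delay cells,
	 * e.g. 107 - 29 = 78 cells.  If only half a period was seen
	 * (ucsearch_state == 3), 1T is estimated as twice the half period,
	 * (ucmiddle_period - ucstart_period) * 2.
	 */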
9764 
9765 	u1ShuLevel = u4IO32ReadFldAlign(DDRPHY_REG_DVFS_STATUS, DVFS_STATUS_OTHER_SHU_GP);
9766 	u4PLL5_ADDR = DDRPHY_REG_SHU_PHYPLL1 + DDRPHY_AO_SHU_OFFSET * u1ShuLevel;
9767 	u4PLL8_ADDR = DDRPHY_REG_SHU_PHYPLL2 + DDRPHY_AO_SHU_OFFSET * u1ShuLevel;
9768 	u4PLL3_ADDR = DDRPHY_REG_SHU_PHYPLL3 + DDRPHY_AO_SHU_OFFSET * u1ShuLevel;
9769 	u4B0_DQ = DDRPHY_REG_SHU_B0_DQ1 + DDRPHY_AO_SHU_OFFSET * u1ShuLevel;
9770 	u4SDM_PCW = u4IO32ReadFldAlign(u4PLL5_ADDR, SHU_PHYPLL1_RG_RPHYPLL_SDM_PCW);
9771 	u4PREDIV = u4IO32ReadFldAlign(u4PLL8_ADDR, SHU_PHYPLL2_RG_RPHYPLL_PREDIV);
9772 	u4POSDIV = u4IO32ReadFldAlign(u4PLL8_ADDR, SHU_PHYPLL2_RG_RPHYPLL_POSDIV);
9773 	u4CKDIV4 = u4IO32ReadFldAlign(u4B0_DQ, SHU_B0_DQ1_RG_ARPI_MIDPI_CKDIV4_EN_B0);
9774 	u1FBKSEL = u4IO32ReadFldAlign(u4PLL3_ADDR, SHU_PHYPLL3_RG_RPHYPLL_FBKSEL);
9775 	u4VCOFreq = (((52 >> u4PREDIV) * (u4SDM_PCW >> 8)) >> u4POSDIV) << u1FBKSEL;
9776 	u4DataRate = u4VCOFreq >> u4CKDIV4;
9777 	u2real_freq = u4DataRate >> 1;
9778 	u2real_period = (U16) (1000000 / u2real_freq);
9779 
9780 	//calculate delay cell time
9781 	u2gdelay_cell_ps = u2real_period * 100 / u2g_num_dlycell_perT;
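	/*
	 * Worked example (taken from the sample log at the end of this function):
	 * with a 936 MHz clock, u2real_period = 1000000 / 936 = 1068 ps, and with
	 * u2g_num_dlycell_perT = 78 the result is
	 * u2gdelay_cell_ps = 1068 * 100 / 78 = 1369, i.e. one delay cell is about
	 * 13.69 ps (the value is stored multiplied by 100).
	 */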
9782 
9783 	if (ucsearch_state == 4)
9784 	{ // 1T
9785 		msg("\n\tMIOCK jitter meter\tch=%d\n\n"
9786 						"1T = (%d-%d) = %d dly cells\n"
9787 						"Clock freq = %d MHz, period = %d ps, 1 dly cell = %d/100 ps\n",
9788 							p->channel,
9789 							ucend_period, ucstart_period, u2g_num_dlycell_perT,
9790 							u2real_freq, u2real_period, u2gdelay_cell_ps);
9791 	}
9792 	else
9793 	{ // 0.5T
9794 		msg("\n\tMIOCK jitter meter\tch=%d\n\n"
9795 						"1T = (%d-%d)*2 = %d dly cells\n"
9796 						"Clock freq = %d MHz, period = %d ps, 1 dly cell = %d/100 ps\n",
9797 							p->channel,
9798 							ucmiddle_period, ucstart_period, u2g_num_dlycell_perT,
9799 							u2real_freq, u2real_period, u2gdelay_cell_ps);
9800 	}
9801 
9802 	return DRAM_OK;
9803 
9804 // log example
9805 /* dly: sample_cnt	 DQS0_cnt  DQS1_cnt
9806 	0 : 10962054,		 0, 	   0
9807 	1 : 10958229,		 0, 	   0
9808 	2 : 10961109,		 0, 	   0
9809 	3 : 10946916,		 0, 	   0
9810 	4 : 10955421,		 0, 	   0
9811 	5 : 10967274,		 0, 	   0
9812 	6 : 10893582,		 0, 	   0
9813 	7 : 10974762,		 0, 	   0
9814 	8 : 10990278,		 0, 	   0
9815 	9 : 10972026,		 0, 	   0
9816    10 :  7421004,		 0, 	   0
9817    11 : 10943883,		 0, 	   0
9818    12 : 10984275,		 0, 	   0
9819    13 : 10955268,		 0, 	   0
9820    14 : 10960326,		 0, 	   0
9821    15 : 10952451,		 0, 	   0
9822    16 : 10956906,		 0, 	   0
9823    17 : 10960803,		 0, 	   0
9824    18 : 10944108,		 0, 	   0
9825    19 : 10959939,		 0, 	   0
9826    20 : 10959246,		 0, 	   0
9827    21 : 11002212,		 0, 	   0
9828    22 : 10919700,		 0, 	   0
9829    23 : 10977489,		 0, 	   0
9830    24 : 11009853,		 0, 	   0
9831    25 : 10991133,		 0, 	   0
9832    26 : 10990431,		 0, 	   0
9833    27 : 10970703,	 11161, 	   0
9834    28 : 10970775,	257118, 	   0
9835    29 : 10934442,  9450467, 	   0
9836    30 : 10970622, 10968475, 	   0
9837    31 : 10968831, 10968831, 	   0
9838    32 : 10956123, 10956123, 	   0
9839    33 : 10950273, 10950273, 	   0
9840    34 : 10975770, 10975770, 	   0
9841    35 : 10983024, 10983024, 	   0
9842    36 : 10981701, 10981701, 	   0
9843    37 : 10936782, 10936782, 	   0
9844    38 : 10889523, 10889523, 	   0
9845    39 : 10985913, 10985913,    55562
9846    40 : 10970235, 10970235,   272294
9847    41 : 10996056, 10996056,  9322868
9848    42 : 10972350, 10972350, 10969738
9849    43 : 10963917, 10963917, 10963917
9850    44 : 10967895, 10967895, 10967895
9851    45 : 10961739, 10961739, 10961739
9852    46 : 10937097, 10937097, 10937097
9853    47 : 10937952, 10937952, 10937952
9854    48 : 10926018, 10926018, 10926018
9855    49 : 10943793, 10943793, 10943793
9856    50 : 10954638, 10954638, 10954638
9857    51 : 10968048, 10968048, 10968048
9858    52 : 10944036, 10944036, 10944036
9859    53 : 11012112, 11012112, 11012112
9860    54 : 10969137, 10969137, 10969137
9861    55 : 10968516, 10968516, 10968516
9862    56 : 10952532, 10952532, 10952532
9863    57 : 10985832, 10985832, 10985832
9864    58 : 11002527, 11002527, 11002527
9865    59 : 10950660, 10873571, 10950660
9866    60 : 10949022, 10781797, 10949022
9867    61 : 10974366, 10700617, 10974366
9868    62 : 10972422,  1331974, 10972422
9869    63 : 10926567,		 0, 10926567
9870    64 : 10961658,		 0, 10961658
9871    65 : 10978893,		 0, 10978893
9872    66 : 10962828,		 0, 10962828
9873    67 : 10957599,		 0, 10957599
9874    68 : 10969227,		 0, 10969227
9875    69 : 10960722,		 0, 10960722
9876    70 : 10970937,		 0, 10963180
9877    71 : 10962054,		 0, 10711639
9878    72 : 10954719,		 0, 10612707
9879    73 : 10958778,		 0,   479589
9880    74 : 10973898,		 0, 	   0
9881    75 : 11004156,		 0, 	   0
9882    76 : 10944261,		 0, 	   0
9883    77 : 10955340,		 0, 	   0
9884    78 : 10998153,		 0, 	   0
9885    79 : 10998774,		 0, 	   0
9886    80 : 10953234,		 0, 	   0
9887    81 : 10960020,		 0, 	   0
9888    82 : 10923831,		 0, 	   0
9889    83 : 10951362,		 0, 	   0
9890    84 : 10965249,		 0, 	   0
9891    85 : 10949103,		 0, 	   0
9892    86 : 10948707,		 0, 	   0
9893    87 : 10941147,		 0, 	   0
9894    88 : 10966572,		 0, 	   0
9895    89 : 10971333,		 0, 	   0
9896    90 : 10943721,		 0, 	   0
9897    91 : 10949337,		 0, 	   0
9898    92 : 10965942,		 0, 	   0
9899    93 : 10970397,		 0, 	   0
9900    94 : 10956429,		 0, 	   0
9901    95 : 10939896,		 0, 	   0
9902    96 : 10967112,		 0, 	   0
9903    97 : 10951911,		 0, 	   0
9904    98 : 10953702,		 0, 	   0
9905    99 : 10971090,		 0, 	   0
9906   100 : 10939590,		 0, 	   0
9907   101 : 10993392,		 0, 	   0
9908   102 : 10975932,		 0, 	   0
9909   103 : 10949499,	 40748, 	   0
9910   104 : 10962522,	258638, 	   0
9911   105 : 10951524,	275292, 	   0
9912   106 : 10982475,	417642, 	   0
9913   107 : 10966887, 10564347, 	   0
9914   ===============================================================================
9915 	  MIOCK jitter meter - channel=0
9916   ===============================================================================
9917   1T = (107-29) = 78 delay cells
9918   Clock frequency = 936 MHz, Clock period = 1068 ps, 1 delay cell = 13 ps
9919 */
9920 }
9921 
9922 /* "picoseconds per delay cell" depends on Vcore only (frequency doesn't matter)
9923  * 1. Retrieve current freq's vcore voltage using pmic API
9924  * 2. Perform delay cell time calculation (Bypass if shuffle vcore value is the same as before)
9925  */
9926 static void GetVcoreDelayCellTime(DRAMC_CTX_T *p, U8 shuffleIdx)
9927 {
9928 	U32 channel_i;
9929 
9930 #if __ETT__
9931 #if (FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
9932 	u4gVcore[shuffleIdx] = pmic_vcore_voltage_read();
9933 #endif
9934 
9935 	/* delay cell calculation is skipped if vcore is same as previous shuffle's */
9936 	if (u4gVcore[shuffleIdx] != u4previousVcore)
9937 	{
9938 		u4previousVcore = u4gVcore[shuffleIdx];
9939 		DramcMiockJmeter(p);
9940 	}
9941 #else
9942 		DramcMiockJmeter(p);
9943 #endif
9944 
9945 	for(channel_i=CHANNEL_A; channel_i < p->support_channel_num; channel_i++)
9946 	{
9947 		u2g_num_dlycell_perT_all[shuffleIdx][channel_i] = u2g_num_dlycell_perT;
9948 		u2gdelay_cell_ps_all[shuffleIdx][channel_i] = u2gdelay_cell_ps;
9949 	}
9950 #if __ETT__
9951 	msg("Freq=%d, CH_%d, VCORE=%d, cell=%d\n", p->frequency, p->channel, u4gVcore[shuffleIdx], u2gdelay_cell_ps_all[shuffleIdx][p->channel]);
9952 #endif
9953 
9954 	return;
9955 }
9956 
9957 void DramcMiockJmeterHQA(DRAMC_CTX_T *p)
9958 {
9959 	//do MiockJitterMeter@DDR2667
9960 	U8 shuffleIdx;
9961 
9962 	msg("[MiockJmeterHQA]\n");
9963 
9964 	shuffleIdx = get_shuffleIndex_by_Freq(p);
9965 
9966 	if(p->channel == CHANNEL_A)
9967 	{
9968 		if (p->frequency <= 600)
9969 		{
9970 			u2g_num_dlycell_perT_all[shuffleIdx][p->channel] = 0; // always lookup table
9971 			u2gdelay_cell_ps_all[shuffleIdx][p->channel] = 270; // @Darren, Wait arnold for lookup table
9972 		}
9973 		else
9974 			GetVcoreDelayCellTime(p, shuffleIdx);
9975 	}
9976 
9977 	u2gdelay_cell_ps_all[shuffleIdx][CHANNEL_B] = u2gdelay_cell_ps_all[shuffleIdx][CHANNEL_A];
9978 
9979 #ifdef FOR_HQA_TEST_USED
9980 	if (u2g_num_dlycell_perT_all[shuffleIdx][p->channel] == 0) GetVcoreDelayCellTimeFromTable(p); //lookup table
9981 #endif
9982 
9983 	/* Use highest freq's delay cell time measurement results as reference */
9984 	p->u2num_dlycell_perT = u2g_num_dlycell_perT_all[shuffleIdx][p->channel];
9985 	p->u2DelayCellTimex100 = u2gdelay_cell_ps_all[shuffleIdx][p->channel];
9986 	msg3("DelayCellTimex100 CH_%d, (VCORE=%d, cell=%d)\n",p->channel, u4gVcore[shuffleIdx], p->u2DelayCellTimex100);
9987 }
9988 #endif
9989 
9990 //-------------------------------------------------------------------------
9991 /** Dramc8PhaseCal
9992  *	start 8-Phase Calibration.
9993  *	@param p				Pointer of context created by DramcCtxCreate.
9994  *	@param block_no 		(U8): block 0 or 1.
9995  *	@retval status			(DRAM_STATUS_T): DRAM_OK or DRAM_FAIL
9996  */
9997 //-------------------------------------------------------------------------
9998 
9999 DRAM_STATUS_T Dramc8PhaseCal(DRAMC_CTX_T *p)
10000 {
10001 #if ENABLE_8PHASE_CALIBRATION
10002 	U8 u1DqsLevel = 0xff, u18Ph_dly_loop_break = 0;
10003 	U8 u1DqsienPI = 0;
10004 	U8 u18Phase_SM = DQS_8PH_DEGREE_0, u18Ph_dly = 0, u18Ph_start = 0, u18Ph_end = 0, u18Ph_dly_final = 0xff;
10005 	U16 u2R0 = 0xffff, u2R180 = 0xffff, u2R = 0xffff;
10006 	U16 u2P = 0xffff, ucdqs_dly = 0;
10007 	S16 s2Err_code = 0x7fff, s2Err_code_min = 0x7fff;
10008 	U16 u2Jm_dly_start = 0, u2Jm_dly_end = 512, u2Jm_dly_step = 1;
10009 	U32 u4sample_cnt, u4ones_cnt[DQS_NUMBER];
10010 	U8 backup_rank, u1RankIdx, u18PhDlyBackup = 0;
10011 	U8 u1loop_cnt = 0, u1early_break_cnt = 5;
10012 	U32 u4backup_broadcast= GetDramcBroadcast();
10013 	DRAM_STATUS_T eDRAMStatus = DRAM_OK;
10014 
10015 #ifdef DUMP_INIT_RG_LOG_TO_DE //for FT dump 3733 dram_init.c
10016 	return DRAM_OK;
10017 #endif
10018 
10019 	u1DqsienPI = 0x0;
10020 
10021 	// error handling
10022 	if (!p)
10023 	{
10024 		err("context NULL\n");
10025 		return DRAM_FAIL;
10026 	}
10027 
10028 	if (p->frequency < 1866)
10029 	{
10030 		//err("skip 8-Phase Calib Freq is %d < 1866 !!!\n", p->frequency);
10031 		return DRAM_OK;
10032 	}
10033 
10034 	U32 u4RegBackupAddress[] =
10035 	{
10036 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1)),
10037 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6)),
10038 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6)),
10039 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5)),
10040 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5)),
10041 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ3)),
10042 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ3)),
10043 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1)),
10044 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL4)),
10045 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2)),
10046 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2)),
10047 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DLL_ARPI2)),
10048 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DLL_ARPI2)),
10049 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11)),
10050 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11)),
10051 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD11)),
10052 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL)),
10053 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY)), // need porting to Jmeter
10054 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY + DDRPHY_AO_RANK_OFFSET)), // need porting to Jmeter
10055 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_JMETER)),
10056 		//(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL2)), // for gating on/off backup/restore
10057 		//(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DVFSCTL2)), // for gating on/off backup/restore
10058 		(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL)), // for gating on/off backup/restore
10059 #if 0
10060 		(DRAMC_REG_ADDR(DDRPHY_REG_B0_DLL_ARPI0)),
10061 		(DRAMC_REG_ADDR(DDRPHY_REG_B1_DLL_ARPI0)),
10062 		(DRAMC_REG_ADDR(DDRPHY_REG_CA_DLL_ARPI0)),
10063 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ6)),
10064 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ6)),
10065 		(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD6)),
10066 #endif
10067 		((DDRPHY_REG_SHU_CA_DLL1)),
10068 		((DDRPHY_REG_SHU_B0_DLL1)),
10069 		((DDRPHY_REG_SHU_B1_DLL1)),
10070 		((DDRPHY_REG_B0_DQ2)),
10071 		((DDRPHY_REG_B1_DQ2)),
10072 		((DDRPHY_REG_CA_CMD2)),
10073 		((DDRPHY_REG_SHU_B0_DQ13)),
10074 		((DDRPHY_REG_SHU_B1_DQ13)),
10075 		((DDRPHY_REG_SHU_CA_CMD13)),
10076 
10077 		((DDRPHY_REG_SHU_CA_DLL1) + SHIFT_TO_CHB_ADDR),
10078 		((DDRPHY_REG_SHU_B0_DLL1) + SHIFT_TO_CHB_ADDR),
10079 		((DDRPHY_REG_SHU_B1_DLL1) + SHIFT_TO_CHB_ADDR),
10080 		((DDRPHY_REG_B0_DQ2) + SHIFT_TO_CHB_ADDR),
10081 		((DDRPHY_REG_B1_DQ2) + SHIFT_TO_CHB_ADDR),
10082 		((DDRPHY_REG_CA_CMD2) + SHIFT_TO_CHB_ADDR),
10083 		((DDRPHY_REG_SHU_B0_DQ13) + SHIFT_TO_CHB_ADDR),
10084 		((DDRPHY_REG_SHU_B1_DQ13) + SHIFT_TO_CHB_ADDR),
10085 		((DDRPHY_REG_SHU_CA_CMD13) + SHIFT_TO_CHB_ADDR),
10086 #if (CHANNEL_NUM > 2)
10087 		((DDRPHY_REG_SHU_CA_DLL1) + SHIFT_TO_CHC_ADDR),
10088 		((DDRPHY_REG_SHU_B0_DLL1) + SHIFT_TO_CHC_ADDR),
10089 		((DDRPHY_REG_SHU_B1_DLL1) + SHIFT_TO_CHC_ADDR),
10090 		((DDRPHY_REG_B0_DQ2) + SHIFT_TO_CHC_ADDR),
10091 		((DDRPHY_REG_B1_DQ2) + SHIFT_TO_CHC_ADDR),
10092 		((DDRPHY_REG_CA_CMD2) + SHIFT_TO_CHC_ADDR),
10093 		((DDRPHY_REG_SHU_B0_DQ13) + SHIFT_TO_CHC_ADDR),
10094 		((DDRPHY_REG_SHU_B1_DQ13) + SHIFT_TO_CHC_ADDR),
10095 		((DDRPHY_REG_SHU_CA_CMD13) + SHIFT_TO_CHC_ADDR),
10096 
10097 		((DDRPHY_REG_SHU_CA_DLL1) + SHIFT_TO_CHD_ADDR),
10098 		((DDRPHY_REG_SHU_B0_DLL1) + SHIFT_TO_CHD_ADDR),
10099 		((DDRPHY_REG_SHU_B1_DLL1) + SHIFT_TO_CHD_ADDR),
10100 		((DDRPHY_REG_B0_DQ2) + SHIFT_TO_CHD_ADDR),
10101 		((DDRPHY_REG_B1_DQ2) + SHIFT_TO_CHD_ADDR),
10102 		((DDRPHY_REG_CA_CMD2) + SHIFT_TO_CHD_ADDR),
10103 		((DDRPHY_REG_SHU_B0_DQ13) + SHIFT_TO_CHD_ADDR),
10104 		((DDRPHY_REG_SHU_B1_DQ13) + SHIFT_TO_CHD_ADDR),
10105 		((DDRPHY_REG_SHU_CA_CMD13) + SHIFT_TO_CHD_ADDR),
10106 #endif
10107 	};
10108 
10109 	backup_rank = u1GetRank(p);
10110 	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
10111 
10112 	//backup register value
10113 	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
10114 
10115 	//OE disable - start
10116 	vIO32WriteFldMulti_All(DDRPHY_REG_B0_DQ2, P_Fld( 0 , B0_DQ2_RG_TX_ARDQS_OE_TIE_SEL_B0 ) \
10117 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARDQS_OE_TIE_EN_B0  ) \
10118 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARWCK_OE_TIE_SEL_B0 ) \
10119 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARWCK_OE_TIE_EN_B0  ) \
10120 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARWCKB_OE_TIE_SEL_B0 ) \
10121 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARWCKB_OE_TIE_EN_B0  ) \
10122 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B0 ) \
10123 														  | P_Fld( 1	   , B0_DQ2_RG_TX_ARDQM_OE_TIE_EN_B0  ) \
10124 														  | P_Fld( 0 , B0_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B0 ) \
10125 														  | P_Fld( 0xff 	  , B0_DQ2_RG_TX_ARDQ_OE_TIE_EN_B0	) );
10126 
10127 	vIO32WriteFldMulti_All(DDRPHY_REG_B1_DQ2, P_Fld( 0 , B1_DQ2_RG_TX_ARDQS_OE_TIE_SEL_B1 ) \
10128 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARDQS_OE_TIE_EN_B1  ) \
10129 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARWCK_OE_TIE_SEL_B1 ) \
10130 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARWCK_OE_TIE_EN_B1  ) \
10131 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARWCKB_OE_TIE_SEL_B1 ) \
10132 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARWCKB_OE_TIE_EN_B1  ) \
10133 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARDQM_OE_TIE_SEL_B1 ) \
10134 														  | P_Fld( 1	   , B1_DQ2_RG_TX_ARDQM_OE_TIE_EN_B1  ) \
10135 														  | P_Fld( 0 , B1_DQ2_RG_TX_ARDQ_OE_TIE_SEL_B1 ) \
10136 														  | P_Fld( 0xff 	  , B1_DQ2_RG_TX_ARDQ_OE_TIE_EN_B1	) );
10137 
10138 	vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld( 0 , CA_CMD2_RG_TX_ARCLK_OE_TIE_SEL_CA ) \
10139 														  | P_Fld( 1	   , CA_CMD2_RG_TX_ARCLK_OE_TIE_EN_CA  ) \
10140 														  | P_Fld( 0 , CA_CMD2_RG_TX_ARCS_OE_TIE_SEL_CA ) \
10141 														  | P_Fld( 1	   , CA_CMD2_RG_TX_ARCS_OE_TIE_EN_CA  ) \
10142 														  | P_Fld( 0 , CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA ) \
10143 														  | P_Fld( 0xff 	  , CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA  ) );
10144 
10145 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ13 , P_Fld( 0 , SHU_B0_DQ13_RG_TX_ARDQSB_OE_TIE_SEL_B0  ) \
10146 															  | P_Fld( 1	   , SHU_B0_DQ13_RG_TX_ARDQSB_OE_TIE_EN_B0	 ));
10147 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ13 , P_Fld( 0 , SHU_B1_DQ13_RG_TX_ARDQSB_OE_TIE_SEL_B1  ) \
10148 															  | P_Fld( 1	   , SHU_B1_DQ13_RG_TX_ARDQSB_OE_TIE_EN_B1	 ));
10149 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_CMD13, P_Fld( 0 , SHU_CA_CMD13_RG_TX_ARCLKB_OE_TIE_SEL_CA ) \
10150 															  | P_Fld( 1	   , SHU_CA_CMD13_RG_TX_ARCLKB_OE_TIE_EN_CA  ));
10151 	//OE disable - end
10152 
10153 	u18PhDlyBackup = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ1), SHU_B0_DQ1_RG_ARPI_MIDPI_8PH_DLY_B0);
10154 
10155 	//DramcHWGatingOnOff(p, 0); // disable gating tracking for DQS PI; moved to vApplyConfigBeforeCalibration
10156 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL), P_Fld(0x0, MISC_SHU_STBCAL_STBCALEN)
10157 										| P_Fld(0x0, MISC_SHU_STBCAL_STB_SELPHCALEN));
10158 
10159 #if 0 // 8-Phase calib must to do before DLL init for test only
10160 	//@A60868, Reset PI code to avoid 8-phase offset
10161 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DLL_ARPI0), 0, B0_DLL_ARPI0_RG_ARPI_RESETB_B0);
10162 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DLL_ARPI0), 0, B1_DLL_ARPI0_RG_ARPI_RESETB_B1);
10163 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_CA_DLL_ARPI0), 0, CA_DLL_ARPI0_RG_ARPI_RESETB_CA);
10164 	mcDELAY_US(1);
10165 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DLL_ARPI0), 1, B0_DLL_ARPI0_RG_ARPI_RESETB_B0);
10166 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DLL_ARPI0), 1, B1_DLL_ARPI0_RG_ARPI_RESETB_B1);
10167 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_CA_DLL_ARPI0), 1, CA_DLL_ARPI0_RG_ARPI_RESETB_CA);
10168 	//@A60868, End
10169 
10170 	// @A60868, DQSIEN PI offset clear to 0
10171 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ6), 0, SHU_B0_DQ6_RG_ARPI_OFFSET_DQSIEN_B0);
10172 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ6), 0, SHU_B1_DQ6_RG_ARPI_OFFSET_DQSIEN_B1);
10173 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD6), 0, SHU_CA_CMD6_RG_ARPI_OFFSET_DQSIEN_CA);
10174 #endif
10175 
10176 	// @A60868 for *RANK_SEL_SER_EN* = 0 to DA_RX_ARDQ_RANK_SEL_TXD_*[0]
10177 	//				for *RANK_SEL_SER_EN* = 1 to DA_RX_ARDQ_RANK_SEL_TXD_*[7:0]
10178 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ11), 0, SHU_B0_DQ11_RG_RX_ARDQ_RANK_SEL_SER_EN_B0);
10179 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ11), 0, SHU_B1_DQ11_RG_RX_ARDQ_RANK_SEL_SER_EN_B1);
10180 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD11), 0, SHU_CA_CMD11_RG_RX_ARCA_RANK_SEL_SER_EN_CA);
10181 
10182 	//@Darren, turn the DLL off to stabilize the middle transition (high-to-low or low-to-high) at high vcore
10183 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_DLL1, P_Fld(0x0, SHU_CA_DLL1_RG_ARDLL_PHDET_EN_CA)
10184 										| P_Fld(0x0, SHU_CA_DLL1_RG_ARDLL_PHDET_OUT_SEL_CA));
10185 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DLL1, P_Fld(0x0, SHU_B0_DLL1_RG_ARDLL_PHDET_EN_B0)
10186 										| P_Fld(0x0, SHU_B0_DLL1_RG_ARDLL_PHDET_OUT_SEL_B0));
10187 	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DLL1, P_Fld(0x0, SHU_B1_DLL1_RG_ARDLL_PHDET_EN_B1)
10188 										| P_Fld(0x0, SHU_B1_DLL1_RG_ARDLL_PHDET_OUT_SEL_B1));
10189 
10190 	//MCK4X CG
10191 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0, MISC_CTRL1_R_DMDQSIENCG_EN);
10192 	//@A60868, DQS PI mode for JMTR
10193 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DLL_ARPI2), 0, SHU_B0_DLL_ARPI2_RG_ARPI_CG_DQSIEN_B0); // DQS PI mode
10194 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DLL_ARPI2), 0, SHU_B1_DLL_ARPI2_RG_ARPI_CG_DQSIEN_B1); // DQS PI mode
10195 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_EYE_SCAN_CG_EN); // enable toggle cnt
10196 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL4), 0, MISC_CTRL4_R_OPT2_CG_DQSIEN); // moved to Golden settings for the Jmeter clock
10197 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL), 0, MISC_STBCAL_DQSIENCG_NORMAL_EN); // for DQS*_ERR_CNT
10198 	//@A60868, End
10199 
10200 	// Bypass DQS glitch-free mode
10201 	// RG_RX_*RDQ_EYE_DLY_DQS_BYPASS_B**
10202 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6), 1, B0_DQ6_RG_RX_ARDQ_EYE_DLY_DQS_BYPASS_B0);
10203 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6), 1, B1_DQ6_RG_RX_ARDQ_EYE_DLY_DQS_BYPASS_B1);
10204 
10205 	//Enable DQ eye scan
10206 	//RG_*_RX_EYE_SCAN_EN
10207 	//RG_*_RX_VREF_EN
10208 	//RG_*_RX_SMT_EN
10209 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
10210 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), P_Fld(0x1, MISC_DUTYSCAN1_EYESCAN_DQS_SYNC_EN)
10211 										| P_Fld(0x1, MISC_DUTYSCAN1_EYESCAN_NEW_DQ_SYNC_EN)
10212 										| P_Fld(0x1, MISC_DUTYSCAN1_EYESCAN_DQ_SYNC_EN));
10213 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5), 1, B0_DQ5_RG_RX_ARDQ_EYE_EN_B0);
10214 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5), 1, B1_DQ5_RG_RX_ARDQ_EYE_EN_B1);
10215 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5), 1, B0_DQ5_RG_RX_ARDQ_VREF_EN_B0);
10216 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5), 1, B1_DQ5_RG_RX_ARDQ_VREF_EN_B1);
10217 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ3), 1, B0_DQ3_RG_RX_ARDQ_SMT_EN_B0);
10218 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ3), 1, B1_DQ3_RG_RX_ARDQ_SMT_EN_B1);
10219 	//@A60868, JMTR en
10220 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2), 1, B0_PHY2_RG_RX_ARDQS_JM_EN_B0);
10221 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2), 1, B1_PHY2_RG_RX_ARDQS_JM_EN_B1);
10222 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_JMETER), 1, MISC_JMETER_JMTR_EN);
10223 	//@A60868, End
10224 
10225 	//@A60868, JM_SEL = 1, JM_SEL = 0 for LPBK
10226 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2), 1, B0_PHY2_RG_RX_ARDQS_JM_SEL_B0);
10227 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2), 1, B1_PHY2_RG_RX_ARDQS_JM_SEL_B1);
10228 
10229 	//Enable MIOCK jitter meter mode ( RG_RX_MIOCK_JIT_EN=1)
10230 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_MIOCK_JIT_EN);
10231 
10232 	//Disable DQ eye scan (b'1), for counter clear
10233 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
10234 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_DQSERRCNT_DIS);
10235 
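	/*
	 * Outline of the search below (a restatement of the existing flow): the
	 * DQS edge position is first measured with the DQSIEN PI set to the
	 * nominal 0-degree code (u2R0) and then to the 180-degree code (u2R180);
	 * the expected 45-degree position is R = u2R0 + (u2R180 - u2R0) / 4.  The
	 * 8-phase delay code is then swept while measuring the actual 45-degree
	 * edge (u2P), and the code with the smallest |u2R - u2P| error is kept.
	 */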
10236 	for (u18Phase_SM = DQS_8PH_DEGREE_0; u18Phase_SM < DQS_8PH_DEGREE_MAX; u18Phase_SM++)
10237 	{
10238 		switch (u18Phase_SM)
10239 		{
10240 			case DQS_8PH_DEGREE_0:
10241 				u1DqsienPI = 16;
10242 				u18Ph_start = 0;
10243 				u18Ph_end = 1;
10244 				break;
10245 			case DQS_8PH_DEGREE_180:
10246 				u1DqsienPI = 48;
10247 				u18Ph_start = 0;
10248 				u18Ph_end = 1;
10249 				break;
10250 			case DQS_8PH_DEGREE_45:
10251 				u1DqsienPI = 24;
10252 				u18Ph_start = 0;
10253 				u18Ph_end = 32;
10254 				break;
10255 			default:
10256 				err("u18Phase_SM err!\n");
10257 				#if __ETT__
10258 				while (1);
10259 				#endif
10260 		}
10261 
10262 		msg("\n[Dramc8PhaseCal] 8-Phase SM_%d, 8PH_dly (%d~%d), DQSIEN PI = %d, 8PH_Dly = %d\n", u18Phase_SM, u18Ph_start, u18Ph_end, u1DqsienPI, u18PhDlyBackup);
10263 
10264 		//to see 1T(H,L) or 1T(L,H) from delaycell=0 to 127
10265 		//NOTE: Must set dual ranks for Rx path
10266 		for (u1RankIdx = RANK_0; u1RankIdx < p->support_rank_num; u1RankIdx++)
10267 		{
10268 			vSetRank(p, u1RankIdx);
10269 			// bit [6] of SHU_RK_B0_DQSIEN_PI_DLY_DQSIEN_PI_B0 is unused (ignore)
10270 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_RK_B0_DQSIEN_PI_DLY), u1DqsienPI, SHU_RK_B0_DQSIEN_PI_DLY_DQSIEN_PI_B0); // for rank*_B0
10271 		}
10272 		vSetRank(p, backup_rank);
10273 
10274 		for (u18Ph_dly = u18Ph_start; u18Ph_dly < u18Ph_end; u18Ph_dly++)
10275 		{
10276 			msg("8PH dly = %d\n", u18Ph_dly);
10277 
10278 			u1DqsLevel = 0xff;
10279 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ1), u18Ph_dly, SHU_B0_DQ1_RG_ARPI_MIDPI_8PH_DLY_B0);
10280 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ1), u18Ph_dly, SHU_B1_DQ1_RG_ARPI_MIDPI_8PH_DLY_B1);
10281 			vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD1), u18Ph_dly, SHU_CA_CMD1_RG_ARPI_MIDPI_8PH_DLY_CA);
10282 
10283 			for (ucdqs_dly = u2Jm_dly_start; ucdqs_dly < u2Jm_dly_end; ucdqs_dly += u2Jm_dly_step)
10284 			{
10285 				//Set DQS delay (RG_*_RX_DQS_EYE_DLY)
10286 				vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B0_PHY2), ucdqs_dly, B0_PHY2_RG_RX_ARDQS_JM_DLY_B0);
10287 				vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_B1_PHY2), ucdqs_dly, B1_PHY2_RG_RX_ARDQS_JM_DLY_B1);
10288 				DramPhyReset(p);
10289 
10290 				//Reset eye scan counters (reg_sw_rst): 1 to 0
10291 				vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_REG_SW_RST);
10292 				vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_REG_SW_RST);
10293 
10294 				//Enable DQ eye scan (b'1)
10295 				vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 1, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
10296 
10297 				//2ns/sample; delay 10us here to accumulate ~5000 samples
10298 				mcDELAY_US(10);
10299 
10300 				//Disable DQ eye scan (b'0), for counter latch
10301 				vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTYSCAN1), 0, MISC_DUTYSCAN1_RX_EYE_SCAN_EN);
10302 
10303 				//Read the counter values from registers (toggle_cnt*, dqs_err_cnt*);
10304 				u4sample_cnt = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_TOGGLE_CNT), MISC_DUTY_TOGGLE_CNT_TOGGLE_CNT);
10305 				u4ones_cnt[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS0_ERR_CNT), MISC_DUTY_DQS0_ERR_CNT_DQS0_ERR_CNT);
10306 				//u4ones_cnt[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS1_ERR_CNT), MISC_DUTY_DQS1_ERR_CNT_DQS1_ERR_CNT);
10307 				//u4ones_cnt[2] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS2_ERR_CNT), MISC_DUTY_DQS2_ERR_CNT_DQS2_ERR_CNT);
10308 				//u4ones_cnt[3] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DUTY_DQS3_ERR_CNT), MISC_DUTY_DQS3_ERR_CNT_DQS3_ERR_CNT);
10309 				//Darren-msg("%3d : %8d, %8d, %8d\n", ucdqs_dly, u4sample_cnt, u4ones_cnt[0], u4ones_cnt[1]);
10310 
10311 				//change to boolean value
10312 				if (u4ones_cnt[0] < (u4sample_cnt / 2))
10313 				{
10314 					if (u1DqsLevel == 0xff) // print once
10315 					{
10316 						msg("[L] %d, %8d\n", ucdqs_dly, u4ones_cnt[0]);
10317 						//msg("[L] %d, %8d, %8d\n", ucdqs_dly, u4ones_cnt[0], u4ones_cnt[1]);
10318 					}
10319 
10320 					u1DqsLevel = 0;
10321 				}
10322 				else
10323 				{
10324 					if (u1DqsLevel == 0)  // from low to high
10325 					{
10326 						u1DqsLevel = 1;
10327 						msg("[H] %d, %8d\n", ucdqs_dly, u4ones_cnt[0]);
10328 						//msg("[H] %d, %8d, %8d\n", ucdqs_dly, u4ones_cnt[0], u4ones_cnt[1]);
10329 
10330 						if (u18Phase_SM == DQS_8PH_DEGREE_0)
10331 						{
10332 							u2R0 = ucdqs_dly;
10333 							msg("R0 = %d\n", u2R0);
10334 							break; // break ucdqs_dly for loop
10335 						}
10336 						else if (u18Phase_SM == DQS_8PH_DEGREE_180)
10337 						{
10338 							u2R180 = ucdqs_dly;
10339 							if (u2R180 > u2R0)
10340 							{
10341 								u2R = u2R0 + ((u2R180 - u2R0) >> 2); // u2R180 >= u2R0 for (u1R180 - u1R0)/4 for 180 degree. /2 for 90 degree
10342 								msg("R = %d, R180 = %d\n", u2R, u2R180);
10343 								break; // break ucdqs_dly for loop
10344 							}
10345 							else
10346 							{
10347 								u1DqsLevel = 0xff; //next u2Jm_dly to find edge (L->H)
10348 							}
10349 						}
10350 						else if (u18Phase_SM == DQS_8PH_DEGREE_45)
10351 						{
10352 							u2P = ucdqs_dly;
10353 							if (u2P > u2R0) // u2P ~= DQS_8PH_DEGREE_180
10354 							{
10355 								// Absolute to find min diff
10356 								if (u2R > u2P)
10357 									s2Err_code = u2R - u2P;
10358 								else
10359 									s2Err_code = u2P - u2R;
10360 
10361 								if (s2Err_code == 0)
10362 								{
10363 									u18Ph_dly_final = u18Ph_dly;
10364 									u18Ph_dly_loop_break = 1;
10365 								}
10366 								else if (s2Err_code < s2Err_code_min)
10367 								{
10368 									s2Err_code_min = s2Err_code;
10369 									u18Ph_dly_final = u18Ph_dly;
10370 									u1loop_cnt = 0;
10371 								}
10372 								else if (s2Err_code >= s2Err_code_min)
10373 								{
10374 									// check early break for u18Ph_dly for loop
10375 									u1loop_cnt++;
10376 									if (u1loop_cnt > u1early_break_cnt)
10377 									{
10378 										u18Ph_dly_loop_break = 1;
10379 									}
10380 								}
10381 
10382 								msg("diff (P-R) = %d, min = %d, break count = %d\n", s2Err_code, s2Err_code_min, u1loop_cnt);
10383 
10384 								break; // if (s2Err_code == s2Err_code_min) for next u18Ph_dly
10385 							}
10386 							else
10387 							{
10388 								u1DqsLevel = 0xff; //next u2Jm_dly to find edge (L->H)
10389 							}
10390 						}
10391 						else
10392 						{
10393 							err("u18Phase_SM err!\n");
10394 							#if __ETT__
10395 							while (1);
10396 							#endif
10397 						}
10398 					}
10399 				}
10400 
10401 			}
10402 
10403 			// Error handling
10404 			if ((u1DqsLevel == 0xff) || (u1DqsLevel == 0))
10405 			{
10406 				// (u1DqsLevel == 0) => level fell from 1 to 0, or stayed at all 0's
10407 				// (u1DqsLevel == 0xff) => level stayed at all 1's
10408 				// NOTE: 8-Phase calibration must see a 0-to-1 transition
10409 				u18Ph_dly_final = u18PhDlyBackup; //rollback to init settings
10410 				eDRAMStatus = DRAM_FAIL;
10411 				err("\n[Dramc8PhaseCal] 8-Phase SM_%d failed (reverting to default)!!!\n", u18Phase_SM);
10412 				goto exit;
10413 			} else if (u18Ph_dly_loop_break == 1)
10414 				break;
10415 
10416 		}
10417 	}
10418 
10419 exit:
10420 	msg("\n[Dramc8PhaseCal] u18Ph_dly_final = %d\n\n", u18Ph_dly_final);
10421 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B0_DQ1, u18Ph_dly_final, SHU_B0_DQ1_RG_ARPI_MIDPI_8PH_DLY_B0);
10422 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B1_DQ1, u18Ph_dly_final, SHU_B1_DQ1_RG_ARPI_MIDPI_8PH_DLY_B1);
10423 	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD1, u18Ph_dly_final, SHU_CA_CMD1_RG_ARPI_MIDPI_8PH_DLY_CA);
10424 
10425 	//restore to original value
10426 	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
10427 	DramcBroadcastOnOff(u4backup_broadcast);
10428 
10429 	return eDRAMStatus;
10430 #endif
10431 }
10432 
10433 #if SIMULATION_SW_IMPED
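/*
 * Editor's note: DramcSwImpedanceSaveRegister() copies the calibrated
 * impedance codes held in gDramcSwImpedanceResult[] into the SHU_MISC_DRVING*
 * fields for DQ, DQS, CMD/CLK and CS/CKE. 'save_to_where' selects which
 * shuffle register group is written (offset = save_to_where * SHU_GRP_DDRPHY_OFFSET).
 */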
10434 void DramcSwImpedanceSaveRegister(DRAMC_CTX_T *p, U8 ca_freq_option, U8 dq_freq_option, U8 save_to_where)
10435 {
10436 	U32 backup_broadcast;
10437 
10438 	backup_broadcast = GetDramcBroadcast();
10439 
10440 	DramcBroadcastOnOff(DRAMC_BROADCAST_ON);
10441 	//DQ
10442 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING1 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVP], SHU_MISC_DRVING1_DQDRVP2) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVN], SHU_MISC_DRVING1_DQDRVN2));
10443 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING2 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVP], SHU_MISC_DRVING2_DQDRVP1) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVN], SHU_MISC_DRVING2_DQDRVN1));
10444 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING3 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTP], SHU_MISC_DRVING3_DQODTP2) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTN], SHU_MISC_DRVING3_DQODTN2));
10445 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING4 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTP], SHU_MISC_DRVING4_DQODTP1) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTN], SHU_MISC_DRVING4_DQODTN1));
10446 
10447 	//DQS
10448 	#if SUPPORT_HYNIX_RX_DQS_WEAK_PULL
10449 	if (p->vendor_id == VENDOR_HYNIX)
10450 	{  U32 temp_value[4];
10451 		int i;
10452 		for(i=0; i<4; i++)
10453 		{
10454 			temp_value[i] = SwImpedanceAdjust(gDramcSwImpedanceResult[dq_freq_option][i], 2);
10455 		}
10456 		 vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING1 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(temp_value[0], SHU_MISC_DRVING1_DQSDRVP2) | P_Fld(temp_value[1], SHU_MISC_DRVING1_DQSDRVN2));
10457 		 vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING1 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(temp_value[0], SHU_MISC_DRVING1_DQSDRVP1) | P_Fld(temp_value[1], SHU_MISC_DRVING1_DQSDRVN1));
10458 		 vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING3 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(temp_value[2], SHU_MISC_DRVING3_DQSODTP2) | P_Fld(temp_value[3], SHU_MISC_DRVING3_DQSODTN2));
10459 		 vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING3 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(temp_value[2], SHU_MISC_DRVING3_DQSODTP) | P_Fld(temp_value[3], SHU_MISC_DRVING3_DQSODTN));
10460 	}
10461 	else
10462 	#endif
10463 	{
10464 		vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING1 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVP], SHU_MISC_DRVING1_DQSDRVP2) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVN], SHU_MISC_DRVING1_DQSDRVN2));
10465 		vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING1 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVP], SHU_MISC_DRVING1_DQSDRVP1) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][DRVN], SHU_MISC_DRVING1_DQSDRVN1));
10466 		vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING3 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTP], SHU_MISC_DRVING3_DQSODTP2) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTN], SHU_MISC_DRVING3_DQSODTN2));
10467 		vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING3 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTP], SHU_MISC_DRVING3_DQSODTP) | P_Fld(gDramcSwImpedanceResult[dq_freq_option][ODTN], SHU_MISC_DRVING3_DQSODTN));
10468 	}
10469 
10470 	//CMD & CLK
10471 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING2 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[ca_freq_option][DRVP], SHU_MISC_DRVING2_CMDDRVP2) | P_Fld(gDramcSwImpedanceResult[ca_freq_option][DRVN], SHU_MISC_DRVING2_CMDDRVN2));
10472 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING2 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[ca_freq_option][DRVP], SHU_MISC_DRVING2_CMDDRVP1) | P_Fld(gDramcSwImpedanceResult[ca_freq_option][DRVN], SHU_MISC_DRVING2_CMDDRVN1));
10473 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING4 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[ca_freq_option][ODTP], SHU_MISC_DRVING4_CMDODTP2) | P_Fld(gDramcSwImpedanceResult[ca_freq_option][ODTN], SHU_MISC_DRVING4_CMDODTN2));
10474 	vIO32WriteFldMulti((DDRPHY_REG_SHU_MISC_DRVING4 + save_to_where * SHU_GRP_DDRPHY_OFFSET), P_Fld(gDramcSwImpedanceResult[ca_freq_option][ODTP], SHU_MISC_DRVING4_CMDODTP1) | P_Fld(gDramcSwImpedanceResult[ca_freq_option][ODTN], SHU_MISC_DRVING4_CMDODTN1));
10475 
10476 	//RG_TX_*RCKE_DRVP/RG_TX_*RCKE_DRVN are not set elsewhere, so set 0xA first
10477 	//@Maoauo confirm, RG no function
10478 	//vIO32WriteFldAlign((DDRPHY_SHU_CA_CMD11 + save_to_where * SHU_GRP_DDRPHY_OFFSET), gDramcSwImpedanceResult[ca_freq_option][DRVP], SHU_CA_CMD11_RG_TX_ARCKE_DRVP);
10479 	//vIO32WriteFldAlign((DDRPHY_SHU_CA_CMD11 + save_to_where * SHU_GRP_DDRPHY_OFFSET), gDramcSwImpedanceResult[ca_freq_option][DRVN], SHU_CA_CMD11_RG_TX_ARCKE_DRVN);
10480 
10481 	//CKE
10482 	// CKE is full swing.
10483 	// LP4/LP4X set DRVP/DRVN as LP3's default value
10484 	// DRVP=8 -> 0xA for 868 by Alucary Chen
10485 	// DRVN=9 -> 0xA for 868 by Alucary Chen
10486 	//DRVP[4:0] = RG_TX_ARCMD_PU_PRE<1:0>, RG_TX_ARCLK_DRVN_PRE<2:0> for La_fite only
10487 	//@Darren-vIO32WriteFldAlign((DDRPHY_REG_SHU_CA_CMD3 + save_to_where * SHU_GRP_DDRPHY_OFFSET), (8>>3)&0x3, SHU_CA_CMD3_RG_TX_ARCMD_PU_PRE); //Darren need confirm
10488 	//@Darren-vIO32WriteFldAlign((DDRPHY_REG_SHU_CA_CMD0 + save_to_where * SHU_GRP_DDRPHY_OFFSET), 8&0x7, SHU_CA_CMD0_RG_TX_ARCLK_DRVN_PRE); //Darren need confirm
10489 	//DRVN[4:0] = RG_ARCMD_REV<12:8>
10490 	//@Darren-vIO32WriteFldAlign_All((DDRPHY_SHU_CA_DLL2 + save_to_where * SHU_GRP_DDRPHY_OFFSET), 9, SHU_CA_DLL2_RG_TX_ARCKE_DRVN_B0);
10491 	#if (fcFOR_CHIP_ID == fcA60868) // for 868 CS and CKE control together
10492 	vIO32WriteFldAlign((DDRPHY_REG_MISC_SHU_DRVING8 + save_to_where * SHU_GRP_DDRPHY_OFFSET), 0xA, MISC_SHU_DRVING8_CS_DRVP);
10493 	vIO32WriteFldAlign((DDRPHY_REG_MISC_SHU_DRVING8 + save_to_where * SHU_GRP_DDRPHY_OFFSET), 0xA, MISC_SHU_DRVING8_CS_DRVN);
10494 	#elif (fcFOR_CHIP_ID == fcMargaux)
10495 	// @Darren, confirm with ACD Alucary,
10496 	// MISC_SHU_DRVING8_CS_DRVP & MISC_SHU_DRVING8_CS_DRVN -> DA_TX_ARCKE_DRVP_C0[4:0]	 & DA_TX_ARCKE_DRVN_C0[4:0]
10497 	vIO32WriteFldAlign((DDRPHY_REG_MISC_SHU_DRVING8 + save_to_where * SHU_GRP_DDRPHY_OFFSET), 0xF, MISC_SHU_DRVING8_CS_DRVP);
10498 	vIO32WriteFldAlign((DDRPHY_REG_MISC_SHU_DRVING8 + save_to_where * SHU_GRP_DDRPHY_OFFSET), 0x14, MISC_SHU_DRVING8_CS_DRVN);
10499 	#endif
10500 
10501 	DramcBroadcastOnOff(backup_broadcast);
10502 }
10503 
10504 //-------------------------------------------------------------------------
10505 /** vImpCalVrefSel
10506  *	Set IMP_VREF_SEL for DRVP, DRVN, Run-time/Tracking
10507  *	(Refer to "IMPCAL Settings" document register "RG_RIMP_VREF_SEL" settings)
10508  *	@param p				Pointer of context created by DramcCtxCreate.
10509  *	@param	freq_region 	(enum): pass freq_region (IMP_LOW_FREQ/IMP_HIGH_FREQ) for LP4X
10510  *	@param	u1ImpCalStage	(U8): During DRVP, DRVN, run-time/tracking stages
10511  *								  some vref_sel values are different
10512  */
10513 //-------------------------------------------------------------------------
10514 /* Definitions to make IMPCAL_VREF_SEL function more readable */
10515 #define IMPCAL_STAGE_DRVP	  0
10516 #define IMPCAL_STAGE_DRVN	  1
10517 #define IMPCAL_STAGE_ODTP	  2
10518 #define IMPCAL_STAGE_ODTN	  3
10519 #define IMPCAL_STAGE_TRACKING 4
10520 
10521 /* LP4X IMP_VREF_SEL w/o term ==== */
10522 #define IMP_TRACK_LP4X_LOWFREQ_VREF_SEL  0x37 // for <= DDR3733
10523 #define IMP_TRACK_LP4X_HIGHFREQ_VREF_SEL  0x3a // for > 3733 and Samsung NT-ODTN
10524 /* LPDDR5 IMP_VREF_SEL w/o term ==== */
10525 #define IMP_TRACK_LP5_LOWFREQ_VREF_SEL	0x38 // for <= DDR3733
10526 #define IMP_TRACK_LP5_HIGHFREQ_VREF_SEL  0x3a // for > 3733 and Samsung NT-ODTN
10527 
10528 static const U8 ImpLP4VrefSel[IMP_VREF_MAX][IMP_DRV_MAX] = {
10529 				  /* DRVP  DRVN  ODTP  ODTN */
10530 /* IMP_LOW_FREQ */	{0x37, 0x33, 0x00, 0x37},
10531 /* IMP_HIGH_FREQ */ {0x3a, 0x33, 0x00, 0x3a},
10532 /* IMP_NT_ODTN */	{0x2a, 0x2a, 0x00, 0x3a}
10533 };
10534 
10535 static const U8 ImpLP5VrefSel[IMP_VREF_MAX][IMP_DRV_MAX] = {
10536 				  /* DRVP  DRVN  ODTP  ODTN */
10537 /* IMP_LOW_FREQ */	{0x38, 0x33, 0x00, 0x38},
10538 /* IMP_HIGH_FREQ */ {0x3a, 0x33, 0x00, 0x3a},
10539 /* IMP_NT_ODTN */	{0x2a, 0x2a, 0x00, 0x3a}
10540 };
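/*
 * Editor's note: vImpCalVrefSel() indexes these tables as
 * ImpLPxVrefSel[freq_region][calibration stage] to pick RG_RIMP_VREF_SEL for
 * the DRVP/DRVN/ODTN stages; the run-time tracking stage uses the
 * IMP_TRACK_*_VREF_SEL defines above instead.
 */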
10541 
10542 /* Refer to "IMPCAL Settings" document register "RG_RIMP_VREF_SEL" settings */
10543 // @Maoauo: DRVP/ODTN for IMP tracking. But DRVN not support IMP tracking. (before La_fite)
10544 // DRVP/DRVN/ODTN for IMP tracking after Pe_trus
10545 static void vImpCalVrefSel(DRAMC_CTX_T *p, DRAMC_IMP_T efreq_region, U8 u1ImpCalStage)
10546 {
10547 	U8 u1RegTmpValue = 0;
10548 	U32 u4DrvFld = 0;
10549 
10550 	if (p->dram_type == TYPE_LPDDR4X)
10551 	{
10552 		if (u1ImpCalStage == IMPCAL_STAGE_TRACKING)
10553 			u1RegTmpValue = (efreq_region == IMP_LOW_FREQ) ? IMP_TRACK_LP4X_LOWFREQ_VREF_SEL : IMP_TRACK_LP4X_HIGHFREQ_VREF_SEL;
10554 		else
10555 			u1RegTmpValue = ImpLP4VrefSel[efreq_region][u1ImpCalStage];
10556 	}
10557 	else if (p->dram_type == TYPE_LPDDR5)
10558 	{
10559 		if (u1ImpCalStage == IMPCAL_STAGE_TRACKING)
10560 			u1RegTmpValue = (efreq_region == IMP_LOW_FREQ) ? IMP_TRACK_LP5_LOWFREQ_VREF_SEL : IMP_TRACK_LP5_HIGHFREQ_VREF_SEL;
10561 		else
10562 			u1RegTmpValue = ImpLP5VrefSel[efreq_region][u1ImpCalStage];
10563 	}
10564 	else
10565 	{
10566 		err("[vImpCalVrefSel] Warning: Need to confirm DRAM type for IMP_VREF_SEL !!!\n");
10567 		#if __ETT__
10568 		while(1);
10569 		#endif
10570 	}
10571 
10572 	switch (u1ImpCalStage)
10573 	{
10574 		case IMPCAL_STAGE_DRVP:
10575 			u4DrvFld = SHU_CA_CMD12_RG_RIMP_VREF_SEL_DRVP;
10576 			break;
10577 		case IMPCAL_STAGE_DRVN:
10578 			u4DrvFld = SHU_CA_CMD12_RG_RIMP_VREF_SEL_DRVN;
10579 			break;
10580 		case IMPCAL_STAGE_ODTN:
10581 			u4DrvFld = SHU_CA_CMD12_RG_RIMP_VREF_SEL_ODTN;
10582 			break;
10583 		default:
10584 			err("[vImpCalVrefSel] Warning: Need to confirm u1ImpCalStage for SW IMP Calibration !!!\n");
10585 			break;
10586 	}
10587 
10588 	// dbg msg after vref_sel selection
10589 	msg3("[vImpCalVrefSel] IMP_VREF_SEL 0x%x, IMPCAL stage:%u, freq_region:%u\n",
10590 					  u1RegTmpValue, u1ImpCalStage, efreq_region);
10591 
10592 	/* Set IMP_VREF_SEL register field's value */
10593 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD12), u1RegTmpValue, u4DrvFld);
10594 
10595 	return;
10596 }
10597 
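/*
 * Editor's note: DramcSwImpCalResult() linearly sweeps the impedance code
 * (0..31) in the given SHU_MISC_IMPCAL1 field and reads back RGS_RIMPCALOUT
 * after each step. The sweep stops at the first code where the comparator
 * output matches the expected polarity (1 when calibrating DRVP, 0 when
 * calibrating DRVN/ODTN); if no code matches, 31 is returned.
 */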
10598 static U32 DramcSwImpCalResult(DRAMC_CTX_T *p, const char *drvType, U32 u4Fld)
10599 {
10600 	U32 u4ImpxDrv = 0, u4ImpCalResult = 0;
10601 	U32 u4CheckImpChange = (u4Fld == SHU_MISC_IMPCAL1_IMPDRVP)? 1: 0;
10602 
10603 	for (u4ImpxDrv = 0; u4ImpxDrv < 32; u4ImpxDrv++)
10604 	{
10605 #if 0 // for A60868 no need
10606 		if (u4ImpxDrv == 16) //0~15, 29~31
10607 			u4ImpxDrv = 29;
10608 #endif
10609 
10610 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_IMPCAL1), u4ImpxDrv, u4Fld);
10611 		mcDELAY_US(1);
10612 		u4ImpCalResult = u4IO32ReadFldAlign((DDRPHY_REG_MISC_PHY_RGS_CMD), MISC_PHY_RGS_CMD_RGS_RIMPCALOUT);
10613 		msg2("OCD %s=%d ,CALOUT=%d\n", drvType, u4ImpxDrv, u4ImpCalResult);
10614 
10615 		if (u4ImpCalResult == u4CheckImpChange)//first found
10616 		{
10617 			msg2("\nOCD %s calibration OK! %s=%d\n\n", drvType, drvType, u4ImpxDrv);
10618 			break;
10619 		}
10620 	}
10621 
10622 	if (u4ImpxDrv == 32) // Can't find SwImp drv results
10623 	{
10624 		u4ImpxDrv = 31;
10625 		msg2("\nOCD %s calibration FAIL! %s=%d\n\n", drvType, drvType, u4ImpxDrv);
10626 	}
10627 
10628 	return u4ImpxDrv;
10629 }
10630 
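/*
 * Editor's note on the sequence below: SW impedance calibration disables HW
 * impedance tracking, enables the impedance comparator, then calibrates
 * DRVP -> DRVN -> ODTN (ODTP is skipped). The DRVP result is programmed back
 * into SHU_MISC_IMPCAL1_IMPDRVP before the DRVN/ODTN sweeps, and the final
 * codes are stored in gDramcSwImpedanceResult[freq_region][].
 */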
10631 DRAM_STATUS_T DramcSwImpedanceCal(DRAMC_CTX_T *p, U8 u1Para, DRAMC_IMP_T freq_region)
10632 {
10633 	U32 u4DRVP_Result = 0xff, u4ODTN_Result = 0xff, u4DRVN_Result = 0xff;
10634 	//U32 u4BaklReg_DDRPHY_MISC_IMP_CTRL0, u4BaklReg_DDRPHY_MISC_IMP_CTRL1;
10635 	U32 u4BaklReg_DRAMC_REG_IMPCAL;
10636 	U8 backup_channel;
10637 	U32 backup_broadcast;
10638 	U8 u1DrvType = 0, u1CALI_ENP = 0, u1CALI_ENN = 0, u1DDR4 = 0;
10639 	U32 u4SwImpCalResult = 0, u4DrvFld = 0;
10640 	const char *drvStr = "NULL";
10641 
10642 	backup_broadcast = GetDramcBroadcast();
10643 	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
10644 
10645 	//default set FAIL
10646 	vSetCalibrationResult(p, DRAM_CALIBRATION_SW_IMPEDANCE, DRAM_FAIL);
10647 
10648 	//Suspend: DA_RIMP_DMSUS=1
10649 	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_LP_CTRL, P_Fld(0x0, MISC_LP_CTRL_RG_ARDMSUS_10) | \
10650 						P_Fld(0x0, MISC_LP_CTRL_RG_ARDMSUS_10_LP_SEL) | \
10651 						P_Fld(0x0, MISC_LP_CTRL_RG_RIMP_DMSUS_10) | \
10652 						P_Fld(0x0, MISC_LP_CTRL_RG_RIMP_DMSUS_10_LP_SEL));
10653 
10654 	//Disable IMP HW Tracking
10655 	//Disable HW impedance tracking for all channels, because SwImpCal will be K'ed (re-calibrated) again when resuming from DDR reserved mode
10656 	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_IMPCAL, 0, MISC_IMPCAL_IMPCAL_HW);
10657 
10658 	backup_channel = p->channel;
10659 	vSetPHY2ChannelMapping(p, CHANNEL_A);
10660 
10661 	//Register backup
10662 	//u4BaklReg_DDRPHY_MISC_IMP_CTRL0 = u4IO32Read4B((DDRPHY_MISC_IMP_CTRL0));
10663 	//u4BaklReg_DDRPHY_MISC_IMP_CTRL1 = u4IO32Read4B((DDRPHY_MISC_IMP_CTRL1));
10664 	u4BaklReg_DRAMC_REG_IMPCAL = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMPCAL));
10665 
10666 	//RG_IMPCAL_VREF_SEL (now set in vImpCalVrefSel())
10667 	//RG_IMPCAL_LP3_EN=0, RG_IMPCAL_LP4_EN=1
10668 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMP_CTRL1), 0, MISC_IMP_CTRL1_RG_RIMP_PRE_EN);
10669 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMPCAL), P_Fld(0, MISC_IMPCAL_IMPCAL_CALI_ENN) | P_Fld(1, MISC_IMPCAL_IMPCAL_IMPPDP) | \
10670 						P_Fld(1, MISC_IMPCAL_IMPCAL_IMPPDN));	 //RG_RIMP_BIAS_EN and RG_RIMP_VREF_EN move to IMPPDP and IMPPDN
10671 
10672 	if (is_lp5_family(p))
10673 		u1DDR4 = 0;
10674 	else //LPDDR4
10675 		u1DDR4 = 1;
10676 
10677 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMP_CTRL1), P_Fld(1, MISC_IMP_CTRL1_RG_IMP_EN) | \
10678 						P_Fld(0, MISC_IMP_CTRL1_RG_RIMP_DDR3_SEL) | \
10679 						P_Fld(1, MISC_IMP_CTRL1_RG_RIMP_VREF_EN) | \
10680 						P_Fld(u1DDR4, MISC_IMP_CTRL1_RG_RIMP_DDR4_SEL));
10681 	mcDELAY_US(1);
10682 
10683 	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMPCAL), 1, MISC_IMPCAL_IMPCAL_CALI_EN);
10684 	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_IMPCAL1), P_Fld(0, SHU_MISC_IMPCAL1_IMPDRVN) | P_Fld(0, SHU_MISC_IMPCAL1_IMPDRVP));
10685 
10686 
10687 	//LP4X: ODTN/DRVN/DRVP calibration start
10688 	for (u1DrvType = DRVP; u1DrvType < IMP_DRV_MAX; u1DrvType++) // Calibration sequence for DRVP, DRVN and ODTN
10689 	{
10690 		if (u1DrvType == ODTP) // no use, skip ODTP
10691 			continue;
10692 
10693 		/* Set IMP_VREF_SEL value for DRVP/DRVN and ODTN */
10694 		vImpCalVrefSel(p, freq_region, u1DrvType);
10695 
10696 		switch (u1DrvType)
10697 		{
10698 			case DRVP:
10699 				drvStr = "DRVP";
10700 				u1CALI_ENP = 0x1;
10701 				u1CALI_ENN = 0x0;
10702 				u4DrvFld = SHU_MISC_IMPCAL1_IMPDRVP;
10703 				u4DRVP_Result = 0;
10704 				break;
10705 			case DRVN:
10706 			case ODTN:
10707 				drvStr = (u1DrvType == DRVN)? "DRVN" : "ODTN";
10708 				u1CALI_ENP = 0x0;
10709 				u1CALI_ENN = (u1DrvType == DRVN)? 0x0: 0x1; // 0x1 change to ODTN path
10710 				u4DrvFld = SHU_MISC_IMPCAL1_IMPDRVN;
10711 				break;
10712 			default:
10713 				err("[DramcSwImpedanceCal] Warning: Need to confirm u1DrvType for SW IMP Calibration !!!\n");
10714 				break;
10715 		}
10716 
10717 		// @A60868 for DRVn/p and ODTn select
10718 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMPCAL), u1CALI_ENP, MISC_IMPCAL_IMPCAL_CALI_ENP);  //MISC_IMP_CTRL1_RG_IMP_OCD_PUCMP_EN move to CALI_ENP
10719 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMPCAL), u1CALI_ENN, MISC_IMPCAL_IMPCAL_CALI_ENN);  //MISC_IMP_CTRL1_RG_RIMP_ODT_EN move to CALI_ENN
10720 
10721 		msg2("\n\n\tK %s\n", drvStr);
10722 
10723 		//DRVP=DRVP_FINAL
10724 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_MISC_IMPCAL1), u4DRVP_Result, SHU_MISC_IMPCAL1_IMPDRVP);  //PUCMP_EN move to CALI_ENP
10725 		//RIMP_DRV05 for LP4/5
10726 		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD12), 0, SHU_CA_CMD12_RG_RIMP_DRV05);
10727 
10728 
10729 		//If RGS_TX_OCD_IMPCALOUTX=1
10730 		//RG_IMPX_DRVN++;
10731 		//Else save RG_IMPX_DRVN value and assign to DRVN
10732 		u4SwImpCalResult = DramcSwImpCalResult(p, drvStr, u4DrvFld);
10733 
10734 		switch (u1DrvType)
10735 		{
10736 			case DRVP:
10737 				u4DRVP_Result = u4SwImpCalResult;
10738 				break;
10739 			case DRVN:
10740 				u4DRVN_Result = u4SwImpCalResult;
10741 				break;
10742 			case ODTN:
10743 				u4ODTN_Result = u4SwImpCalResult;
10744 				break;
10745 			default:
10746 				err("[DramcSwImpedanceCal] Warning: Need to confirm u4SwImpCalResult for SW IMP Calibration !!!\n");
10747 				break;
10748 		}
10749 	}
10750 	//Register Restore
10751 	vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_MISC_IMPCAL), u4BaklReg_DRAMC_REG_IMPCAL);
10752 	//vIO32Write4B((DDRPHY_MISC_IMP_CTRL0), u4BaklReg_DDRPHY_MISC_IMP_CTRL0);
10753 	//vIO32Write4B((DDRPHY_MISC_IMP_CTRL1), u4BaklReg_DDRPHY_MISC_IMP_CTRL1);
10754 
10755 
10756 /*** default value if K fail
10757 	LP3:  DRVP=8, DRVN=9
10758 	LP4:  DRVP=6, DRVN=9, ODTN=14
10759 	LP4X(UT): DRVP=12, DRVN=9
10760 	LP4X(T):  DRVP=5, DRVN=9, ODTN=14
10761 	LP4P: DRVP=8, DRVN=10
10762 ***/
10763 	msg("[SwImpedanceCal] DRVP=%d, DRVN=%d, ODTN=%d\n", u4DRVP_Result, u4DRVN_Result, u4ODTN_Result);
10764 
10765 	#if 0//HYNIX_IMPX_ADJUST
10766 	if (u1Para)
10767 	{
10768 		u4ODTN_Result = ImpedanceAdjustment_Hynix(u4ODTN_Result, u1Para);
10769 	}
10770 	#endif
10771 
10772 	gDramcSwImpedanceResult[freq_region][DRVP] = u4DRVP_Result;
10773 	gDramcSwImpedanceResult[freq_region][DRVN] = u4DRVN_Result;
10774 	gDramcSwImpedanceResult[freq_region][ODTP] = 0;
10775 	gDramcSwImpedanceResult[freq_region][ODTN] = u4ODTN_Result;
10776 
10777 #if RUNTIME_SHMOO_RELEATED_FUNCTION && SUPPORT_SAVE_TIME_FOR_CALIBRATION
10778 	{
10779 		U8 u1drv;
10780 		{
10781 			for (u1drv = 0; u1drv < 4; u1drv++)
10782 			{
10783 				if (p->femmc_Ready == 0)
10784 					p->pSavetimeData->u1SwImpedanceResule[freq_region][u1drv] = gDramcSwImpedanceResult[freq_region][u1drv];
10785 				else
10786 				{
10787 					gDramcSwImpedanceResult[freq_region][u1drv] = p->pSavetimeData->u1SwImpedanceResule[freq_region][u1drv];
10788 					vSetCalibrationResult(p, DRAM_CALIBRATION_SW_IMPEDANCE, DRAM_FAST_K);
10789 				}
10790 			}
10791 		}
10792 	}
10793 #endif
10794 
10795 	msg("freq_region=%d, Reg: DRVP=%d, DRVN=%d, ODTN=%d\n", freq_region, gDramcSwImpedanceResult[freq_region][DRVP],
10796 									gDramcSwImpedanceResult[freq_region][DRVN], gDramcSwImpedanceResult[freq_region][ODTN]);
10797 
10798 #if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
10799 	if ((p->dram_type == TYPE_LPDDR4) && (freq_region == 0))
10800 	{
10801 		gDramcSwImpedanceResult[freq_region][DRVP] = SwImpedanceAdjust(gDramcSwImpedanceResult[freq_region][DRVP], gDramcSwImpedanceAdjust[freq_region][DRVP]);
10802 		gDramcSwImpedanceResult[freq_region][DRVN] = SwImpedanceAdjust(gDramcSwImpedanceResult[freq_region][DRVN], gDramcSwImpedanceAdjust[freq_region][ODTN]);
10803 	}
10804 	else
10805 	{
10806 		gDramcSwImpedanceResult[freq_region][DRVP] = SwImpedanceAdjust(gDramcSwImpedanceResult[freq_region][DRVP], gDramcSwImpedanceAdjust[freq_region][DRVP]);
10807 		gDramcSwImpedanceResult[freq_region][ODTN] = SwImpedanceAdjust(gDramcSwImpedanceResult[freq_region][ODTN], gDramcSwImpedanceAdjust[freq_region][ODTN]);
10808 	}
10809 
10810 	msg("freq_region=%d, Reg: DRVP=%d, DRVN=%d, ODTN=%d (After Adjust)\n", freq_region, gDramcSwImpedanceResult[freq_region][DRVP],
10811 										gDramcSwImpedanceResult[freq_region][DRVN], gDramcSwImpedanceResult[freq_region][ODTN]);
10812 #endif
10813 
10814 #if __FLASH_TOOL_DA__
10815 	if((gDramcSwImpedanceResult[freq_region][ODTN] ==0)||(gDramcSwImpedanceResult[freq_region][ODTN] >=31))
10816 	{
10817 		msg("[WARNING] freq_region = %d, ODTN = %d ==> unexpected value\n", freq_region, gDramcSwImpedanceResult[freq_region][ODTN]);
10818 		PINInfo_flashtool.IMP_ERR_FLAG |= (0x1<<(freq_region+ODTN));
10819 	}
10820 	else if((gDramcSwImpedanceResult[freq_region][DRVP] ==0)||(gDramcSwImpedanceResult[freq_region][DRVP] >=31))
10821 	{
10822 		msg("[WARNING] freq_region = %d, DRVP = %d ==> unexpected value\n", freq_region, gDramcSwImpedanceResult[freq_region][DRVP]);
10823 		PINInfo_flashtool.IMP_ERR_FLAG |= (0x1<<(freq_region+DRVP));
10824 	}
10825 	else if((gDramcSwImpedanceResult[freq_region][DRVN] ==0)||(gDramcSwImpedanceResult[freq_region][DRVN] >=31))
10826 	{
10827 		msg("[WARNING] freq_region = %d, DRVN = %d ==> unexpected value\n", freq_region, gDramcSwImpedanceResult[freq_region][DRVN]);
10828 		PINInfo_flashtool.IMP_ERR_FLAG |= (0x1<<(freq_region+DRVN));
10829 	}
10830 	else
10831 #endif
10832 	{
10833 		vSetCalibrationResult(p, DRAM_CALIBRATION_SW_IMPEDANCE, DRAM_OK);
10834 	}
10835 	msg3("[DramcSwImpedanceCal] Done\n\n");
10836 
10837 	vSetPHY2ChannelMapping(p, backup_channel);
10838 	DramcBroadcastOnOff(backup_broadcast);
10839 
10840 	return DRAM_OK;
10841 }
10842 #endif //SIMULATION_SW_IMPED
10843 
10844 #if ENABLE_WRITE_DBI || TX_K_DQM_WITH_WDBI
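/*
 * Editor's note: a shift given in MCK is converted to UI by multiplying with
 * 2^u1MCK2UI_DivShift(p) (the MCK-to-UI ratio for the current data rate),
 * then applied to all DQ bytes via ShiftDQUI().
 */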
10845 void DramcWriteShiftMCKForWriteDBI(DRAMC_CTX_T *p, S8 iShiftMCK)
10846 {
10847 	U8 ucDataRateDivShift = 0;
10848 	S8 s1UIMove = 0;
10849 
10850 	ucDataRateDivShift = u1MCK2UI_DivShift(p);
10851 	s1UIMove = iShiftMCK * (S8)(1 << ucDataRateDivShift);
10852 	ShiftDQUI(p, s1UIMove, ALL_BYTES);
10853 }
10854 #endif
10855 
10856 #if ENABLE_DUTY_SCAN_V2
10857 
10858 #define DutyPrintAllLog 		0
10859 #define DutyPrintCalibrationLog 0
10860 
10861 #define DUTY_OFFSET_START -28
10862 #define DUTY_OFFSET_END 28
10863 #define DUTY_OFFSET_STEP 4
10864 
10865 #define CLOCK_PI_START 0
10866 #define CLOCK_PI_END 63
10867 
10868 #if FOR_DV_SIMULATION_USED
10869 #define CLOCK_PI_STEP 8
10870 #else
10871 #define CLOCK_PI_STEP 2
10872 #endif
10873 
10874 #define ClockDutyFailLowerBound 4500	// 45%
10875 #define ClockDutyFailUpperBound 5500	// 55%
10876 #define ClockDutyMiddleBound	5000	// 50%
10877 
10878 /*
10879 * duty from smallest to biggest
10880 * 011111->011110->...->000001-->000000=100000->100001-->...->111111
10881 */
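/*
 * Editor's note: the RG encoding produced below is sign-magnitude with bit 5
 * as the sign: a negative duty offset maps to its magnitude (e.g. -3 -> 0b000011),
 * zero maps to 0, and a positive offset maps to 32 + value (e.g. +3 -> 0b100011).
 */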
10882 static U8 DramcDutyDelayRGSettingConvert(DRAMC_CTX_T *p, S8 scDutyDelay,
10883 	U8 *tDly)
10884 {
10885 	U8 tDelay;
10886 
10887 	if (scDutyDelay < 0)
10888 	{
10889 		tDelay = -scDutyDelay;
10890 	}
10891 	else if (scDutyDelay > 0)
10892 	{
10893 		tDelay = scDutyDelay + (1 << 5);
10894 	}
10895 	else
10896 	{
10897 		tDelay = 0;
10898 	}
10899 
10900 	*tDly = tDelay;
10901 	return tDelay;
10902 }
10903 
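/*
 * Editor's note: the Set*DelayCell helpers below convert the signed duty
 * offsets to the RG encoding via DramcDutyDelayRGSettingConvert() and program
 * them into the corresponding SHU_*_TXDUTY duty-delay fields; when
 * DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ is set, the same value is written to every
 * shuffle register group.
 */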
10904 static void DramcClockDutySetClkDelayCell(DRAMC_CTX_T *p, S8 *scDutyDelay)
10905 {
10906 	U8 u1ShuffleIdx = 0;
10907 	U32 save_offset;
10908 	U8 tDelay;
10909 
10910 	DramcDutyDelayRGSettingConvert(p, scDutyDelay[0], &tDelay);
10911 
10912 #if DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ
10913 	for(u1ShuffleIdx = 0; u1ShuffleIdx<DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
10914 #endif
10915 	{
10916 		save_offset = u1ShuffleIdx * SHU_GRP_DDRPHY_OFFSET;
10917 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_TXDUTY + save_offset),
10918 			P_Fld(tDelay, SHU_CA_TXDUTY_DA_TX_ARCLK_DUTY_DLY));
10919 	}
10920 }
10921 
10922 static void DQSDutyScan_SetDqsDelayCell(DRAMC_CTX_T *p, S8 *scDutyDelay)
10923 {
10924 	U8 u1ShuffleIdx = 0, u1DQSIdx;
10925 	U32 save_offset;
10926 	U8 tDelay[2];
10927 
10928 //	  msg("CH%d, Final DQS0 duty delay cell = %d\n", p->channel, scDutyDelay[0]);
10929 //	  msg("CH%d, Final DQS1 duty delay cell = %d\n", p->channel, scDutyDelay[1]);
10930 
10931 	for(u1DQSIdx=0; u1DQSIdx<2; u1DQSIdx++)
10932 	{
10933 		DramcDutyDelayRGSettingConvert(p, scDutyDelay[u1DQSIdx], &(tDelay[u1DQSIdx]));
10934 	}
10935 
10936 #if DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ
10937 	for(u1ShuffleIdx = 0; u1ShuffleIdx<DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
10938 #endif
10939 	{
10940 		{
10941 			for(u1DQSIdx = 0; u1DQSIdx<2; u1DQSIdx++)
10942 			{
10943 				save_offset = u1ShuffleIdx * SHU_GRP_DDRPHY_OFFSET + u1DQSIdx*DDRPHY_AO_B0_B1_OFFSET;
10944 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_TXDUTY) + save_offset,
10945 					P_Fld(tDelay[u1DQSIdx], SHU_B0_TXDUTY_DA_TX_ARDQS_DUTY_DLY_B0));
10946 			}
10947 		}
10948 	}
10949 }
10950 
10951 static void WCKDutyScan_SetWCKDelayCell(DRAMC_CTX_T *p, S8 *scDutyDelay)
10952 {
10953 	U8 u1ShuffleIdx = 0, u1DQSIdx;
10954 	U32 save_offset;
10955 	U8 tDelay[2];
10956 
10957 	for(u1DQSIdx=0; u1DQSIdx<2; u1DQSIdx++)
10958 	{
10959 		DramcDutyDelayRGSettingConvert(p, scDutyDelay[u1DQSIdx], &(tDelay[u1DQSIdx]));
10960 	}
10961 
10962 #if DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ
10963 	for(u1ShuffleIdx = 0; u1ShuffleIdx<DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
10964 #endif
10965 	{
10966 		{
10967 			for(u1DQSIdx = 0; u1DQSIdx<2; u1DQSIdx++)
10968 			{
10969 				save_offset = u1ShuffleIdx * SHU_GRP_DDRPHY_OFFSET + u1DQSIdx*DDRPHY_AO_B0_B1_OFFSET;
10970 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_TXDUTY) + save_offset,
10971 					P_Fld(tDelay[u1DQSIdx], SHU_B0_TXDUTY_DA_TX_ARWCK_DUTY_DLY_B0));
10972 			}
10973 		}
10974 	}
10975 }
10976 
10977 #if APPLY_DQDQM_DUTY_CALIBRATION
10978 static void DQDQMDutyScan_SetDQDQMDelayCell(DRAMC_CTX_T *p, U8 u1ChannelIdx, S8 *scDutyDelay, U8 k_type)
10979 {
10980 	U8 u1ShuffleIdx = 0, u1DQSIdx;
10981 	U32 save_offset;
10982 	U8 tDelay[2];
10983 
10984 	for(u1DQSIdx=0; u1DQSIdx<2; u1DQSIdx++)
10985 	{
10986 		DramcDutyDelayRGSettingConvert(p, scDutyDelay[u1DQSIdx], &(tDelay[u1DQSIdx]));
10987 	}
10988 
10989 #if DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ
10990 	for(u1ShuffleIdx = 0; u1ShuffleIdx<DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
10991 #endif
10992 	{
10993 		for(u1DQSIdx = 0; u1DQSIdx<2; u1DQSIdx++)
10994 		{
10995 			save_offset = u1ShuffleIdx * SHU_GRP_DDRPHY_OFFSET + u1DQSIdx*DDRPHY_AO_B0_B1_OFFSET;
10996 
10997 			if (k_type == DutyScan_Calibration_K_DQ)
10998 			{
10999 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_TXDUTY) + save_offset,
11000 					P_Fld(tDelay[u1DQSIdx], SHU_B0_TXDUTY_DA_TX_ARDQ_DUTY_DLY_B0));
11001 			}
11002 
11003 			if (k_type == DutyScan_Calibration_K_DQM)
11004 			{
11005 				vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_TXDUTY) + save_offset,
11006 					P_Fld(tDelay[u1DQSIdx], SHU_B0_TXDUTY_DA_TX_ARDQM_DUTY_DLY_B0));
11007 			}
11008 		}
11009 	}
11010 }
11011 
11012 #if 0
11013 void DQDQMDutyScan_CopyDQRG2DQMRG(DRAMC_CTX_T *p)
11014 {
11015 	U8 u1ShuffleIdx = 0, u1DQSIdx, u1RankIdx = 0;
11016 	U32 save_offset;
11017 	U8 ucDQDQMDelay;
11018 	U8 ucRev_DQDQM_Bit0, ucRev_DQDQM_Bit1;
11019 
11020 #if DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ
11021 	for(u1ShuffleIdx = 0; u1ShuffleIdx<DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
11022 #endif
11023 	{
11024 			for(u1DQSIdx = 0; u1DQSIdx<2; u1DQSIdx++)
11025 			{
11026 			save_offset = u1ShuffleIdx * SHU_GRP_DDRPHY_OFFSET + u1DQSIdx*DDRPHY_AO_B0_B1_OFFSET_0X80;
11027 
11028 			ucDQDQMDelay = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_SHU_B0_DQ3) + save_offset, SHU_B0_DQ3_RG_ARDQ_DUTYREV_B0_DQ_DLY);
11029 			ucRev_DQDQM_Bit0 = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_SHU_B0_DQ3) + save_offset, SHU_B0_DQ3_RG_TX_ARDQS0_PU_PRE_B0_BIT0);
11030 			ucRev_DQDQM_Bit1 = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_SHU_B0_DQ3) + save_offset, SHU_B0_DQ3_RG_TX_ARDQS0_PU_PRE_B0_BIT0);
11031 
11032 			vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_SHU_B0_DQ3) + save_offset, P_Fld(ucDQDQMDelay, SHU_B0_DQ3_RG_ARDQ_DUTYREV_B0_DQM_DLY)
11033 																	| P_Fld(ucRev_DQDQM_Bit0, SHU_B0_DQ3_RG_TX_ARDQS0_PU_B0_BIT0)
11034 																	| P_Fld(ucRev_DQDQM_Bit1, SHU_B0_DQ3_RG_TX_ARDQS0_PU_B0_BIT1));
11035 			}
11036 		}
11037 	}
11038 #endif
11039 #endif
11040 
11041 S8 gcFinal_K_CLK_delay_cell[CHANNEL_NUM][DQS_NUMBER];
11042 S8 gcFinal_K_DQS_delay_cell[CHANNEL_NUM][DQS_NUMBER];
11043 S8 gcFinal_K_WCK_delay_cell[CHANNEL_NUM][DQS_NUMBER];
11044 #if APPLY_DQDQM_DUTY_CALIBRATION
11045 S8 gcFinal_K_DQ_delay_cell[CHANNEL_NUM][DQS_NUMBER];
11046 S8 gcFinal_K_DQM_delay_cell[CHANNEL_NUM][DQS_NUMBER];
11047 #endif
11048 
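/*
 * Editor's note: when save-time calibration data is available
 * (femmc_Ready == 1), DramcNewDutyCalibration() simply restores the stored
 * CLK/DQS/WCK (and, if enabled, DQ/DQM) duty delay cells for each channel
 * instead of re-scanning; otherwise the duty scan path is only expected to
 * run at the highest frequency when DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ is set.
 */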
11049 void DramcNewDutyCalibration(DRAMC_CTX_T *p)
11050 {
11051 	U8 u1backup_channel, u1backup_rank;
11052 
11053 #if(DQS_DUTY_SLT_CONDITION_TEST)
11054 		U16 u2TestCnt, u2FailCnt=0, u2TestCntTotal =20; //fra 400;
11055 		U8 u1ByteIdx, u1PI_FB;
11056 		U32 u4Variance;
11057 #endif
11058 
11059 	u1backup_rank = u1GetRank(p);
11060 	vSetRank(p, RANK_0);
11061 
11062 #if !FT_DSIM_USED
11063 #if DUTY_SCAN_V2_ONLY_K_HIGHEST_FREQ
11064 	if((p->frequency == u2DFSGetHighestFreq(p)) && (Get_PRE_MIOCK_JMETER_HQA_USED_flag()==0))
11065 #else
11066 	//TODO if(Get_PRE_MIOCK_JMETER_HQA_USED_flag()==0)
11067 #endif
11068 #endif
11069 	{
11070 		U8 u1ChannelIdx;
11071 		u1backup_channel = vGetPHY2ChannelMapping(p);
11072 
11073 		#if SUPPORT_SAVE_TIME_FOR_CALIBRATION
11074 		if(p->femmc_Ready==1)
11075 		{
11076 			for(u1ChannelIdx=CHANNEL_A; u1ChannelIdx<p->support_channel_num; u1ChannelIdx++)
11077 			{
11078 				vSetPHY2ChannelMapping(p, u1ChannelIdx);
11079 				DramcClockDutySetClkDelayCell(p, p->pSavetimeData->s1ClockDuty_clk_delay_cell[p->channel]);
11080 				DQSDutyScan_SetDqsDelayCell(p, p->pSavetimeData->s1DQSDuty_clk_delay_cell[p->channel]);
11081 				WCKDutyScan_SetWCKDelayCell(p, p->pSavetimeData->s1WCKDuty_clk_delay_cell[p->channel]);
11082 				#if APPLY_DQDQM_DUTY_CALIBRATION
11083 				DQDQMDutyScan_SetDQDQMDelayCell(p, p->channel, p->pSavetimeData->s1DQMDuty_clk_delay_cell[p->channel], DutyScan_Calibration_K_DQM);
11084 				DQDQMDutyScan_SetDQDQMDelayCell(p, p->channel, p->pSavetimeData->s1DQDuty_clk_delay_cell[p->channel], DutyScan_Calibration_K_DQ);
11085 				#endif
11086 			}
11087 			vSetPHY2ChannelMapping(p, u1backup_channel);
11088 			return;
11089 		}
11090 		#endif
11091 
11092 		vSetPHY2ChannelMapping(p, u1backup_channel);
11093 	}
11094 
11095 	vSetRank(p, u1backup_rank);
11096 }
11097 #endif
11098