/* SPDX-License-Identifier: GPL-2.0-only */

#include <console/console.h>
#include <cbmem.h>
#include <device/device.h>
#include <device/dram/ddr4.h>
#include <string.h>
#include <memory_info.h>
#include <smbios.h>
#include <types.h>

enum ddr4_speed_grade {
	DDR4_1600,
	DDR4_1866,
	DDR4_2133,
	DDR4_2400,
	DDR4_2666,
	DDR4_2933,
	DDR4_3200
};

struct ddr4_speed_attr {
	uint32_t min_clock_mhz; // inclusive
	uint32_t max_clock_mhz; // inclusive
	uint32_t reported_mts;
};

/**
 * DDR4 speed attributes derived from JEDEC JESD79-4C tables 169 and 170
 *
 * min_clock_mhz = 1000/max_tCk_avg(ns) + 1
 *                 Adding 1 to make the minimum inclusive
 * max_clock_mhz = 1000/min_tCk_avg(ns)
 * reported_mts  = Standard reported DDR4 speed in MT/s
 *                 May be 1 less than the actual max MT/s
 */
static const struct ddr4_speed_attr ddr4_speeds[] = {
	[DDR4_1600] = {.min_clock_mhz =  668, .max_clock_mhz =  800, .reported_mts = 1600},
	[DDR4_1866] = {.min_clock_mhz =  801, .max_clock_mhz =  934, .reported_mts = 1866},
	[DDR4_2133] = {.min_clock_mhz =  935, .max_clock_mhz = 1067, .reported_mts = 2133},
	[DDR4_2400] = {.min_clock_mhz = 1068, .max_clock_mhz = 1200, .reported_mts = 2400},
	[DDR4_2666] = {.min_clock_mhz = 1201, .max_clock_mhz = 1333, .reported_mts = 2666},
	[DDR4_2933] = {.min_clock_mhz = 1334, .max_clock_mhz = 1466, .reported_mts = 2933},
	[DDR4_3200] = {.min_clock_mhz = 1467, .max_clock_mhz = 1600, .reported_mts = 3200}
};
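
/*
 * Worked example for the table above (a sketch, assuming the JESD79-4C
 * limits tCk_avg(min) = 0.625 ns and tCk_avg(max) = 0.682 ns for DDR4-3200):
 *   min_clock_mhz = 1000 / 0.682 + 1 = 1466 + 1 = 1467
 *   max_clock_mhz = 1000 / 0.625     = 1600
 * so any clock in [1467, 1600] MHz is reported as DDR4-3200 (3200 MT/s).
 */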

typedef enum {
	BLOCK_0, /* Base Configuration and DRAM Parameters */
	BLOCK_1,
	BLOCK_1_L, /* Standard Module Parameters */
	BLOCK_1_H, /* Hybrid Module Parameters */
	BLOCK_2,
	BLOCK_2_L, /* Hybrid Module Extended Function Parameters */
	BLOCK_2_H, /* Manufacturing Information */
	BLOCK_3    /* End user programmable */
} spd_block_type;

typedef struct {
	spd_block_type type;
	uint16_t start;     /* starting offset from the beginning of the SPD */
	uint16_t len;       /* size of the block */
	uint16_t crc_start; /* offset from 'start' to the CRC bytes, 0 if none */
} spd_block;

/* 'SPD contents architecture' as per datasheet */
const spd_block spd_blocks[] = {
	{.type = BLOCK_0,   .start = 0,   .len = 128, .crc_start = 126},
	{.type = BLOCK_1,   .start = 128, .len = 128, .crc_start = 126},
	{.type = BLOCK_1_L, .start = 128, .len = 64,  .crc_start = 0},
	{.type = BLOCK_1_H, .start = 192, .len = 64,  .crc_start = 0},
	{.type = BLOCK_2_L, .start = 256, .len = 64,  .crc_start = 62},
	{.type = BLOCK_2_H, .start = 320, .len = 64,  .crc_start = 0},
	{.type = BLOCK_3,   .start = 384, .len = 128, .crc_start = 0}
};

/* Check a block's CRC-16, which is stored little-endian at the end of the block */
static bool verify_block(const spd_block *block, spd_ddr4_raw_data spd)
{
	uint16_t crc, spd_crc;

	spd_crc = (spd[block->start + block->crc_start + 1] << 8) |
		  spd[block->start + block->crc_start];
	crc = ddr_crc16(&spd[block->start], block->len - 2);

	return spd_crc == crc;
}

/* Check whether a given block exists (i.e. is not 'reserved') for a given module type */
static bool block_exists(spd_block_type type, u8 dimm_type)
{
	bool is_hybrid;

	switch (type) {
	case BLOCK_0: /* fall-through */
	case BLOCK_1: /* fall-through */
	case BLOCK_1_L: /* fall-through */
	case BLOCK_1_H: /* fall-through */
	case BLOCK_2_H: /* fall-through */
	case BLOCK_3:
		return true;
	case BLOCK_2_L:
		/* Byte 3, bits 6:4 encode the hybrid media; nonzero means hybrid */
		is_hybrid = (dimm_type >> 4) & ((1 << 3) - 1);
		if (is_hybrid)
			return true;
		return false;
	default:
		return false;
	}
}

/**
 * Converts DDR4 clock speed in MHz to the standard reported speed in MT/s
 */
uint16_t ddr4_speed_mhz_to_reported_mts(uint16_t speed_mhz)
{
	for (enum ddr4_speed_grade speed = 0; speed < ARRAY_SIZE(ddr4_speeds); speed++) {
		const struct ddr4_speed_attr *speed_attr = &ddr4_speeds[speed];
		if (speed_mhz >= speed_attr->min_clock_mhz &&
		    speed_mhz <= speed_attr->max_clock_mhz) {
			return speed_attr->reported_mts;
		}
	}
	printk(BIOS_ERR, "DDR4 speed of %d MHz is out of range\n", speed_mhz);
	return 0;
}
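
/*
 * Usage sketch (illustrative, not part of the driver): a memory controller
 * running the DRAM clock at 1333 MHz would get
 *   ddr4_speed_mhz_to_reported_mts(1333) == 2666
 * since 1333 MHz falls inside [1201, 1333], the DDR4_2666 grade.
 */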

/**
 * \brief Decode the raw SPD data
 *
 * Decodes raw SPD data from a DDR4 DIMM and organizes it into a
 * @ref dimm_attr structure. The SPD data must first be read into a contiguous
 * array and then passed to this function.
 *
 * @param dimm pointer to @ref dimm_attr structure where the decoded data is to
 *	       be stored
 * @param spd array of raw data previously read from the SPD.
 *
 * @return @ref spd_status enumerator
 *		SPD_STATUS_OK -- decoding was successful
 *		SPD_STATUS_INVALID -- invalid SPD or not a DDR4 SPD
 *		SPD_STATUS_CRC_ERROR -- checksum mismatch
 */
int spd_decode_ddr4(struct dimm_attr_ddr4_st *dimm, spd_ddr4_raw_data spd)
{
	u8 reg8;
	u8 bus_width, sdram_width;
	u16 cap_per_die_mbit;
	u16 spd_bytes_total, spd_bytes_used;
	const uint16_t spd_bytes_used_table[] = {0, 128, 256, 384, 512};

	/* Make sure that the SPD dump is indeed from a DDR4 module */
	if (spd[2] != SPD_MEMORY_TYPE_DDR4_SDRAM) {
		printk(BIOS_ERR, "Not a DDR4 SPD!\n");
		dimm->dram_type = SPD_MEMORY_TYPE_UNDEFINED;
		return SPD_STATUS_INVALID;
	}

	spd_bytes_total = (spd[0] >> 4) & 0x7;
	spd_bytes_used = spd[0] & 0xf;

	if (!spd_bytes_total || !spd_bytes_used) {
		printk(BIOS_ERR, "SPD failed basic sanity checks\n");
		return SPD_STATUS_INVALID;
	}

	if (spd_bytes_total >= 3)
		printk(BIOS_WARNING, "SPD Bytes Total value is reserved\n");

	spd_bytes_total = 256 << (spd_bytes_total - 1);

	if (spd_bytes_used > 4) {
		printk(BIOS_ERR, "SPD Bytes Used value is reserved\n");
		return SPD_STATUS_INVALID;
	}

	spd_bytes_used = spd_bytes_used_table[spd_bytes_used];

	if (spd_bytes_used > spd_bytes_total) {
		printk(BIOS_ERR, "SPD Bytes Used is greater than SPD Bytes Total\n");
		return SPD_STATUS_INVALID;
	}

	/* Verify CRC of blocks that have them, do not step over 'used' length */
	for (int i = 0; i < ARRAY_SIZE(spd_blocks); i++) {
		/* this block is not checksummed */
		if (spd_blocks[i].crc_start == 0)
			continue;
		/* this block extends beyond the 'used' area, so skip it */
		if (spd_blocks[i].start + spd_blocks[i].len > spd_bytes_used)
			continue;
		/* check if block exists in the current schema */
		if (!block_exists(spd_blocks[i].type, spd[3]))
			continue;
		if (!verify_block(&spd_blocks[i], spd)) {
			printk(BIOS_ERR, "CRC failed for block %d\n", i);
			return SPD_STATUS_CRC_ERROR;
		}
	}

	dimm->dram_type = SPD_MEMORY_TYPE_DDR4_SDRAM;
	dimm->dimm_type = spd[3] & ((1 << 4) - 1);

	/* Byte 13: module memory bus width */
	reg8 = spd[13] & ((1 << 4) - 1);
	dimm->bus_width = reg8;
	bus_width = 8 << (reg8 & ((1 << 3) - 1));

	/* Byte 12, bits 2:0: SDRAM device width */
	reg8 = spd[12] & ((1 << 3) - 1);
	dimm->sdram_width = reg8;
	sdram_width = 4 << reg8;

	/* Byte 4, bits 3:0: SDRAM capacity per die */
	reg8 = spd[4] & ((1 << 4) - 1);
	dimm->cap_per_die_mbit = reg8;
	cap_per_die_mbit = (1 << reg8) * 256;

	/* Byte 12, bits 5:3: number of package ranks */
	reg8 = (spd[12] >> 3) & ((1 << 3) - 1);
	dimm->ranks = reg8 + 1;

	if (!bus_width || !sdram_width) {
		printk(BIOS_ERR, "SPD information is invalid\n");
		dimm->size_mb = 0;
		return SPD_STATUS_INVALID;
	}

	/* DDR4 has a single nominal supply voltage of 1.2 V, stored in mV */
	dimm->vdd_voltage = 1200;

	/* calculate size */
	/* FIXME: this is wrong for 3DS devices */
	dimm->size_mb = cap_per_die_mbit / 8 * bus_width / sdram_width * dimm->ranks;
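	/*
	 * Worked example (a sketch, numbers chosen for illustration): an SPD
	 * describing 8 Gbit dies (cap_per_die_mbit = 8192), a 64-bit primary
	 * bus (bus_width = 64), x8 devices (sdram_width = 8) and 2 ranks:
	 *   8192 / 8 * 64 / 8 * 2 = 16384 MB, i.e. a 16 GB dual-rank DIMM.
	 */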

	dimm->ecc_extension = spd[SPD_PRIMARY_SDRAM_WIDTH] & SPD_ECC_8BIT;

	/* make sure we have the manufacturing information block */
	if (spd_bytes_used > 320) {
		dimm->manufacturer_id = (spd[351] << 8) | spd[350];
		memcpy(dimm->part_number, &spd[329], SPD_DDR4_PART_LEN);
		dimm->part_number[SPD_DDR4_PART_LEN] = 0;
		memcpy(dimm->serial_number, &spd[325], sizeof(dimm->serial_number));
	}
	return SPD_STATUS_OK;
}

enum cb_err spd_add_smbios17_ddr4(const u8 channel, const u8 slot, const u16 selected_freq,
				  const struct dimm_attr_ddr4_st *info)
{
	struct memory_info *mem_info;
	struct dimm_info *dimm;

	/*
	 * Allocate CBMEM area for DIMM information used to populate SMBIOS
	 * table 17
	 */
	mem_info = cbmem_find(CBMEM_ID_MEMINFO);
	if (!mem_info) {
		mem_info = cbmem_add(CBMEM_ID_MEMINFO, sizeof(*mem_info));

		printk(BIOS_DEBUG, "CBMEM entry for DIMM info: %p\n", mem_info);
		if (!mem_info)
			return CB_ERR;

		memset(mem_info, 0, sizeof(*mem_info));
	}

	if (mem_info->dimm_cnt >= ARRAY_SIZE(mem_info->dimm)) {
		printk(BIOS_WARNING, "BUG: Too many DIMM infos for %s.\n", __func__);
		return CB_ERR;
	}

	dimm = &mem_info->dimm[mem_info->dimm_cnt];
	if (info->size_mb) {
		dimm->ddr_type = MEMORY_TYPE_DDR4;
		dimm->ddr_frequency = selected_freq;
		dimm->dimm_size = info->size_mb;
		dimm->channel_num = channel;
		dimm->rank_per_dimm = info->ranks;
		dimm->dimm_num = slot;
		memcpy(dimm->module_part_number, info->part_number, SPD_DDR4_PART_LEN);
		dimm->mod_id = info->manufacturer_id;
		dimm->mod_type = info->dimm_type;
		dimm->bus_width = info->bus_width;
		memcpy(dimm->serial, info->serial_number,
		       MIN(sizeof(dimm->serial), sizeof(info->serial_number)));

		dimm->vdd_voltage = info->vdd_voltage;
		mem_info->dimm_cnt++;
	}

	return CB_SUCCESS;
}
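
/*
 * Usage sketch (illustrative, not part of the driver): a typical raminit
 * caller decodes each DIMM's SPD and, if valid, registers it for SMBIOS
 * type 17. 'raw_spd', 'channel', 'slot' and 'mem_speed' are hypothetical
 * local variables.
 *
 *	struct dimm_attr_ddr4_st attr;
 *
 *	if (spd_decode_ddr4(&attr, raw_spd) == SPD_STATUS_OK)
 *		spd_add_smbios17_ddr4(channel, slot, mem_speed, &attr);
 */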

/* Returns MRS command */
static uint32_t ddr4_wr_to_mr0_map(u8 wr)
{
	/*
	 * JESD79-4 MR0 write recovery encoding for WR = 10 through 26 in
	 * steps of 2. Note that WR = 24 encodes as 6 and WR = 22 as 7,
	 * hence the swapped entries.
	 */
	static const unsigned int enc[] = {0, 1, 2, 3, 4, 5, 7, 6, 8};
	int wr_idx = wr / 2 - 5;
	if (wr_idx < 0 || wr_idx >= ARRAY_SIZE(enc))
		die("WR index out of bounds: %d (derived from %d)\n", wr_idx, wr);

	return enc[wr_idx] << 9;
}
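
/*
 * Worked example: wr = 22 gives wr_idx = 22 / 2 - 5 = 6 and enc[6] = 7,
 * so the function returns 7 << 9 = 0xE00.
 */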

/* Returns MRS command */
static uint32_t ddr4_cas_to_mr0_map(u8 cas)
{
	static const unsigned int enc[] = {
		/*
		 * The only non-zero bits are at positions (LSB0): 12, 6, 5, 4, 2.
		 */
		0x0000,		/* CL = 9 */
		0x0004,		/* CL = 10 */
		0x0010,		/* CL = 11 */
		0x0014,		/* CL = 12 */
		0x0020,		/* CL = 13 */
		0x0024,		/* CL = 14 */
		0x0030,		/* CL = 15 */
		0x0034,		/* CL = 16 */
		0x0064,		/* CL = 17 */
		0x0040,		/* CL = 18 */
		0x0070,		/* CL = 19 */
		0x0044,		/* CL = 20 */
		0x0074,		/* CL = 21 */
		0x0050,		/* CL = 22 */
		0x0060,		/* CL = 23 */
		0x0054,		/* CL = 24 */
		0x1000,		/* CL = 25 */
		0x1004,		/* CL = 26 */
		0x1010,		/* CL = 27 (only 3DS) */
		0x1014,		/* CL = 28 */
		0x1020,		/* reserved for CL = 29 */
		0x1024,		/* CL = 30 */
		0x1030,		/* reserved for CL = 31 */
		0x1034,		/* CL = 32 */
	};

	int cas_idx = cas - 9;
	if (cas_idx < 0 || cas_idx >= ARRAY_SIZE(enc))
		die("CAS index out of bounds: %d (derived from %d)\n", cas_idx, cas);

	return enc[cas_idx];
}
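
/*
 * Worked example: cas = 17 gives cas_idx = 8, so the function returns
 * enc[8] = 0x0064, i.e. bits 6, 5 and 2 set.
 */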

uint32_t ddr4_get_mr0(u8 write_recovery,
		      enum ddr4_mr0_dll_reset dll_reset,
		      u8 cas,
		      enum ddr4_mr0_burst_type burst_type,
		      enum ddr4_mr0_burst_length burst_length)
{
	uint32_t cmd = 0 << 20;

	cmd |= ddr4_wr_to_mr0_map(write_recovery);
	cmd |= dll_reset << 8;
	cmd |= DDR4_MR0_MODE_NORMAL << 7;
	cmd |= ddr4_cas_to_mr0_map(cas);
	cmd |= burst_type << 3;
	cmd |= burst_length << 0;

	return cmd;
}
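
/*
 * Worked example (a sketch; raw values stand in for the enum constants, and
 * DDR4_MR0_MODE_NORMAL is assumed to be 0): write_recovery = 14,
 * dll_reset = 1, cas = 16, sequential burst type (0) and fixed BL8 (0)
 * compose to
 *   cmd = (0 << 20) | (2 << 9) | (1 << 8) | (0 << 7) | 0x0034 | (0 << 3) | 0
 *       = 0x534
 */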

uint32_t ddr4_get_mr1(enum ddr4_mr1_qoff qoff,
		      enum ddr4_mr1_tdqs tdqs,
		      enum ddr4_mr1_rtt_nom rtt_nom,
		      enum ddr4_mr1_write_leveling write_leveling,
		      enum ddr4_mr1_odimp output_drive_impedance,
		      enum ddr4_mr1_additive_latency additive_latency,
		      enum ddr4_mr1_dll dll_enable)
{
	uint32_t cmd = 1 << 20;

	cmd |= qoff << 12;
	cmd |= tdqs << 11;
	cmd |= rtt_nom << 8;
	cmd |= write_leveling << 7;
	cmd |= output_drive_impedance << 1;
	cmd |= additive_latency << 3;
	cmd |= dll_enable << 0;

	return cmd;
}

/* Returns MRS command */
static uint32_t ddr4_cwl_to_mr2_map(u8 cwl)
{
	/* The encoding, starting from 0, is: CWL = 9, 10, 11, 12, 14, 16, 18, 20 */
	if (cwl < 14)
		cwl -= 9;
	else
		cwl = (cwl - 14) / 2 + 4;

	return cwl << 3;
}
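
/*
 * Worked example: cwl = 16 maps to (16 - 14) / 2 + 4 = 5, so the function
 * returns 5 << 3 = 0x28; cwl = 11 maps to 11 - 9 = 2, returning 0x10.
 */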

uint32_t ddr4_get_mr2(enum ddr4_mr2_wr_crc wr_crc,
		      enum ddr4_mr2_rtt_wr rtt_wr,
		      enum ddr4_mr2_lp_asr self_refresh, u8 cwl)
{
	uint32_t cmd = 2 << 20;

	cmd |= wr_crc << 12;
	cmd |= rtt_wr << 9;
	cmd |= self_refresh << 6;
	cmd |= ddr4_cwl_to_mr2_map(cwl);

	return cmd;
}

uint32_t ddr4_get_mr3(enum ddr4_mr3_mpr_read_format mpr_read_format,
		      enum ddr4_mr3_wr_cmd_lat_crc_dm command_latency_crc_dm,
		      enum ddr4_mr3_fine_gran_ref fine_refresh,
		      enum ddr4_mr3_temp_sensor_readout temp_sensor,
		      enum ddr4_mr3_pda pda,
		      enum ddr4_mr3_geardown_mode geardown,
		      enum ddr4_mr3_mpr_operation mpr_operation,
		      u8 mpr_page)
{
	uint32_t cmd = 3 << 20;

	cmd |= mpr_read_format << 11;
	cmd |= command_latency_crc_dm << 9;
	cmd |= fine_refresh << 6;
	cmd |= temp_sensor << 5;
	cmd |= pda << 4;
	cmd |= geardown << 3;
	cmd |= mpr_operation << 2;
	cmd |= (mpr_page & 3) << 0;

	return cmd;
}

uint32_t ddr4_get_mr4(enum ddr4_mr4_hppr hppr,
		      enum ddr4_mr4_wr_preamble wr_preamble,
		      enum ddr4_mr4_rd_preamble rd_preamble,
		      enum ddr4_mr4_rd_preamble_training rd_preamble_train,
		      enum ddr4_mr4_self_refr_abort self_ref_abrt,
		      enum ddr4_mr4_cs_to_cmd_latency cs2cmd_lat,
		      enum ddr4_mr4_sppr sppr,
		      enum ddr4_mr4_internal_vref_mon int_vref_mon,
		      enum ddr4_mr4_temp_controlled_refr temp_ctrl_ref,
		      enum ddr4_mr4_max_pd_mode max_pd)
{
	uint32_t cmd = 4 << 20;

	cmd |= hppr << 13;
	cmd |= wr_preamble << 12;
	cmd |= rd_preamble << 11;
	cmd |= rd_preamble_train << 10;
	cmd |= self_ref_abrt << 9;
	cmd |= cs2cmd_lat << 6;
	cmd |= sppr << 5;
	cmd |= int_vref_mon << 4;
	cmd |= temp_ctrl_ref << 2;
	cmd |= max_pd << 1;

	return cmd;
}

uint32_t ddr4_get_mr5(enum ddr4_mr5_rd_dbi rd_dbi,
		      enum ddr4_mr5_wr_dbi wr_dbi,
		      enum ddr4_mr5_data_mask dm,
		      enum ddr4_mr5_rtt_park rtt_park,
		      enum ddr4_mr5_odt_pd odt_pd,
		      enum ddr4_mr5_ca_parity_lat pl)
{
	uint32_t cmd = 5 << 20;

	cmd |= rd_dbi << 12;
	cmd |= wr_dbi << 11;
	cmd |= dm << 10;
	cmd |= rtt_park << 6;
	cmd |= odt_pd << 5;
	cmd |= pl << 0;

	return cmd;
}

/* Returns MRS command */
static uint32_t ddr4_tccd_l_to_mr6_map(u8 tccd_l)
{
	if (tccd_l < 4 || tccd_l > 8)
		die("tCCD_l out of range: %d\n", tccd_l);

	return (tccd_l - 4) << 10;
}

uint32_t ddr4_get_mr6(u8 tccd_l,
		      enum ddr4_mr6_vrefdq_training vrefdq_training,
		      enum ddr4_mr6_vrefdq_training_range range,
		      u8 vrefdq_value)
{
	uint32_t cmd = 6 << 20;

	cmd |= ddr4_tccd_l_to_mr6_map(tccd_l);
	cmd |= vrefdq_training << 7;
	cmd |= range << 6;
	cmd |= vrefdq_value & 0x3F;

	return cmd;
}

/*
 * ZQCL: A16 = H, A15 = H, A14 = L, A10 = H, rest either L or H
 * ZQCS: A16 = H, A15 = H, A14 = L, A10 = L, rest either L or H
 */
uint32_t ddr4_get_zqcal_cmd(enum ddr4_zqcal_ls long_short)
{
	uint32_t cmd = 1 << 16 | 1 << 15;

	cmd |= long_short << 10;

	return cmd;
}
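
/*
 * Worked example (assuming the enum encodes ZQCAL long as 1 and short as 0,
 * per the A10 mapping above): the base command is (1 << 16) | (1 << 15)
 * = 0x18000, so ZQCS is 0x18000 and ZQCL is 0x18000 | (1 << 10) = 0x18400.
 */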