/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef __NORTHBRIDGE_INTEL_GM45_GM45_H__
#define __NORTHBRIDGE_INTEL_GM45_GM45_H__

#include <stdint.h>

typedef enum {
	FSB_CLOCK_1067MHz = 0,
	FSB_CLOCK_800MHz  = 1,
	FSB_CLOCK_667MHz  = 2,
} fsb_clock_t;

typedef enum { /* Steppings below B1 were pre-production,
		  conversion stepping A1 is a newer GL40 with support for 800 MT/s on FSB/DDR.
		  We'll support B1, B2, B3, and conversion stepping A1. */
	STEPPING_A0 = 0,
	STEPPING_A1 = 1,
	STEPPING_A2 = 2,
	STEPPING_A3 = 3,
	STEPPING_B0 = 4,
	STEPPING_B1 = 5,
	STEPPING_B2 = 6,
	STEPPING_B3 = 7,
	STEPPING_CONVERSION_A1 = 9,
} stepping_t;

typedef enum {
	GMCH_GM45 = 0,
	GMCH_GM47,
	GMCH_GM49,
	GMCH_GE45,
	GMCH_GL40,
	GMCH_GL43,
	GMCH_GS40,
	GMCH_GS45,
	GMCH_PM45,
	GMCH_UNKNOWN
} gmch_gfx_t;

typedef enum {
	MEM_CLOCK_533MHz = 0,
	MEM_CLOCK_400MHz = 1,
	MEM_CLOCK_333MHz = 2,
	MEM_CLOCK_1067MT = 0,
	MEM_CLOCK_800MT  = 1,
	MEM_CLOCK_667MT  = 2,
} mem_clock_t;

typedef enum {
	DDR1 = 1,
	DDR2 = 2,
	DDR3 = 3,
} ddr_t;

typedef enum {
	CHANNEL_MODE_SINGLE,
	CHANNEL_MODE_DUAL_ASYNC,
	CHANNEL_MODE_DUAL_INTERLEAVED,
} channel_mode_t;

typedef enum { /* as in DDR3 spd */
	CHIP_WIDTH_x4	= 0,
	CHIP_WIDTH_x8	= 1,
	CHIP_WIDTH_x16	= 2,
	CHIP_WIDTH_x32	= 3,
} chip_width_t;

typedef enum { /* as in DDR3 spd */
	CHIP_CAP_256M	= 0,
	CHIP_CAP_512M	= 1,
	CHIP_CAP_1G	= 2,
	CHIP_CAP_2G	= 3,
	CHIP_CAP_4G	= 4,
	CHIP_CAP_8G	= 5,
	CHIP_CAP_16G	= 6,
} chip_capacity_t;

typedef enum { /* as in DDR2 spd */
	REFRESH_15_625	= 0,
	REFRESH_3_9	= 1,
	REFRESH_7_8	= 2,
	REFRESH_31_25	= 3,
	REFRESH_62_5	= 4,
	REFRESH_125	= 5,
} refresh_rate_t;

typedef struct {
	unsigned int	CAS;
	fsb_clock_t	fsb_clock;
	mem_clock_t	mem_clock;
	channel_mode_t	channel_mode;
	unsigned int	tRAS;
	unsigned int	tRP;
	unsigned int	tRCD;
	unsigned int	tRFC;
	unsigned int	tWR;
	unsigned int	tRD;
	unsigned int	tRRD;
	unsigned int	tFAW;
	unsigned int	tWL;
} timings_t;

typedef struct {
	unsigned int	card_type; /* 0x0: unpopulated,
				      0xa - 0xf: raw card type A - F */
	chip_width_t	chip_width;
	chip_capacity_t	chip_capacity;
	refresh_rate_t	refresh;
	unsigned int	page_size; /* of whole DIMM in Bytes (4096 or 8192) */
	unsigned int	banks;
	unsigned int	ranks;
	unsigned int	rank_capacity_mb; /* per rank in Megabytes */
} dimminfo_t;

/* The setup is one DIMM per channel, so there's no need to find a
   common timing setup between multiple chips (but chip and controller
   still need to be coordinated). */
typedef struct {
	stepping_t	stepping;
	int		txt_enabled;
	int		cores;
	gmch_gfx_t	gfx_type;
	int		max_ddr2_mt;
	int		max_ddr3_mt;
	fsb_clock_t	max_fsb;
	int		max_fsb_mhz;
	int		max_render_mhz;
	int		enable_igd;
	int		enable_peg;
	u16		ggc;

	/* to be filled in romstage main: */
	int		spd_type;
	timings_t	selected_timings;
	dimminfo_t	dimms[2];
	u8		spd_map[4];
	int		gs45_low_power_mode; /* low power mode of GMCH_GS45 */
	int		sff; /* small form factor option (soldered down DIMM) */
} sysinfo_t;

#define TOTAL_CHANNELS 2
#define CHANNEL_IS_POPULATED(dimms, idx) (dimms[idx].card_type != 0)
#define CHANNEL_IS_CARDF(dimms, idx) (dimms[idx].card_type == 0xf)
#define IF_CHANNEL_POPULATED(dimms, idx) if (dimms[idx].card_type != 0)
#define FOR_EACH_CHANNEL(idx) \
	for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
	FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)

#define RANKS_PER_CHANNEL 4 /* Only two may be populated */
#define IF_RANK_POPULATED(dimms, ch, r) \
	if (dimms[ch].card_type && ((r) < dimms[ch].ranks))
#define FOR_EACH_RANK_IN_CHANNEL(r) \
	for (r = 0; r < RANKS_PER_CHANNEL; ++r)
#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
	FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
#define FOR_EACH_RANK(ch, r) \
	FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
	FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)

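/*
 * Illustrative sketch only (not part of this interface): raminit code
 * typically walks populated DIMMs with the iterators above. Here
 * program_rank() is a hypothetical helper, not a real function:
 *
 *	int ch, r;
 *	FOR_EACH_POPULATED_RANK(sysinfo->dimms, ch, r)
 *		program_rank(&sysinfo->dimms[ch], ch, r);
 */
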
#define DDR3_MAX_CAS 18

enum {
	VCO_2666 = 4,
	VCO_3200 = 0,
	VCO_4000 = 1,
	VCO_5333 = 2,
};

/* Offsets of read/write training results in CMOS.
   They are restored on S3 resume. */
#define CMOS_READ_TRAINING	0x80 /* 16 bytes */
#define CMOS_WRITE_TRAINING	0x90 /* 16 bytes (could be reduced to 10 bytes) */

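/*
 * Illustrative sketch only: on the S3 path the saved results can be read
 * back byte-wise, assuming coreboot's cmos_read() from
 * <pc80/mc146818rtc.h>:
 *
 *	u8 training[16];
 *	for (int i = 0; i < 16; i++)
 *		training[i] = cmos_read(CMOS_READ_TRAINING + i);
 */
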
#include "memmap.h"

/*
 * D0:F0
 */
#define D0F0_EPBAR_LO		0x40
#define D0F0_EPBAR_HI		0x44
#define D0F0_MCHBAR_LO		0x48
#define D0F0_MCHBAR_HI		0x4c
#define D0F0_GGC		0x52
#define D0F0_DEVEN		0x54
#define D0F0_PCIEXBAR_LO	0x60
#define D0F0_PCIEXBAR_HI	0x64
#define D0F0_DMIBAR_LO		0x68
#define D0F0_DMIBAR_HI		0x6c
#define D0F0_PMBASE		0x78
#define D0F0_PAM(x)		(0x90 + (x)) /* 0-6 */
#define D0F0_REMAPBASE		0x98
#define D0F0_REMAPLIMIT		0x9a
#define D0F0_SMRAM		0x9d
#define D0F0_ESMRAMC		0x9e
#define D0F0_TOM		0xa0
#define D0F0_TOUUD		0xa2
#define D0F0_TOLUD		0xb0
#define D0F0_SKPD		0xdc /* Scratchpad Data */
#define D0F0_CAPID0		0xe0

/*
 * D1:F0 PEG
 */
#define PEG_CAP			0xa2
#define SLOTCAP			0xb4
#define PEGLC			0xec
#define D1F0_VCCAP		0x104
#define D1F0_VC0RCTL		0x114

/*
 * Graphics frequencies
 */
#define GCFGC_PCIDEV		PCI_DEV(0, 2, 0)
#define GCFGC_OFFSET		0xf0
#define GCFGC_CR_SHIFT		0
#define GCFGC_CR_MASK		(0xf << GCFGC_CR_SHIFT)
#define GCFGC_CS_SHIFT		8
#define GCFGC_CS_MASK		(0xf << GCFGC_CS_SHIFT)
#define GCFGC_CD_SHIFT		12
#define GCFGC_CD_MASK		(0x1 << GCFGC_CD_SHIFT)
#define GCFGC_UPDATE_SHIFT	5
#define GCFGC_UPDATE		(0x1 << GCFGC_UPDATE_SHIFT)

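/*
 * Illustrative sketch only: the render clock fields are read-modify-written
 * through PCI config space, assuming coreboot's pci_{read,write}_config16()
 * from <device/pci_ops.h>; new_cr is a placeholder value and the exact
 * update sequence may differ:
 *
 *	u16 gcfgc = pci_read_config16(GCFGC_PCIDEV, GCFGC_OFFSET);
 *	gcfgc &= ~GCFGC_CR_MASK;
 *	gcfgc |= (new_cr << GCFGC_CR_SHIFT) & GCFGC_CR_MASK;
 *	pci_write_config16(GCFGC_PCIDEV, GCFGC_OFFSET, gcfgc | GCFGC_UPDATE);
 */
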
/*
 * MCHBAR
 */

#include <northbridge/intel/common/fixed_bars.h>

#define HPLLVCO_MCHBAR		0x0c0f

#define PMSTS_MCHBAR		0x0f14	/* Self refresh channel status */
#define PMSTS_WARM_RESET	(1 << 1)
#define PMSTS_BOTH_SELFREFRESH	(1 << 0)

#define CLKCFG_MCHBAR		0x0c00
#define CLKCFG_FSBCLK_SHIFT	0
#define CLKCFG_FSBCLK_MASK	(7 << CLKCFG_FSBCLK_SHIFT)
#define CLKCFG_MEMCLK_SHIFT	4
#define CLKCFG_MEMCLK_MASK	(7 << CLKCFG_MEMCLK_SHIFT)
#define CLKCFG_UPDATE		(1 << 12)

#define SSKPD_MCHBAR		0x0c1c
#define SSKPD_CLK_SHIFT		0
#define SSKPD_CLK_MASK		(7 << SSKPD_CLK_SHIFT)

#define DCC_MCHBAR		0x200
#define DCC_NO_CHANXOR		(1 << 10)
#define DCC_INTERLEAVED		(1 <<  1)
#define DCC_CMD_SHIFT		16
#define DCC_CMD_MASK		(7 << DCC_CMD_SHIFT)
#define DCC_CMD_NOP		(1 << DCC_CMD_SHIFT)
#define DCC_CMD_ABP		(2 << DCC_CMD_SHIFT)
				/* For mode register mr0: */
#define DCC_SET_MREG		(3 << DCC_CMD_SHIFT)
				/* For extended mode registers mr1 to mr3: */
#define DCC_SET_EREG		(4 << DCC_CMD_SHIFT)
#define DCC_SET_EREG_SHIFT	21
#define DCC_SET_EREG_MASK	(DCC_CMD_MASK | (3 << DCC_SET_EREG_SHIFT))
#define DCC_SET_EREGx(x)	((DCC_SET_EREG |			     \
					(((x) - 1) << DCC_SET_EREG_SHIFT)) & \
				 DCC_SET_EREG_MASK)
#define DCC_CMD_CBR		(6 << DCC_CMD_SHIFT)

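/*
 * Illustrative sketch only: DRAM init selects a command by programming the
 * command field of DCC, assuming the mchbar accessors pulled in via
 * fixed_bars.h above (here the "set extended mode register" command for
 * mr1); the surrounding sequencing is omitted:
 *
 *	mchbar_clrsetbits32(DCC_MCHBAR, DCC_SET_EREG_MASK, DCC_SET_EREGx(1));
 */
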
/* Per channel DRAM Row Attribute registers (32-bit) */
#define CxDRA_MCHBAR(x)		(0x1208 + ((x) * 0x0100))
#define CxDRA_PAGESIZE_SHIFT(r)	((r) * 4)	/* Per rank r */
#define CxDRA_PAGESIZE_MASKr(r)	(0x7 << CxDRA_PAGESIZE_SHIFT(r))
#define CxDRA_PAGESIZE_MASK	0x0000ffff
#define CxDRA_PAGESIZE(r, p)	/* for log2(dimm page size in bytes) p */ \
	((((p) - 10) << CxDRA_PAGESIZE_SHIFT(r)) & CxDRA_PAGESIZE_MASKr(r))
#define CxDRA_BANKS_SHIFT(r)	(((r) * 3) + 16)
#define CxDRA_BANKS_MASKr(r)	(0x3 << CxDRA_BANKS_SHIFT(r))
#define CxDRA_BANKS_MASK	0x07ff0000
#define CxDRA_BANKS(r, b)	/* for number of banks b */ \
	(((b) << (CxDRA_BANKS_SHIFT(r) - 3)) & CxDRA_BANKS_MASKr(r))

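/*
 * Illustrative sketch only: the attribute bits for, e.g., rank 0 of a DIMM
 * with 8 banks and an 8 KiB page (2^13 bytes) would be encoded as
 *
 *	const u32 dra = CxDRA_PAGESIZE(0, 13) | CxDRA_BANKS(0, 8);
 *
 * and merged into CxDRA_MCHBAR(channel) under the corresponding masks.
 */
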
/*
 * Per channel DRAM Row Boundary registers (32-bit)
 * Every two ranks share one register and must be programmed at the same time.
 * All registers (4 ranks per channel) have to be set.
 */
#define CxDRBy_MCHBAR(x, r)	(0x1200 + ((x) * 0x0100) + (((r) / 2) * 4))
#define CxDRBy_BOUND_SHIFT(r)	(((r) % 2) * 16)
#define CxDRBy_BOUND_MASK(r)	(0x1fc << CxDRBy_BOUND_SHIFT(r))
#define CxDRBy_BOUND_MB(r, b)	/* for boundary in MB b */ \
	((((b) >> 5) << CxDRBy_BOUND_SHIFT(r)) & CxDRBy_BOUND_MASK(r))

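/*
 * Illustrative sketch only: setting the cumulative boundary of one rank
 * while keeping its neighbour's field intact, assuming the mchbar accessors
 * from fixed_bars.h and a boundary_mb value computed by the caller:
 *
 *	u32 reg = mchbar_read32(CxDRBy_MCHBAR(ch, r));
 *	reg &= ~CxDRBy_BOUND_MASK(r);
 *	reg |= CxDRBy_BOUND_MB(r, boundary_mb);
 *	mchbar_write32(CxDRBy_MCHBAR(ch, r), reg);
 */
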
#define CxDRC0_MCHBAR(x)	(0x1230 + ((x) * 0x0100))
#define CxDRC0_RANKEN0		(1 << 24)	/* Rank Enable */
#define CxDRC0_RANKEN1		(1 << 25)
#define CxDRC0_RANKEN2		(1 << 26)
#define CxDRC0_RANKEN3		(1 << 27)
#define CxDRC0_RANKEN(r)	(1 << (24 + (r)))
#define CxDRC0_RANKEN_MASK	(0xf << 24)
#define CxDRC0_RMS_SHIFT	8		/* Refresh Mode Select */
#define CxDRC0_RMS_MASK		(7 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_78US		(2 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_39US		(3 << CxDRC0_RMS_SHIFT)

#define CxDRC1_MCHBAR(x)	(0x1234 + ((x) * 0x0100))
#define CxDRC1_SSDS_SHIFT	24
#define CxDRC1_SSDS_MASK	(0xff << CxDRC1_SSDS_SHIFT)
#define CxDRC1_DS		(0x91 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_SS		(0xb1 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_NOTPOP(r)	(1 << (16 + (r))) /* Write 1 for Not Populated */
#define CxDRC1_NOTPOP_MASK	(0xf << 16)
#define CxDRC1_MUSTWR		(3 << 11)

#define CxDRC2_MCHBAR(x)	(0x1238 + ((x) * 0x0100))
#define CxDRC2_NOTPOP(r)	(1 << (24 + (r))) /* Write 1 for Not Populated */
#define CxDRC2_NOTPOP_MASK	(0xf << 24)
#define CxDRC2_MUSTWR		(1 << 12)
#define CxDRC2_CLK1067MT	(1 << 0)

/* DRAM Timing registers (32-bit each) */
#define CxDRT0_MCHBAR(x)	(0x1210 + ((x) * 0x0100))
#define CxDRT0_BtB_WtP_SHIFT	26
#define CxDRT0_BtB_WtP_MASK	(0x1f << CxDRT0_BtB_WtP_SHIFT)
#define CxDRT0_BtB_WtR_SHIFT	20
#define CxDRT0_BtB_WtR_MASK	(0x1f << CxDRT0_BtB_WtR_SHIFT)
#define CxDRT1_MCHBAR(x)	(0x1214 + ((x) * 0x0100))
#define CxDRT2_MCHBAR(x)	(0x1218 + ((x) * 0x0100))
#define CxDRT3_MCHBAR(x)	(0x121c + ((x) * 0x0100))
#define CxDRT4_MCHBAR(x)	(0x1220 + ((x) * 0x0100))
#define CxDRT5_MCHBAR(x)	(0x1224 + ((x) * 0x0100))
#define CxDRT6_MCHBAR(x)	(0x1228 + ((x) * 0x0100))

/* Clock disable registers (32-bit each) */
#define CxDCLKDIS_MCHBAR(x)	(0x120c + ((x) * 0x0100))
#define CxDCLKDIS_MASK		3
#define CxDCLKDIS_ENABLE	3 /* Always enable both clock pairs. */

/* On-Die-Termination registers (2x 32-bit per channel) */
#define CxODT_HIGH(x)		(0x124c + ((x) * 0x0100))
#define CxODT_LOW(x)		(0x1248 + ((x) * 0x0100))

/* Write Training registers. */
#define CxWRTy_MCHBAR(ch, s)	(0x1470 + ((ch) * 0x0100) + ((3 - (s)) * 4))

#define CxGTEW(x)		(0x1270 + ((x) * 0x100))
#define CxGTC(x)		(0x1274 + ((x) * 0x100))
#define CxDTPEW(x)		(0x1278 + ((x) * 0x100))
#define CxDTAEW(x)		(0x1280 + ((x) * 0x100))
#define CxDTC(x)		(0x1288 + ((x) * 0x100))


/*
 * DMIBAR
 */

#define DMIVCECH	0x000	/* 32bit */
#define DMIPVCCAP1	0x004	/* 32bit */

#define DMIVC0RCAP	0x010	/* 32bit */
#define DMIVC0RCTL	0x014	/* 32bit */
#define DMIVC0RSTS	0x01a	/* 16bit */
#define  VC0NP		(1 << 1)

#define DMIVC1RCAP	0x01c	/* 32bit */
#define DMIVC1RCTL	0x020	/* 32bit */
#define DMIVC1RSTS	0x026	/* 16bit */
#define  VC1NP		(1 << 1)

#define DMIESD		0x044	/* 32bit */

#define DMILE1D		0x050	/* 32bit */
#define DMILE1A		0x058	/* 64bit */
#define DMILE2D		0x060	/* 32bit */
#define DMILE2A		0x068	/* 64bit */

#define DMILCAP		0x084	/* 32bit */
#define DMILCTL		0x088	/* 16bit */
#define DMILSTS		0x08a	/* 16bit */

/*
 * EPBAR
 */

#define EPPVCCAP1	0x004	/* 32bit */
#define EPPVCCTL	0x00c	/* 32bit */

#define EPVC0RCAP	0x010	/* 32bit */
#define EPVC0RCTL	0x014	/* 32bit */
#define EPVC0RSTS	0x01a	/* 16bit */

#define EPVC1RCAP	0x01c	/* 32bit */
#define EPVC1RCTL	0x020	/* 32bit */
#define EPVC1RSTS	0x026	/* 16bit */

#define EPVC1MTS	0x028	/* 32bit */
#define EPVC1ITC	0x02c	/* 32bit */

#define EPVC1IST	0x038	/* 64bit */

#define EPESD		0x044	/* 32bit */

#define EPLE1D		0x050	/* 32bit */
#define EPLE1A		0x058	/* 64bit */
#define EPLE2D		0x060	/* 32bit */
#define EPLE2A		0x068	/* 64bit */

#define EP_PORTARB(x)	(0x100 + 4 * (x))	/* 256bit */

void gm45_early_init(void);
void gm45_early_reset(void);

void enter_raminit_or_reset(void);
void get_gmch_info(sysinfo_t *);
void raminit(sysinfo_t *, int s3resume);
void raminit_thermal(const sysinfo_t *);
void setup_sdram_meminfo(const sysinfo_t *);
void init_igd(const sysinfo_t *const);
void init_pm(const sysinfo_t *, int do_freq_scaling_cfg);
void igd_compute_ggc(sysinfo_t *const sysinfo);

int raminit_read_vco_index(void);
u32 raminit_get_rank_addr(unsigned int channel, unsigned int rank);

void raminit_rcomp_calibration(stepping_t stepping);
void raminit_reset_readwrite_pointers(void);
void raminit_receive_enable_calibration(int ddr_type, const timings_t *, const dimminfo_t *);
void raminit_write_training(const mem_clock_t, const dimminfo_t *, int s3resume);
void raminit_read_training(const dimminfo_t *, int s3resume);

void gm45_late_init(stepping_t);

u32 decode_igd_memory_size(u32 gms);
u32 decode_igd_gtt_size(u32 gsm);
u32 decode_tseg_size(u8 esmramc);

void init_iommu(void);

/* romstage mainboard hookups */
void mb_setup_superio(void); /* optional */
void get_mb_spd_addrmap(u8 spd_addrmap[4]);
void mb_pre_raminit_setup(sysinfo_t *); /* optional */
void mb_post_raminit_setup(void); /* optional */

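/*
 * Illustrative sketch only: the pieces above are glued together roughly as
 * follows on the romstage path. The board/chipset romstage is authoritative;
 * the order and arguments here are a simplified assumption:
 *
 *	static sysinfo_t sysinfo;
 *
 *	gm45_early_init();
 *	enter_raminit_or_reset();
 *	get_mb_spd_addrmap(sysinfo.spd_map);
 *	get_gmch_info(&sysinfo);
 *	mb_pre_raminit_setup(&sysinfo);
 *	raminit(&sysinfo, s3resume);
 *	raminit_thermal(&sysinfo);
 *	init_igd(&sysinfo);
 *	mb_post_raminit_setup();
 */
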
struct blc_pwm_t {
	char ascii_string[13];
	int pwm_freq; /* In Hz */
};
int get_blc_values(const struct blc_pwm_t **entries);
u16 get_blc_pwm_freq_value(void);

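/*
 * Illustrative sketch only: callers obtain the board's backlight table and
 * its length in one call; the return value is assumed to be the number of
 * entries:
 *
 *	const struct blc_pwm_t *blc;
 *	const int entries = get_blc_values(&blc);
 */
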
#include <device/device.h>
#include <edid.h>

struct acpi_rsdp;
unsigned long northbridge_write_acpi_tables(const struct device *device, unsigned long start,
						struct acpi_rsdp *rsdp);

#endif /* __NORTHBRIDGE_INTEL_GM45_GM45_H__ */