/*==============================================================================
Copyright(c) 2024 Intel Corporation
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files(the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and / or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
============================================================================*/
#include "Internal/Common/GmmLibInc.h"
#include "External/Common/GmmCachePolicy.h"
#include "External/Common/CachePolicy/GmmCachePolicyXe2_LPG.h"
//=============================================================================
//
// Function: GmmXe2_LPGCachePolicy::InitCachePolicy()
//
// Desc: This function initializes the Xe2 cache policy
//
// Return: GMM_STATUS
//
//-----------------------------------------------------------------------------
GMM_STATUS GmmLib::GmmXe2_LPGCachePolicy::InitCachePolicy()
{
    __GMM_ASSERTPTR(pCachePolicy, GMM_ERROR);

#define DEFINE_CACHE_ELEMENT(usage, l3_cc, l3_clos, l1cc, l2cc, l4cc, coherency, igPAT, segov) DEFINE_CP_ELEMENT(usage, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, segov, 0, 0, l1cc, l2cc, l4cc, coherency, l3_cc, l3_clos, igPAT)
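
// Illustrative note (hypothetical values, not taken from GmmXe2_LPGCachePolicy.h): each
// DEFINE_CACHE_ELEMENT row in the included header supplies only the Xe2-relevant columns
// (L3/L4 caching, CLOS, L1, coherency, igPAT, segov); the macro above forwards them into the
// generic DEFINE_CP_ELEMENT argument list and zeroes the fields Xe2 does not use. A row might
// look roughly like:
//   DEFINE_CACHE_ELEMENT(GMM_RESOURCE_USAGE_RENDER_TARGET, GMM_WB, 0, GMM_UC, GMM_UC, GMM_WB, GMM_NON_COHERENT_NO_SNOOP, 1, 0)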

#include "GmmXe2_LPGCachePolicy.h"

    SetUpMOCSTable();
    SetupPAT();

    // Define index of cache element
    uint32_t Usage          = 0;
    uint32_t ReservedPATIdx = 13; /* Rsvd PAT section 13-19 */

#if (_WIN32 && (_DEBUG || _RELEASE_INTERNAL))
    void *pKmdGmmContext = NULL;
#if (defined(__GMM_KMD__))
    pKmdGmmContext = pGmmLibContext->GetGmmKmdContext();
#endif
    OverrideCachePolicy(pKmdGmmContext);
#endif
    // Process the cache policy and fill in the look up table
    for (; Usage < GMM_RESOURCE_USAGE_MAX; Usage++)
    {
        bool                         CachePolicyError = false;
        int32_t                      PATIdx = -1, CPTblIdx = -1, PATIdxCompressed = -1, CoherentPATIdx = -1;
        uint32_t                     i, j;
        GMM_XE2_PRIVATE_PAT          UsagePATElement = {0};
        GMM_CACHE_POLICY_TBL_ELEMENT UsageEle        = {0};
        GMM_PTE_CACHE_CONTROL_BITS   PTE             = {0};

        // MOCS data
        {
            // Get L3, L4 and convert GMM indicative values to actual register values.
            GetL3L4(&UsageEle, &UsagePATElement, Usage);
            // Convert L1 GMM indicative values to actual register values and store them into pCachePolicy to return to UMDs.
            SetL1CachePolicy(Usage);

            if ((!pGmmLibContext->GetSkuTable().FtrL3TransientDataFlush) && (UsageEle.L3.PhysicalL3.L3CC == GMM_GFX_PHY_L3_MT_WB_XD))
            {
                UsageEle.L3.PhysicalL3.L3CC = GMM_GFX_PHY_L3_MT_WB; // No Transient Flush Support
            }

            /* If MOCS is not needed, fall back to "Defer to PAT", i.e. MOCS#0 */
            if (false == UsageEle.L3.PhysicalL3.igPAT)
            {
                /* Set cache policy index to deferred-to-PAT, i.e. MOCS Index 0 */
                CPTblIdx = 0;
            }
            else
            {
                /* MOCS Index 1 to CurrentMaxMocsIndex are valid */
                for (j = 1; j <= CurrentMaxMocsIndex; j++)
                {
                    GMM_CACHE_POLICY_TBL_ELEMENT *TblEle = &pGmmLibContext->GetCachePolicyTlbElement()[j];
                    if (UsageEle.L3.PhysicalL3.L4CC == TblEle->L3.PhysicalL3.L4CC &&
                        UsageEle.L3.PhysicalL3.L3CC == TblEle->L3.PhysicalL3.L3CC &&
                        UsageEle.L3.PhysicalL3.L3CLOS == TblEle->L3.PhysicalL3.L3CLOS &&
                        UsageEle.L3.PhysicalL3.igPAT == true)
                    {
                        CPTblIdx = j;
                        break;
                    }
                }
            }

            if (CPTblIdx == -1)
            {
                /* Invalid MOCS setting; fail the GMM initialization */
                GMM_ASSERTDPF(false, "CRITICAL: Cache Policy Usage value for L3/L4 specified by Client is not defined in Fixed MOCS Table");
                CachePolicyError = true;
            }
        }

        /*
            Validate Caching restrictions as below
            1. MemoryType WB-XD must be used in Non-Coherent mode and is allowed only for displayable surfaces
            2. Coherent mode (1-way/2-way) must be Memory Type WB
            3. No 2-way coherency on dGPU
            4. Memory Type WT is available only for L4 in Non-Coherent mode
            5. Memory Type UC must be used in Non-Coherent mode
        */

        // PAT data
        {
            if (!pGmmLibContext->GetSkuTable().FtrL3TransientDataFlush && (UsagePATElement.Xe2.L3CC == GMM_GFX_PHY_L3_MT_WB_XD))
            {
                UsagePATElement.Xe2.L3CC = GMM_GFX_PHY_L3_MT_WB; // No Transient Flush Support
            }

            /* Find a PATIndex from the PAT table for the uncompressed case */
            if ((UsagePATElement.Xe2.L4CC == GMM_GFX_PHY_L4_MT_WT) && (UsagePATElement.Xe2.L3CC == GMM_GFX_PHY_L3_MT_WB_XD))
            {
                // With the L3:XD, L4:WT, NC combination
                if (pGmmLibContext->GetSkuTable().FtrDiscrete)
                {
                    // On BMG, L4 is a pass-through; demote L4 to UC, keep L3 at XD
                    PATIdx = PAT6;
                }
                else
                {
                    // On LNL, L3:XD is not needed
                    PATIdx = PAT13;
                }
            }
            else
            {
                for (i = 0; i <= CurrentMaxPATIndex; i++)
                {
                    GMM_PRIVATE_PAT PAT = GetPrivatePATEntry(i);
                    if (UsagePATElement.Xe2.L4CC == PAT.Xe2.L4CC &&
                        UsagePATElement.Xe2.Coherency == PAT.Xe2.Coherency &&
                        UsagePATElement.Xe2.L3CC == PAT.Xe2.L3CC &&
                        UsagePATElement.Xe2.L3CLOS == PAT.Xe2.L3CLOS &&
                        false == PAT.Xe2.LosslessCompressionEn)
                    {
                        PATIdx = i;
                        break;
                    }
                }
            }

            /* Find a PATIndex from the PAT table for the compressed case */
            for (i = 0; i <= CurrentMaxPATIndex; i++)
            {
                GMM_PRIVATE_PAT PAT = GetPrivatePATEntry(i);
                if (UsagePATElement.Xe2.L4CC == PAT.Xe2.L4CC &&
                    UsagePATElement.Xe2.Coherency == PAT.Xe2.Coherency &&
                    UsagePATElement.Xe2.L3CC == PAT.Xe2.L3CC &&
                    UsagePATElement.Xe2.L3CLOS == PAT.Xe2.L3CLOS &&
                    true == PAT.Xe2.LosslessCompressionEn)
                {
                    PATIdxCompressed = i;
                    break;
                }
            }

            if (PATIdx == -1)
            {
                // Didn't find the caching settings in one of the already programmed PAT table entries.
                // Need to add a new lookup table entry.
                GMM_ASSERTDPF(
                "Cache Policy Init Error: Invalid Cache Programming, too many unique caching combinations"
                "(we only support NumPATRegisters = %d)",
                CurrentMaxPATIndex);
                CachePolicyError = true;

                PATIdx = GMM_PAT_ERROR;
            }

            /* Find a PATIndex for the coherent uncompressed case; if the usage is already 1-way or 2-way coherent, take that index, otherwise search for a 1-way coherent equivalent */
            if ((UsagePATElement.Xe2.Coherency == GMM_GFX_PHY_COHERENT_ONE_WAY_IA_SNOOP) ||
                (UsagePATElement.Xe2.Coherency == GMM_GFX_PHY_COHERENT_TWO_WAY_IA_GPU_SNOOP))
            {
                // Already coherent
                CoherentPATIdx = PATIdx;
            }
            else
            {
                // Search for the equivalent one-way coherent index
                for (i = 0; i <= CurrentMaxPATIndex; i++)
                {
                    GMM_PRIVATE_PAT PAT = GetPrivatePATEntry(i);
                    if (UsagePATElement.Xe2.L4CC == PAT.Xe2.L4CC &&
                        UsagePATElement.Xe2.L3CC == PAT.Xe2.L3CC &&
                        UsagePATElement.Xe2.L3CLOS == PAT.Xe2.L3CLOS &&
                        GMM_GFX_PHY_COHERENT_ONE_WAY_IA_SNOOP == PAT.Xe2.Coherency)
                    {
                        if ((false == PAT.Xe2.LosslessCompressionEn) && (CoherentPATIdx == -1))
                        {
                            CoherentPATIdx = i;
                        }
                        if (CoherentPATIdx != -1)
                        {
                            break;
                        }
                    }
                }
                if (CoherentPATIdx == -1)
                {
                    // Redo the matching based on L3:UC, L4:UC; we should find one
                    for (i = 0; i <= CurrentMaxPATIndex; i++)
                    {
                        GMM_PRIVATE_PAT PAT = GetPrivatePATEntry(i);
                        if (GMM_GFX_PHY_L4_MT_UC == PAT.Xe2.L4CC &&
                            GMM_GFX_PHY_L3_MT_UC == PAT.Xe2.L3CC &&
                            UsagePATElement.Xe2.L3CLOS == PAT.Xe2.L3CLOS &&
                            GMM_GFX_PHY_COHERENT_ONE_WAY_IA_SNOOP == PAT.Xe2.Coherency)
                        {
                            if ((false == PAT.Xe2.LosslessCompressionEn) && (CoherentPATIdx == -1))
                            {
                                CoherentPATIdx = i;
                            }

                            if (CoherentPATIdx != -1)
                            {
                                break;
                            }
                        }
                    }
                }
            }
        }
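
        // Note (added for clarity; an assumption based on the macro names, not original text):
        // CoherentPATIdx is stored split across two bit-fields -- the low bits via
        // GET_COHERENT_PATINDEX_LOWER_BITS() and the top bit via GET_COHERENT_PATINDEX_HIGHER_BIT().
        // CachePolicyGetPATIndex() below reassembles the value and treats a set higher bit
        // (e.g. a CoherentPATIdx left at -1) as "no coherent PAT index available".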
        pCachePolicy[Usage].PATIndex                  = PATIdx;
        pCachePolicy[Usage].CoherentPATIndex          = GET_COHERENT_PATINDEX_LOWER_BITS(CoherentPATIdx); // Coherent uncompressed lower bits
        pCachePolicy[Usage].CoherentPATIndexHigherBit = GET_COHERENT_PATINDEX_HIGHER_BIT(CoherentPATIdx); // Coherent uncompressed higher bit
        pCachePolicy[Usage].PATIndexCompressed        = PATIdxCompressed;
        pCachePolicy[Usage].PTE.DwordValue            = GMM_GET_PTE_BITS_FROM_PAT_IDX(PATIdx) & 0xFFFFFFFF;
        pCachePolicy[Usage].PTE.HighDwordValue        = GMM_GET_PTE_BITS_FROM_PAT_IDX(PATIdx) >> 32;
        pCachePolicy[Usage].MemoryObjectOverride.XE_HP.Index         = CPTblIdx;
        pCachePolicy[Usage].MemoryObjectOverride.XE_HP.EncryptedData = 0;
        pCachePolicy[Usage].Override                  = ALWAYS_OVERRIDE;

        if (CachePolicyError)
        {
            GMM_ASSERTDPF(false, "Cache Policy Init Error: Invalid Cache Programming ");

            return GMM_INVALIDPARAM;
        }
    }
    return GMM_SUCCESS;
}

//=============================================================================
//
// Function: GmmXe2_LPGCachePolicy::GetL3L4
//
// Desc: This function converts GMM indicative values to actual register values
//
// Parameters:
//
// Return: void
//
//-----------------------------------------------------------------------------
void GmmLib::GmmXe2_LPGCachePolicy::GetL3L4(GMM_CACHE_POLICY_TBL_ELEMENT *pUsageEle, GMM_XE2_PRIVATE_PAT *pUsagePATElement, uint32_t Usage)
{

    // MOCS
    pUsageEle->L3.PhysicalL3.Reserved0 = pUsageEle->L3.PhysicalL3.Reserved = 0;
    // L3CLOS
    pUsageEle->L3.PhysicalL3.L3CLOS = 0;
    // IgPAT
    pUsageEle->L3.PhysicalL3.igPAT = pCachePolicy[Usage].IgnorePAT;

    // PAT
    pUsagePATElement->Xe2.Reserved1 = 0;
    pUsagePATElement->Xe2.Reserved2 = 0;

    pUsagePATElement->Xe2.L3CLOS = 0;
    switch (pCachePolicy[Usage].L3CC)
    {
    case GMM_UC:
        pUsageEle->L3.PhysicalL3.L3CC = GMM_GFX_PHY_L3_MT_UC;
        pUsagePATElement->Xe2.L3CC    = GMM_GFX_PHY_L3_MT_UC;
        break;
    case GMM_WB:
        pUsageEle->L3.PhysicalL3.L3CC = GMM_GFX_PHY_L3_MT_WB;
        pUsagePATElement->Xe2.L3CC    = GMM_GFX_PHY_L3_MT_WB;
        break;
    case GMM_WBTD:
        pUsageEle->L3.PhysicalL3.L3CC = GMM_GFX_PHY_L3_MT_WB_XD; // Transient:Display on Xe2
        pUsagePATElement->Xe2.L3CC    = GMM_GFX_PHY_L3_MT_WB_XD;
        break;
    default:
        pUsageEle->L3.PhysicalL3.L3CC = GMM_GFX_PHY_L3_MT_UC;
        pUsagePATElement->Xe2.L3CC    = GMM_GFX_PHY_L3_MT_UC;
    }

    switch (pCachePolicy[Usage].L4CC)
    {
    case GMM_UC:
        pUsageEle->L3.PhysicalL3.L4CC = GMM_GFX_PHY_L4_MT_UC;
        pUsagePATElement->Xe2.L4CC    = GMM_GFX_PHY_L4_MT_UC;
        break;
    case GMM_WB:
        pUsageEle->L3.PhysicalL3.L4CC = GMM_GFX_PHY_L4_MT_WB;
        pUsagePATElement->Xe2.L4CC    = GMM_GFX_PHY_L4_MT_WB;
        break;
    case GMM_WT:
        pUsageEle->L3.PhysicalL3.L4CC = GMM_GFX_PHY_L4_MT_WT;
        pUsagePATElement->Xe2.L4CC    = GMM_GFX_PHY_L4_MT_WT;
        break;
    default:
        pUsageEle->L3.PhysicalL3.L4CC = GMM_GFX_PHY_L4_MT_UC;
        pUsagePATElement->Xe2.L4CC    = GMM_GFX_PHY_L4_MT_UC;
    }

    switch (pCachePolicy[Usage].Coherency)
    {
    case GMM_NON_COHERENT_NO_SNOOP:
        pUsagePATElement->Xe2.Coherency = GMM_GFX_NON_COHERENT_NO_SNOOP;
        break;
    case GMM_COHERENT_ONE_WAY_IA_SNOOP:
        pUsagePATElement->Xe2.Coherency = GMM_GFX_COHERENT_ONE_WAY_IA_SNOOP;
        break;
    case GMM_COHERENT_TWO_WAY_IA_GPU_SNOOP:
        pUsagePATElement->Xe2.Coherency = GMM_GFX_COHERENT_TWO_WAY_IA_GPU_SNOOP;
        break;
    default:
        pUsagePATElement->Xe2.Coherency = GMM_GFX_NON_COHERENT_NO_SNOOP;
        break;
    }

    if (pGmmLibContext->GetWaTable().Wa_14018443005 &&
        (pCachePolicy[Usage].L3CC == GMM_UC) &&
        (ISWA_1401844305USAGE(Usage)) &&
        (pGmmLibContext->GetClientType() != GMM_KMD_VISTA) &&
        (pGmmLibContext->GetClientType() != GMM_OCL_VISTA))
    {
        pUsageEle->L3.PhysicalL3.L3CC = GMM_GFX_PHY_L3_MT_WB;
        pUsagePATElement->Xe2.L3CC    = GMM_GFX_PHY_L3_MT_WB;
        pCachePolicy[Usage].L3CC      = GMM_WB;
    }
}

/////////////////////////////////////////////////////////////////////////////////////
/// A simple getter function returning the PAT (cache policy) for a given
/// Usage of the named resource pResInfo.
/// Typically used to populate PPGTT/GGTT.
///
/// @param[in] pResInfo: Resource info for resource, can be NULL.
/// @param[in] Usage: Current usage for resource.
/// @param[in] pCompressionEnable: Xe2 compression parameter
/// @param[in] IsCpuCacheable: Indicates cacheability
/// @return PATIndex
/////////////////////////////////////////////////////////////////////////////////////
uint32_t GMM_STDCALL GmmLib::GmmXe2_LPGCachePolicy::CachePolicyGetPATIndex(GMM_RESOURCE_INFO *pResInfo, GMM_RESOURCE_USAGE_TYPE Usage, bool *pCompressionEnable, bool IsCpuCacheable)
{
    __GMM_ASSERT(pGmmLibContext->GetCachePolicyElement(Usage).Initialized);

    uint32_t                 PATIndex             = pGmmLibContext->GetCachePolicyElement(Usage).PATIndex;
    GMM_CACHE_POLICY_ELEMENT TempElement          = pGmmLibContext->GetCachePolicyElement(Usage);
    uint32_t                 TempCoherentPATIndex = 0;

    // This is to check whether PATIndexCompressed and CoherentPATIndex are valid.
    // Increment by 1 so that the value rolls over and resets to 0 if the PAT index is not valid.
    TempElement.PATIndexCompressed += 1;
    TempCoherentPATIndex = (uint32_t)GET_COHERENT_PATINDEX_VALUE(pGmmLibContext, Usage);
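
    // Illustrative note (assumption, not from the original source): these fields are narrow
    // bit-fields, so an entry initialized from -1 is stored as all ones. The "+ 1" above then
    // wraps an invalid PATIndexCompressed back to 0, and an invalid CoherentPATIndex is exposed
    // below through its higher bit being set; both cases are mapped to GMM_PAT_ERROR.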

    // The higher bit of CoherentPATIndex tells us whether it is valid: 0 --> valid, 1 --> invalid
    uint32_t CoherentPATIndex = (uint32_t)((GET_COHERENT_PATINDEX_HIGHER_BIT(TempCoherentPATIndex) == 1) ? GMM_PAT_ERROR : GET_COHERENT_PATINDEX_VALUE(pGmmLibContext, Usage));
    // For PATIndexCompressed, the rolled-over value is 0 if it is invalid
    uint32_t PATIndexCompressed = (uint32_t)(TempElement.PATIndexCompressed == 0 ? GMM_PAT_ERROR : pGmmLibContext->GetCachePolicyElement(Usage).PATIndexCompressed);
    uint32_t ReturnPATIndex     = GMM_PAT_ERROR;
    bool     CompressionEnable  = (pCompressionEnable) ? *pCompressionEnable : false;

    // Prevent wrong Usage for XAdapter resources. UMDs do not call GetMemoryObject on shader resources yet,
    // but if they add it, someone could call it without knowing the restriction.
    if (pResInfo &&
        pResInfo->GetResFlags().Info.XAdapter &&
        (Usage != GMM_RESOURCE_USAGE_XADAPTER_SHARED_RESOURCE))
    {
        __GMM_ASSERT(false);
    }

#if (defined __linux__ || defined(WDDM_LINUX))
    IsCpuCacheable = false;
#endif
    // Requested compressed and coherent
    if (CompressionEnable && IsCpuCacheable)
    {
        // Return coherent uncompressed
        ReturnPATIndex    = CoherentPATIndex;
        CompressionEnable = false;
        GMM_ASSERTDPF(false, "Coherent Compressed is not supported on Xe2. However, respecting the coherency and returning CoherentPATIndex");
    }
    // Requested compressed only
    else if (CompressionEnable)
    {
        if (GMM_PAT_ERROR != PATIndexCompressed)
        {
            // Return compressed; it may or may not be coherent, depending on the original usage
            ReturnPATIndex    = PATIndexCompressed;
            CompressionEnable = true;
        }
        else
        {
            // Return the original index
            ReturnPATIndex    = PATIndex;
            CompressionEnable = false;
        }
    }
    // Requested coherent only
    else if (IsCpuCacheable)
    {
        // Return coherent uncompressed
        ReturnPATIndex    = CoherentPATIndex;
        CompressionEnable = false;
    }
    /* Requested uncompressed PAT */
    else
    {
        if (GMM_PAT_ERROR != PATIndex)
        {
            ReturnPATIndex    = PATIndex;
            CompressionEnable = false;
        }
    }

    /* No valid PAT Index found */
    if (GMM_PAT_ERROR == ReturnPATIndex)
    {
        ReturnPATIndex    = GMM_XE2_DEFAULT_PAT_INDEX; // Default to uncached PAT index 2: GMM_CP_NON_COHERENT_UC
        CompressionEnable = false;
        __GMM_ASSERT(false);
    }

    if (pCompressionEnable)
    {
        *pCompressionEnable = CompressionEnable;
    }

    return ReturnPATIndex;
}
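
// Illustrative call site (hypothetical, not from this file): a caller holding the Xe2 cache
// policy object could query the PAT index to program into a PTE roughly as follows, where
// pXe2CachePolicy is an assumed pointer to this GmmXe2_LPGCachePolicy instance:
//
//   bool     CompressionEnable = false; // in: compression requested, out: compression granted
//   uint32_t PatIndex          = pXe2CachePolicy->CachePolicyGetPATIndex(pResInfo,
//                                                                        GMM_RESOURCE_USAGE_RENDER_TARGET,
//                                                                        &CompressionEnable,
//                                                                        false /* IsCpuCacheable */);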

//=============================================================================
//
// Function: SetUpMOCSTable
//
// Desc: Programs the fixed Xe2 MOCS table entries
//
// Parameters:
//
// Return: void
//
//-----------------------------------------------------------------------------
void GmmLib::GmmXe2_LPGCachePolicy::SetUpMOCSTable()
{
    GMM_CACHE_POLICY_TBL_ELEMENT *pCachePolicyTlbElement = &(pGmmLibContext->GetCachePolicyTlbElement()[0]);

#define L4_WB (0x0)
#define L4_WT (0x1)
#define L4_UC (0x3)

#define L3_WB (0x0)
#define L3_XD (pGmmLibContext->GetSkuTable().FtrL3TransientDataFlush ? 0x1 : 0x0)
#define L3_UC (0x3)

#define GMM_DEFINE_MOCS(indx, L4Caching, L3Caching, L3ClassOfService, ignorePAT)      \
    {                                                                                 \
        pCachePolicyTlbElement[indx].L3.PhysicalL3.L4CC      = L4Caching;             \
        pCachePolicyTlbElement[indx].L3.PhysicalL3.Reserved0 = 0;                     \
        pCachePolicyTlbElement[indx].L3.PhysicalL3.L3CC      = L3Caching;             \
        pCachePolicyTlbElement[indx].L3.PhysicalL3.L3CLOS    = L3ClassOfService;      \
        pCachePolicyTlbElement[indx].L3.PhysicalL3.igPAT     = ignorePAT;             \
    }

    // clang-format off
    // Default MOCS Table
    for (uint32_t j = 0; j < GMM_XE2_NUM_MOCS_ENTRIES; j++)
    {   //              Index   CachingPolicy   L3Caching   L3ClassOfService   ignorePAT
        GMM_DEFINE_MOCS( j,     L4_UC,          L3_UC,      0,                 0)
    }

    //              Index   L4 CachingPolicy   L3 CachingPolicy   L3 CLOS   ignorePAT
    GMM_DEFINE_MOCS( 0,     L4_UC,             L3_WB,             0,        0)   // Defer to PAT
    GMM_DEFINE_MOCS( 1,     L4_UC,             L3_WB,             0,        1)   // L3
    GMM_DEFINE_MOCS( 2,     L4_WB,             L3_UC,             0,        1)   // L4
    GMM_DEFINE_MOCS( 3,     L4_UC,             L3_UC,             0,        1)   // UC
    GMM_DEFINE_MOCS( 4,     L4_WB,             L3_WB,             0,        1)   // L3+L4

    CurrentMaxMocsIndex        = 4;
    CurrentMaxL1HdcMocsIndex   = 0;
    CurrentMaxSpecialMocsIndex = 0;
    // clang-format on

#undef GMM_DEFINE_MOCS
#undef L4_WB
#undef L4_WT
#undef L4_UC

#undef L3_WB
#undef L3_XD
#undef L3_UC
}


//=============================================================================
//
// Function: SetupPAT
//
// Desc: Programs the fixed Xe2 private PAT table entries
//
// Parameters:
//
// Return: GMM_STATUS
//
//-----------------------------------------------------------------------------
GMM_STATUS GmmLib::GmmXe2_LPGCachePolicy::SetupPAT()
{
    GMM_PRIVATE_PAT *pPATTlbElement = &(pGmmLibContext->GetPrivatePATTable()[0]);

#define L4_WB (0x0)
#define L4_WT (0x1)
#define L4_UC (0x3)

#define L3_WB (0x0)
#define L3_XD (pGmmLibContext->GetSkuTable().FtrL3TransientDataFlush ? 0x1 : 0x0)
#define L3_UC (0x3)
#define L3_XA (0x2) // WB Transient App

#define GMM_DEFINE_PAT_ELEMENT(indx, Coh, L4Caching, L3Caching, L3ClassOfService, CompressionEn, NoCachePromote)  \
    {                                                                                                             \
        pPATTlbElement[indx].Xe2.Coherency             = Coh;                                                     \
        pPATTlbElement[indx].Xe2.L4CC                  = L4Caching;                                               \
        pPATTlbElement[indx].Xe2.Reserved1             = 0;                                                       \
        pPATTlbElement[indx].Xe2.Reserved2             = 0;                                                       \
        pPATTlbElement[indx].Xe2.L3CC                  = L3Caching;                                               \
        pPATTlbElement[indx].Xe2.L3CLOS                = L3ClassOfService;                                        \
        pPATTlbElement[indx].Xe2.LosslessCompressionEn = CompressionEn;                                           \
        pPATTlbElement[indx].Xe2.NoCachingPromote      = NoCachePromote;                                          \
    }

    // clang-format off

    // Default PAT Table: NumPATRegisters (32) entries
    for (uint32_t i = 0; i < (NumPATRegisters); i++)
    {   //                     Index   Coherency   CachingPolicy   L3Caching   L3ClassOfService   CompressionEn   NoCachingPromote
        GMM_DEFINE_PAT_ELEMENT( i,     3,          L4_UC,          L3_UC,      0,                 0,              0);
    }

    // Fixed PAT Table
    //                      Index   Coherency   L4 CachingPolicy   L3 CachingPolicy   L3 CLOS   CompressionEn   NoCachingPromote
    // Group: GGTT/PPGTT[4]
    GMM_DEFINE_PAT_ELEMENT( 0,      0,          L4_UC,             L3_WB,             0,        0,              0)   //       | L3_WB
    GMM_DEFINE_PAT_ELEMENT( 1,      2,          L4_UC,             L3_WB,             0,        0,              0)   //       | L3_WB | 1 way coherent
    GMM_DEFINE_PAT_ELEMENT( 2,      3,          L4_UC,             L3_WB,             0,        0,              0)   //       | L3_WB | 2 way coherent
    GMM_DEFINE_PAT_ELEMENT( 3,      0,          L4_UC,             L3_UC,             0,        0,              0)   // **UC
    // Group: 1 way Coh
    GMM_DEFINE_PAT_ELEMENT( 4,      2,          L4_WB,             L3_UC,             0,        0,              0)   // L4_WB |       | 1 way coherent
    GMM_DEFINE_PAT_ELEMENT( 5,      2,          L4_UC,             L3_UC,             0,        0,              0)   // **UC  |       | 1 way coherent
    // Group: Compression Disabled
    GMM_DEFINE_PAT_ELEMENT( 6,      0,          L4_UC,             L3_XD,             0,        0,              1)   //       | L3_XD
    GMM_DEFINE_PAT_ELEMENT( 7,      3,          L4_WB,             L3_UC,             0,        0,              0)   // L4_WB |       | 2 way coherent
    GMM_DEFINE_PAT_ELEMENT( 8,      0,          L4_WB,             L3_UC,             0,        0,              0)   // L4_WB
    // Group: Compression Enabled
    GMM_DEFINE_PAT_ELEMENT( 9,      0,          L4_UC,             L3_WB,             0,        1,              0)   //       | L3_WB | Comp
    GMM_DEFINE_PAT_ELEMENT( 10,     0,          L4_WB,             L3_UC,             0,        1,              0)   // L4_WB |       | Comp
    GMM_DEFINE_PAT_ELEMENT( 11,     0,          L4_UC,             L3_XD,             0,        1,              1)   //       | L3_XD | Comp
    GMM_DEFINE_PAT_ELEMENT( 12,     0,          L4_UC,             L3_UC,             0,        1,              0)   // **UC  |       | Comp

    GMM_DEFINE_PAT_ELEMENT( 13,     0,          L4_WB,             L3_WB,             0,        0,              0)   // L4_WB | L3_WB
    GMM_DEFINE_PAT_ELEMENT( 14,     0,          L4_WB,             L3_WB,             0,        1,              0)   // L4_WB | L3_WB | Comp
    GMM_DEFINE_PAT_ELEMENT( 15,     0,          L4_WT,             L3_XD,             0,        1,              1)   // L4_WT | L3_XD | Comp

    // Reserved 16-19
    // Group: CLOS1
    GMM_DEFINE_PAT_ELEMENT( 20,     0,          L4_UC,             L3_WB,             1,        0,              0)   //       | L3_WB
    GMM_DEFINE_PAT_ELEMENT( 21,     0,          L4_UC,             L3_WB,             1,        1,              0)   //       | L3_WB | Comp
    GMM_DEFINE_PAT_ELEMENT( 22,     2,          L4_UC,             L3_WB,             1,        0,              0)   //       | L3_WB | 1 way coherent
    GMM_DEFINE_PAT_ELEMENT( 23,     3,          L4_UC,             L3_WB,             1,        0,              0)   //       | L3_WB | 2 way coherent
    // Group: CLOS2 => Clone of CLOS1
    GMM_DEFINE_PAT_ELEMENT( 24,     0,          L4_UC,             L3_WB,             2,        0,              0)   //       | L3_WB
    GMM_DEFINE_PAT_ELEMENT( 25,     0,          L4_UC,             L3_WB,             2,        1,              0)   //       | L3_WB | Comp
    GMM_DEFINE_PAT_ELEMENT( 26,     2,          L4_UC,             L3_WB,             2,        0,              0)   //       | L3_WB | 1 way coherent
    GMM_DEFINE_PAT_ELEMENT( 27,     3,          L4_UC,             L3_WB,             2,        0,              0)   //       | L3_WB | 2 way coherent
    // Group: CLOS3 => Clone of CLOS1
    GMM_DEFINE_PAT_ELEMENT( 28,     0,          L4_UC,             L3_WB,             3,        0,              0)   //       | L3_WB
    GMM_DEFINE_PAT_ELEMENT( 29,     0,          L4_UC,             L3_WB,             3,        1,              0)   //       | L3_WB | Comp
    GMM_DEFINE_PAT_ELEMENT( 30,     2,          L4_UC,             L3_WB,             3,        0,              0)   //       | L3_WB | 1 way coherent
    GMM_DEFINE_PAT_ELEMENT( 31,     3,          L4_UC,             L3_WB,             3,        0,              0)   //       | L3_WB | 2 way coherent

    CurrentMaxPATIndex = 31;
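
    // Note (added for clarity): the PAT6 / PAT13 indices used by InitCachePolicy() for the
    // L3:XD + L4:WT special case correspond to entries 6 (L3_XD, L4_UC) and 13 (L3_WB, L4_WB)
    // of this fixed table.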

    // clang-format on
#undef GMM_DEFINE_PAT_ELEMENT
#undef L4_WB
#undef L4_WT
#undef L4_UC

#undef L3_WB
#undef L3_XD
#undef L3_UC
#undef L3_XA
    return GMM_SUCCESS;
}