/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

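/*
 * ESR_EL3 values for an SMC #0 trapped to EL3: EC is 0x17 (SMC executed in
 * AArch64 state) or 0x13 (SMC executed in AArch32 state) in bits [31:26],
 * the IL bit [25] is set and the ISS field is zero.
 */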
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
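/*
 * A dedicated vector table is only needed when at least one of the
 * mitigations applied on exception entry from lower ELs (the dynamic
 * CVE-2018-3639 mitigation or the CVE-2022-23960 BHB mitigation) is built in.
 */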
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 take the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * Caller must pass the esr_el3 value to compare against via x2.
	 * Save and restore these registers outside of this function from the
	 * context before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC is coming from A64/A32 state with immediate #0
	 * and W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated by the caller with the expected ESR_EL3 value for
	 * an SMC #0 from the relevant execution state.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

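	/*
	 * Update the hardware state to match the request: EQ (x1 == 0) means
	 * the caller asked for the mitigation to be disabled, so clear the
	 * load-pass-store disable bit; otherwise set it.
	 */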
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

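/*
 * The reset-time erratum workarounds below follow the errata framework
 * pattern: each workaround_reset_start/workaround_reset_end pair registers a
 * workaround applied by the reset function, and the matching
 * check_erratum_ls/check_erratum_range declaration reports the affected
 * revision range (e.g. CPU_REV(1, 0) means revisions <= r1p0).
 */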
workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
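	/* Report ERRATA_APPLIES for revisions <= r3p0 (0x30) */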
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
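	/*
	 * Program three sets of the IMPLEMENTATION DEFINED system registers
	 * S3_6_C15_C8_n (entries 3, 4 and 5, selected via S3_6_C15_C8_0)
	 * with the erratum-specific values.
	 */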
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Errata workaround for Cortex-A76 erratum 1165522.
 * This erratum applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum it is applied unconditionally
 * when built in; in that case, report it as applicable.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* CVE-2022-23960 has no workaround in the CPU. Generic code must take care. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET

/* ERRATA_DSU_798953:
 * This erratum is defined in dsu_helpers.S but applies to cortex_a76
 * as well. Hence, create symbolic names aliasing the existing errata
 * workaround functions to get them registered under the Errata Framework.
 */
.equ check_erratum_cortex_a76_798953, check_errata_dsu_798953
.equ erratum_cortex_a76_798953_wa, errata_dsu_798953_wa
add_erratum_entry cortex_a76, ERRATUM(798953), ERRATA_DSU_798953, APPLY_AT_RESET

/* ERRATA_DSU_936184:
 * This erratum is defined in dsu_helpers.S but applies to cortex_a76
 * as well. Hence, create symbolic names aliasing the existing errata
 * workaround functions to get them registered under the Errata Framework.
 */
.equ check_erratum_cortex_a76_936184, check_errata_dsu_936184
.equ erratum_cortex_a76_936184_wa, errata_dsu_936184_wa
add_erratum_entry cortex_a76, ERRATUM(936184), ERRATA_DSU_936184, APPLY_AT_RESET

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
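	/*
	 * SSBS is not implemented: statically disable load-pass-store
	 * speculation and, in BL31, install the workaround vector table so
	 * the mitigation can be toggled via SMCCC_ARCH_WORKAROUND_2.
	 */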
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If this vector table is installed, skip installing it again for
	 * CVE-2022-23960 as both mitigations use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

errata_report_shim cortex_a76

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn