xref: /aosp_15_r20/art/runtime/arch/arm/jni_entrypoints_arm.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "asm_support_arm.S"

// Total size of the frame pushed by SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME:
// FPR args s0-s15, GPR args r0-r3, plus r4 (alignment) and LR = 88 bytes.
#define MANAGED_ARGS_R4_LR_SAVE_SIZE /*s0-s15*/ 16 * 4 + /*r0-r3*/ 4 * 4 + /*r4*/ 4 + /*lr*/ 4

// Spill all managed argument registers and the return address, increasing the
// frame by MANAGED_ARGS_R4_LR_SAVE_SIZE. Paired with RESTORE_MANAGED_ARGS_R4_AND_RETURN.
// Note: R4 is saved for stack alignment.
.macro SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Save GPR args r0-r3 and return address. Also save r4 for stack alignment.
    push   {r0-r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20
    // Save FPR args.
    vpush  {s0-s15}
    .cfi_adjust_cfa_offset 64
.endm
31
// Reload the registers spilled by SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME and
// return by popping the saved LR straight into PC.
// Pass a non-zero `restore_cfa` when code assembled after this macro is still
// covered by the spill frame (e.g. a slow path branched over this return):
// the final `.cfi_adjust_cfa_offset 64` re-adds the GPR area popped above so
// that the CFA tracking is correct for that subsequent code.
.macro RESTORE_MANAGED_ARGS_R4_AND_RETURN restore_cfa
    // Restore FPR args.
    vpop   {s0-s15}
    .cfi_adjust_cfa_offset -64
    // Restore GPR args and r4 and return.
    pop    {r0-r4, pc}
    .if \restore_cfa
        .cfi_adjust_cfa_offset 64
    .endif
.endm
42
// Emit an entrypoint `name` that preserves all managed argument registers
// (r0-r3, s0-s15) across a call to `cxx_name`. If `arg1` is given, it is
// moved to r0 as the first (and only) argument of the call.
.macro JNI_SAVE_MANAGED_ARGS_TRAMPOLINE name, cxx_name, arg1 = "none"
    .extern \cxx_name
ENTRY \name
    // Note: Managed callee-save registers have been saved by the JNI stub.
    // Save managed args, r4 (for stack alignment) and LR.
    SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Call `cxx_name()`.
    .ifnc \arg1, none
        mov r0, \arg1                     @ Pass arg1.
    .endif
    bl     \cxx_name                      @ Call cxx_name(...).
    // Restore args and R4 and return.
    RESTORE_MANAGED_ARGS_R4_AND_RETURN /*restore_cfa*/ 0
END \name
.endm
58
// Emit an entrypoint `name` that preserves the managed return value registers
// (r0-r1, s0-s1) across a call to `cxx_name(arg1[, arg2])`.
// An optional local `label` is placed at the start of the entrypoint so that
// other code can branch to it with CBZ/CBNZ, which cannot reliably target the
// global entrypoint symbol (see the note at the art_jni_unlock_object_no_inline
// instantiation below).
.macro JNI_SAVE_RETURN_VALUE_TRAMPOLINE name, cxx_name, arg1, arg2 = "none", label = "none"
    .extern \cxx_name
ENTRY \name
    .ifnc \label, none
        \label:
    .endif
    // Save GPR return registers and return address. Also save r4 for stack alignment.
    push   {r0-r1, r4, lr}
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset lr, 12
    // Save FPR return registers.
    vpush  {s0-s1}
    .cfi_adjust_cfa_offset 8
    // Call `cxx_name()`.
    mov r0, \arg1                         @ Pass arg1.
    .ifnc \arg2, none
        mov r1, \arg2                     @ Pass arg2.
    .endif
    bl     \cxx_name                      @ Call cxx_name(...).
    // Restore FPR return registers.
    vpop   {s0-s1}
    .cfi_adjust_cfa_offset -8
    // Restore GPR return registers and r4 and return.
    pop    {r0-r1, r4, pc}
END \name
.endm
85
    /*
     * Jni dlsym lookup stub for @CriticalNative.
     *
     * Custom calling convention:
     *     r4 holds the hidden argument: the tagged target method (bit 0 set
     *     means the caller uses Generic JNI).
     *     Native argument registers (r0-r3) and any stack args must be
     *     preserved for the tail call to the looked-up native method.
     */
ENTRY art_jni_dlsym_lookup_critical_stub
    // The hidden arg holding the tagged method (bit 0 set means GenericJNI) is r4.
    // For Generic JNI we already have a managed frame, so we reuse the art_jni_dlsym_lookup_stub.
    tst    r4, #1
    bne art_jni_dlsym_lookup_stub

    // Reserve space for a SaveRefsAndArgs managed frame, either for the actual runtime
    // method or for a GenericJNI frame which is similar but has a native method and a tag.
    // Do this eagerly, so that we can use these registers as temps without the need to
    // save and restore them multiple times.
    INCREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Save args, the hidden arg and caller PC. No CFI needed for args and the hidden arg.
    push   {r0, r1, r2, r3, r4, lr}
    .cfi_adjust_cfa_offset 24
    .cfi_rel_offset lr, 20

    // Call artCriticalNativeFrameSize(method, caller_pc) to get the size of
    // the caller's stack args (returned in r0).
    mov    r0, r4  // r0 := method (from hidden arg)
    mov    r1, lr  // r1 := caller_pc
    bl     artCriticalNativeFrameSize

    // Prepare the return address for managed stack walk of the SaveRefsAndArgs frame.
    // If we're coming from JNI stub with tail call, it is LR. If we're coming from
    // JNI stub that saved the return address, it will be the last value we copy below.
    // If we're coming directly from compiled code, it is LR, set further down.
    ldr    lr, [sp, #20]

    // Move the stack args if any, 8 bytes at a time using IP and LR as
    // temporaries; r0 counts the remaining bytes down to zero.
    add    r4, sp, #24
    cbz    r0, .Lcritical_skip_copy_args
.Lcritical_copy_args_loop:
    ldrd   ip, lr, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    subs   r0, r0, #8
    strd   ip, lr, [r4], #8
    bne    .Lcritical_copy_args_loop
.Lcritical_skip_copy_args:
    // The managed frame address is now in R4. This is conveniently a callee-save in native ABI.

    // Restore args.
    pop    {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16

    // Spill registers for the SaveRefsAndArgs frame above the stack args.
    // Note that the runtime shall not examine the args here, otherwise we would have to
    // move them in registers and stack to account for the difference between managed and
    // native ABIs.
    add    ip, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    stmia  ip, {r1-r3, r5-r8, r10-r11, lr}  // LR: Save return address for tail call from JNI stub.
    // (If there were any stack args, we're storing the value that's already there.
    // For direct calls from compiled managed code, we shall overwrite this below.)
    // Skip args r1-r3.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 28
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 24
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 20
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_REFS_AND_ARGS - 8
    // The saved return PC for managed stack walk is not necessarily our LR.
    // Skip managed FP args as these are native ABI caller-saves and not args.

    // Restore the hidden arg to r1 and caller PC.
    pop    {r1, lr}
    .cfi_adjust_cfa_offset -8
    .cfi_restore lr

    // Save our return PC in the padding.
    str   lr, [r4, #__SIZEOF_POINTER__]
    CFI_EXPRESSION_BREG 14, 4, __SIZEOF_POINTER__

    // If the target method is already native, publish the frame as a tagged
    // GenericJNI-style frame; otherwise install the SaveRefsAndArgs runtime
    // method and publish an untagged frame.
    ldr    ip, [r1, #ART_METHOD_ACCESS_FLAGS_OFFSET]  // Load access flags.
    add    r2, r4, #1             // Prepare managed SP tagged for a GenericJNI frame.
    tst    ip, #ACCESS_FLAGS_METHOD_IS_NATIVE
    bne    .Lcritical_skip_prepare_runtime_method

    // When coming from a compiled method, the return PC for managed stack walk is LR.
    // (When coming from a compiled stub, the correct return PC is already stored above.)
    str    lr, [r4, #(FRAME_SIZE_SAVE_REFS_AND_ARGS - __SIZEOF_POINTER__)]

    // Replace the target method with the SaveRefsAndArgs runtime method.
    LOAD_RUNTIME_INSTANCE r1
    ldr    r1, [r1, #RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]

    mov    r2, r4                 // Prepare untagged managed SP for the runtime method.

.Lcritical_skip_prepare_runtime_method:
    // Store the method on the bottom of the managed frame.
    str    r1, [r4]

    // Place (maybe tagged) managed SP in Thread::Current()->top_quick_frame.
    str    r2, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    // Preserve the native arg register r0 in callee-save register r10 which was saved above.
    mov    r10, r0

    // Call artFindNativeMethodRunnable()
    mov    r0, rSELF   // pass Thread::Current()
    bl     artFindNativeMethodRunnable

    // Store result (the resolved native code, or null on exception) in scratch reg.
    mov    ip, r0

    // Restore the native arg register r0.
    mov    r0, r10

    // Restore the frame. We shall not need the method anymore.
    add    r1, r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS - 40
    ldmia  r1, {r1-r3, r5-r8, r10-r11}
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11

    REFRESH_MARKING_REGISTER

    // Check for exception before moving args back to keep the return PC for managed stack walk.
    cmp    ip, #0
    CFI_REMEMBER_STATE
    beq    .Lcritical_deliver_exception

    // Restore our return PC.
    ldr    lr, [r4, #__SIZEOF_POINTER__]
    .cfi_restore lr

    // Move stack args to their original place. R2 and R3 serve as copy
    // temporaries after being spilled (and are reloaded by the pop below);
    // the loop walks R4 back down to SP with pre-decrement addressing.
    cmp    sp, r4
    beq    .Lcritical_skip_copy_args_back
    push   {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset 16
    add    r0, sp, #16
    sub    r0, r4, r0
.Lcritical_copy_args_loop_back:
    ldrd   r2, r3, [r4, #-8]!
    subs   r0, r0, #8
    strd   r2, r3, [r4, #FRAME_SIZE_SAVE_REFS_AND_ARGS]
    bne    .Lcritical_copy_args_loop_back
    pop    {r0, r1, r2, r3}
    .cfi_adjust_cfa_offset -16
.Lcritical_skip_copy_args_back:

    // Remove the frame reservation.
    DECREASE_FRAME FRAME_SIZE_SAVE_REFS_AND_ARGS

    // Do the tail call to the resolved native method.
    bx     ip

.Lcritical_deliver_exception:
    CFI_RESTORE_STATE_AND_DEF_CFA sp, FRAME_SIZE_SAVE_REFS_AND_ARGS
    // The exception delivery checks that rSELF was saved but the SaveRefsAndArgs
    // frame does not save it, so we cannot use the existing SaveRefsAndArgs frame.
    // That's why we checked for exception after restoring registers from it.
    // We need to build a SaveAllCalleeSaves frame instead. Args are irrelevant at this
    // point but keep the area allocated for stack args to keep CFA definition simple.
#if FRAME_SIZE_SAVE_REFS_AND_ARGS != FRAME_SIZE_SAVE_ALL_CALLEE_SAVES
#  error "Expected FRAME_SIZE_SAVE_REFS_AND_ARGS == FRAME_SIZE_SAVE_ALL_CALLEE_SAVES"
    // Otherwise we would need to adjust SP and R4 and move our return PC which is at [R4, #4].
    // (Luckily, both SaveRefsAndArgs and SaveAllCalleeSaves frames have padding there.)
#endif

    // Spill registers for the SaveAllCalleeSaves frame above the stack args area.
    // (7 GPRs fill offsets -32..-8; the word at -4 is the caller PC, untouched.)
    add    ip, r4, #FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    stmia  ip, {r5-r11}  // Keep the caller PC for managed stack walk.
    CFI_EXPRESSION_BREG 5, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 32
    CFI_EXPRESSION_BREG 6, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 28
    CFI_EXPRESSION_BREG 7, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 24
    CFI_EXPRESSION_BREG 8, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 20
    CFI_EXPRESSION_BREG 9, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 16
    CFI_EXPRESSION_BREG 10, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 12
    CFI_EXPRESSION_BREG 11, 4, FRAME_SIZE_SAVE_ALL_CALLEE_SAVES - 8
    // Skip R4, it is callee-save in managed ABI.
    add    ip, r4, #12
    vstmia ip, {s16-s31}

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves] to the managed frame.
    LOAD_RUNTIME_INSTANCE ip
    ldr   ip, [ip, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str   ip, [r4]

    // Place the managed frame SP in Thread::Current()->top_quick_frame.
    str   r4, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]

    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_jni_dlsym_lookup_critical_stub
274
    /*
     * Read barrier for the method's declaring class needed by JNI stub for static methods.
     * (We're using a pointer to the declaring class in `ArtMethod` as `jclass`.)
     */
// The method argument is already in r0 for call to `artJniReadBarrier(ArtMethod*)`.
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_read_barrier, artJniReadBarrier

    /*
     * Trampoline to `artJniMethodStart()` that preserves all managed arguments.
     * Passes Thread::Current() (rSELF) as the sole argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_start, artJniMethodStart, rSELF

    /*
     * Trampoline to `artJniMethodEntryHook()` that preserves all managed arguments.
     * Passes Thread::Current() (rSELF) as the sole argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_method_entry_hook, artJniMethodEntryHook, rSELF

    /*
     * Trampoline to `artJniMonitoredMethodStart()` that preserves all managed arguments.
     * Passes Thread::Current() (rSELF) as the sole argument.
     */
JNI_SAVE_MANAGED_ARGS_TRAMPOLINE art_jni_monitored_method_start, artJniMonitoredMethodStart, rSELF

    /*
     * Trampoline to `artJniMethodEnd()` that preserves all return registers.
     * Passes Thread::Current() (rSELF) as the sole argument.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_method_end, artJniMethodEnd, rSELF

    /*
     * Trampoline to `artJniMonitoredMethodEnd()` that preserves all return registers.
     * Passes Thread::Current() (rSELF) as the sole argument.
     */
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_monitored_method_end, artJniMonitoredMethodEnd, rSELF
306
    /*
     * Entry from JNI stub that tries to lock the object in a fast path and
     * calls `artLockObjectFromCode()` (the same as for managed code) for the
     * difficult cases, may block for GC.
     * Custom calling convention:
     *     r4 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
ENTRY art_jni_lock_object
    // Note: the slow path is actually the art_jni_lock_object_no_inline (tail call).
    // r5-r7 are used as temporaries (callee-saves already spilled by the JNI stub).
    LOCK_OBJECT_FAST_PATH r4, r5, r6, r7, .Llock_object_jni_slow, /*can_be_null*/ 0
END art_jni_lock_object
320
    /*
     * Entry from JNI stub that calls `artLockObjectFromCode()`
     * (the same as for managed code), may block for GC.
     * Custom calling convention:
     *     r4 holds the non-null object to lock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     All argument registers need to be preserved.
     */
    .extern artLockObjectFromCode
ENTRY art_jni_lock_object_no_inline
    // This is also the slow path for art_jni_lock_object.
    // Note that we need a local label as the assembler emits bad instructions
    // for CBZ/CBNZ if we try to jump to `art_jni_lock_object_no_inline`.
.Llock_object_jni_slow:
    // Save managed args, r4 (for stack alignment) and LR.
    SAVE_MANAGED_ARGS_R4_LR_INCREASE_FRAME
    // Call `artLockObjectFromCode()`
    mov    r0, r4                       @ Pass the object to lock.
    mov    r1, rSELF                    @ Pass Thread::Current().
    bl     artLockObjectFromCode        @ (Object* obj, Thread*)
    // Check result. Non-zero means a pending exception.
    cbnz   r0, 1f
    // Restore args and r4 and return. Pass restore_cfa=1 so the CFA tracking
    // is rewound for the exception path below, which is still inside the frame.
    RESTORE_MANAGED_ARGS_R4_AND_RETURN /*restore_cfa*/ 1
1:
    // All args are irrelevant when throwing an exception and R4 is preserved
    // by the `artLockObjectFromCode()` call. Load LR and drop saved args and R4.
    // (LR sits in the topmost slot of the save area, hence SAVE_SIZE - 4.)
    ldr    lr, [sp, #(MANAGED_ARGS_R4_LR_SAVE_SIZE - 4)]
    .cfi_restore lr
    DECREASE_FRAME MANAGED_ARGS_R4_LR_SAVE_SIZE
    // Make a call to `artDeliverPendingExceptionFromCode()`.
    // Rely on the JNI transition frame constructed in the JNI stub.
    mov    r0, rSELF                            @ Pass Thread::Current().
    bl     artDeliverPendingExceptionFromCode   @ (Thread*)
    bl     art_quick_do_long_jump               @ (Context*)
    bkpt  // Unreached
END art_jni_lock_object_no_inline
358
    /*
     * Entry from JNI stub that tries to unlock the object in a fast path and calls
     * `artJniUnlockObject()` for the difficult cases. Note that failure to unlock
     * is fatal, so we do not need to check for exceptions in the slow path.
     * Custom calling convention:
     *     r4 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0-r1 and s0-s1 need to be preserved.
     */
ENTRY art_jni_unlock_object
    // Note: the slow path is actually the art_jni_unlock_object_no_inline (tail call).
    // r5-r7 are used as temporaries (callee-saves already spilled by the JNI stub).
    UNLOCK_OBJECT_FAST_PATH r4, r5, r6, r7, .Lunlock_object_jni_slow, /*can_be_null*/ 0
END art_jni_unlock_object
372
    /*
     * Entry from JNI stub that calls `artJniUnlockObject()`. Note that failure to
     * unlock is fatal, so we do not need to check for exceptions.
     * Custom calling convention:
     *     r4 holds the non-null object to unlock.
     *     Callee-save registers have been saved and can be used as temporaries.
     *     Return registers r0-r1 and s0-s1 need to be preserved.
     */
    // This is also the slow path for art_jni_unlock_object.
    // Calls `artJniUnlockObject(obj, Thread*)`: the object (r4) is moved to r0
    // and Thread::Current() (rSELF) to r1 by the trampoline macro.
JNI_SAVE_RETURN_VALUE_TRAMPOLINE art_jni_unlock_object_no_inline, artJniUnlockObject, r4, rSELF, \
    /* Note that we need a local label as the assembler emits bad instructions                */ \
    /* for CBZ/CBNZ if we try to jump to `art_jni_unlock_object_no_inline`.                   */ \
    .Lunlock_object_jni_slow
386