xref: /aosp_15_r20/art/runtime/arch/x86/quick_entrypoints_x86.S (revision 795d594fd825385562da6b089ea9b2033f3abf5a)
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "asm_support_x86.S"
18#include "interpreter/cfi_asm_support.h"
19
20#include "arch/quick_alloc_entrypoints.S"
21#include "arch/quick_field_entrypoints.S"
22
23// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
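// As a small illustration of the CFI bookkeeping used throughout this file: right after
// the caller's `call`, the return address is at (%esp) and the CFA is esp + 4; after a
// single `PUSH edi` the same CFA becomes esp + 8, which is why the push/frame macros
// below adjust the recorded CFA offset by exactly the number of bytes they push or reserve.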
24
25    /*
26     * Macro that sets up the callee save frame to conform with
27     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly)
28     * and preserves the value of temp_reg at entry.
29     */
30MACRO1(SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_TEMP_REG, temp_reg)
31    PUSH edi  // Save callee saves (ebx is saved/restored by the upcall)
32    PUSH esi
33    PUSH ebp
34    PUSH RAW_VAR(temp_reg)  // Save temp_reg
35    INCREASE_FRAME 8             // Grow stack by 2 words.
36
37    LOAD_RUNTIME_INSTANCE \temp_reg
38    // Push save-refs-only callee-save method.
39    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
40    CFI_ADJUST_CFA_OFFSET(4)
41    // Store esp as the top quick frame.
42    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
43    // Restore temp_reg.
44    movl 12(%esp), REG_VAR(temp_reg)
45    CFI_RESTORE(RAW_VAR(temp_reg))
46
47    // Ugly compile-time check, but we only have the preprocessor.
48    // Last +4: implicit return address pushed on stack when caller made call.
49#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
50#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
51#endif
52END_MACRO
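// A sketch of the kSaveRefsOnly frame built above, derived from the pushes (offsets from ESP):
//   [esp +  0]  save-refs-only ArtMethod*
//   [esp +  4]  8 bytes of padding (from INCREASE_FRAME 8)
//   [esp + 12]  saved temp_reg (reloaded by the `movl 12(%esp)` above)
//   [esp + 16]  saved EBP
//   [esp + 20]  saved ESI
//   [esp + 24]  saved EDI
//   [esp + 28]  return address
// for a total of FRAME_SIZE_SAVE_REFS_ONLY = 32 bytes, matching the compile-time check.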
53
54    /*
55     * Macro that sets up the callee save frame to conform with
56     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs)
57     */
58MACRO1(SETUP_SAVE_REFS_AND_ARGS_FRAME, temp_reg)
59    SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
60
61    LOAD_RUNTIME_INSTANCE \temp_reg
62    // Push save-refs-and-args callee-save method.
63    pushl RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET(REG_VAR(temp_reg))
64    CFI_ADJUST_CFA_OFFSET(4)
65    // Store esp as the top quick frame.
66    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
67END_MACRO
68
69    /*
70     * Macro that sets up the callee save frame to conform with
71     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs) where the method is passed in EAX.
72     */
73MACRO0(SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX)
74    SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
75
76    pushl %eax  // Store the ArtMethod reference at the bottom of the stack.
77    CFI_ADJUST_CFA_OFFSET(4)
78    // Store esp as the top quick frame.
79    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
80END_MACRO
81
82// Restore registers and jump to the code pointer.
83// Input:  EDI contains the pointer to the code to jump to.
84// Note:   EAX is popped as well (this restores the Method*).
85MACRO0(RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP)
86    POP eax  // Restore Method*
87
88    // Restore FPRs.
89    movsd 0(%esp), %xmm0
90    movsd 8(%esp), %xmm1
91    movsd 16(%esp), %xmm2
92    movsd 24(%esp), %xmm3
93
94    DECREASE_FRAME 32             // Remove FPRs.
95
96    POP ecx  // Restore args except eax
97    POP edx
98    POP ebx
99    POP ebp  // Restore callee saves
100    POP esi
101    xchgl 0(%esp), %edi  // Restore EDI and leave the code pointer as the only value on the stack.
102    ret
103END_MACRO
104
105    /*
106     * Macro that sets up the callee save frame to conform with
107     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
108     * when EDI and ESI are already saved.
109     */
110MACRO2(SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
111    // Save core registers from highest to lowest to agree with core spills bitmap.
112    // EDI and ESI, or at least placeholders for them, are already on the stack.
113    PUSH ebp
114    PUSH ebx
115    PUSH edx
116    PUSH ecx
117    PUSH eax
118    // Create space for FPR registers and stack alignment padding.
119    INCREASE_FRAME 12 + 8 * 8
120    // Save FPRs.
121    movsd %xmm0, 12(%esp)
122    movsd %xmm1, 20(%esp)
123    movsd %xmm2, 28(%esp)
124    movsd %xmm3, 36(%esp)
125    movsd %xmm4, 44(%esp)
126    movsd %xmm5, 52(%esp)
127    movsd %xmm6, 60(%esp)
128    movsd %xmm7, 68(%esp)
129
130    LOAD_RUNTIME_INSTANCE \temp_reg
131    // Push save everything callee-save method.
132    pushl \runtime_method_offset(REG_VAR(temp_reg))
133    CFI_ADJUST_CFA_OFFSET(4)
134    // Store esp as the top quick frame.
135    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
136
137    // Ugly compile-time check, but we only have the preprocessor.
138    // Last +4: implicit return address pushed on stack when caller made call.
139#if (FRAME_SIZE_SAVE_EVERYTHING != 7*4 + 8*8 + 12 + 4 + 4)
140#error "FRAME_SIZE_SAVE_EVERYTHING(X86) size not as expected."
141#endif
142END_MACRO
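// A sketch of the kSaveEverything frame built above (offsets from ESP):
//   [esp +   0]  save-everything ArtMethod*
//   [esp +   4]  12 bytes of padding
//   [esp +  16]  XMM0..XMM7 (8 * 8 bytes)
//   [esp +  80]  EAX, ECX, EDX, EBX, EBP, ESI, EDI (7 * 4 bytes, lowest to highest address)
//   [esp + 108]  return address
// for a total of FRAME_SIZE_SAVE_EVERYTHING = 112 bytes, matching the compile-time check;
// the RESTORE_SAVE_EVERYTHING_* macros below read the FPRs back from offset 16 accordingly.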
143
144    /*
145     * Macro that sets up the callee save frame to conform with
146     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
147     * when EDI is already saved.
148     */
149MACRO2(SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
150    // Save core registers from highest to lowest to agree with core spills bitmap.
151    // EDI, or at least a placeholder for it, is already on the stack.
152    PUSH esi
153    SETUP_SAVE_EVERYTHING_FRAME_EDI_ESI_SAVED RAW_VAR(temp_reg), \runtime_method_offset
154END_MACRO
155
156    /*
157     * Macro that sets up the callee save frame to conform with
158     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
159     */
160MACRO2(SETUP_SAVE_EVERYTHING_FRAME, temp_reg, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
161    PUSH edi
162    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED RAW_VAR(temp_reg), \runtime_method_offset
163END_MACRO
164
165MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_FRPS)
166    // Restore FPRs. The method and padding are still on the stack.
167    movsd 16(%esp), %xmm0
168    movsd 24(%esp), %xmm1
169    movsd 32(%esp), %xmm2
170    movsd 40(%esp), %xmm3
171    movsd 48(%esp), %xmm4
172    movsd 56(%esp), %xmm5
173    movsd 64(%esp), %xmm6
174    movsd 72(%esp), %xmm7
175END_MACRO
176
177MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX)
178    // Restore core registers (except eax).
179    POP ecx
180    POP edx
181    POP ebx
182    POP ebp
183    POP esi
184    POP edi
185END_MACRO
186
187MACRO0(RESTORE_SAVE_EVERYTHING_FRAME)
188    RESTORE_SAVE_EVERYTHING_FRAME_FRPS
189
190    // Remove save everything callee save method, stack alignment padding and FPRs.
191    DECREASE_FRAME 16 + 8 * 8
192
193    POP eax
194    RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX
195END_MACRO
196
197MACRO0(RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX)
198    RESTORE_SAVE_EVERYTHING_FRAME_FRPS
199
200    // Remove save everything callee save method, stack alignment padding and FPRs, skip EAX.
201    DECREASE_FRAME 16 + 8 * 8 + 4
202
203    RESTORE_SAVE_EVERYTHING_FRAME_GPRS_EXCEPT_EAX
204END_MACRO
205
206MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
207    DEFINE_FUNCTION VAR(c_name)
208    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx      // save all registers as basis for long jump context
209    // Outgoing argument set up
210    INCREASE_FRAME 12                          // alignment padding
211    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
212    CFI_ADJUST_CFA_OFFSET(4)
213    call CALLVAR(cxx_name)                     // cxx_name(Thread*)
214    call SYMBOL(art_quick_do_long_jump)
215    UNREACHABLE
216    END_FUNCTION VAR(c_name)
217END_MACRO
218
219MACRO2(NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING, c_name, cxx_name)
220    DEFINE_FUNCTION VAR(c_name)
221    SETUP_SAVE_EVERYTHING_FRAME ebx            // save all registers as basis for long jump context
222    // Outgoing argument set up
223    INCREASE_FRAME 12                          // alignment padding
224    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
225    CFI_ADJUST_CFA_OFFSET(4)
226    call CALLVAR(cxx_name)                     // cxx_name(Thread*)
227    call SYMBOL(art_quick_do_long_jump)
228    UNREACHABLE
229    END_FUNCTION VAR(c_name)
230END_MACRO
231
232MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
233    DEFINE_FUNCTION VAR(c_name)
234    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx      // save all registers as basis for long jump context
235    // Outgoing argument set up
236    INCREASE_FRAME 8                           // alignment padding
237    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
238    CFI_ADJUST_CFA_OFFSET(4)
239    PUSH eax                                   // pass arg1
240    call CALLVAR(cxx_name)                     // cxx_name(arg1, Thread*)
241    call SYMBOL(art_quick_do_long_jump)
242    UNREACHABLE
243    END_FUNCTION VAR(c_name)
244END_MACRO
245
246MACRO2(TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING, c_name, cxx_name)
247    DEFINE_FUNCTION VAR(c_name)
248    SETUP_SAVE_EVERYTHING_FRAME ebx            // save all registers as basis for long jump context
249    // Outgoing argument set up
250    PUSH eax                                   // alignment padding
251    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
252    CFI_ADJUST_CFA_OFFSET(4)
253    PUSH ecx                                   // pass arg2
254    PUSH eax                                   // pass arg1
255    call CALLVAR(cxx_name)                     // cxx_name(arg1, arg2, Thread*)
256    call SYMBOL(art_quick_do_long_jump)
257    UNREACHABLE
258    END_FUNCTION VAR(c_name)
259END_MACRO
260
261    /*
262     * Called by managed code to create and deliver a NullPointerException.
263     */
264NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
265
266    /*
267     * Call installed by a signal handler to create and deliver a NullPointerException.
268     */
269DEFINE_FUNCTION_CUSTOM_CFA art_quick_throw_null_pointer_exception_from_signal, 2 * __SIZEOF_POINTER__
270    // Fault address and return address were saved by the fault handler.
271    // Save all registers as basis for long jump context; EDI will replace fault address later.
272    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED ebx
273    // Retrieve fault address and save EDI.
274    movl (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp), %eax
275    movl %edi, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp)
276    CFI_REL_OFFSET(%edi, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__))
277    // Outgoing argument set up
278    INCREASE_FRAME 8                                      // alignment padding
279    pushl %fs:THREAD_SELF_OFFSET                          // pass Thread::Current()
280    CFI_ADJUST_CFA_OFFSET(4)
281    PUSH eax                                              // pass arg1
282    call SYMBOL(artThrowNullPointerExceptionFromSignal)   // (addr, self)
283    call SYMBOL(art_quick_do_long_jump)
284    UNREACHABLE
285END_FUNCTION art_quick_throw_null_pointer_exception_from_signal
286
287    /*
288     * Called by managed code to create and deliver an ArithmeticException.
289     */
290NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_div_zero, artThrowDivZeroFromCode
291
292    /*
293     * Called by managed code to create and deliver a StackOverflowError.
294     */
295NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
296
297    /*
298     * Called by managed code; saves callee saves and then calls artDeliverExceptionFromCode,
299     * which will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
300     */
301ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
302
303    /*
304     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
305     * index, arg2 holds limit.
306     */
307TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
308
309    /*
310     * Called by managed code to create and deliver a StringIndexOutOfBoundsException
311     * as if thrown from a call to String.charAt(). Arg1 holds index, arg2 holds limit.
312     */
313TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_string_bounds, artThrowStringBoundsFromCode
314
315    /*
316     * All generated callsites for interface invokes and invocation slow paths will load arguments
317     * as usual - except instead of loading arg0/EAX with the target Method*, arg0/EAX will contain
318     * the method_idx.  This wrapper will save arg1-arg3 (ECX, EDX, EBX) and call the appropriate C helper.
319     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/ECX.
320     *
321     * The helper will attempt to locate the target and return a two-register result consisting
322     * of the target Method* in EAX and its method->code_ entry point in EDX.
323     *
324     * If unsuccessful, the helper will return null in EAX, there will be a pending exception in
325     * the thread, and we branch to another stub to deliver it.
326     *
327     * On success this wrapper will restore the arguments and *jump* to the target, leaving the
328     * return address on the stack pointing back to the original caller.
329     */
330MACRO1(INVOKE_TRAMPOLINE_BODY, cxx_name)
331    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx
332    movl %esp, %edx  // remember SP
333
334    // Outgoing argument set up
335    PUSH edx                      // pass SP
336    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
337    CFI_ADJUST_CFA_OFFSET(4)
338    PUSH ecx                      // pass arg2
339    PUSH eax                      // pass arg1
340    call CALLVAR(cxx_name)        // cxx_name(arg1, arg2, Thread*, SP)
341    movl %edx, %edi               // save code pointer in EDI
342    DECREASE_FRAME 20             // Pop outgoing args and the ArtMethod* slot; EAX (target method) is kept.
343
344    // Restore FPRs.
345    movsd 0(%esp), %xmm0
346    movsd 8(%esp), %xmm1
347    movsd 16(%esp), %xmm2
348    movsd 24(%esp), %xmm3
349
350    // Remove space for FPR args.
351    DECREASE_FRAME 4 * 8
352
353    POP ecx  // Restore args except eax
354    POP edx
355    POP ebx
356    POP ebp  // Restore callee saves
357    POP esi
358    // Swap EDI callee save with code pointer.
359    xchgl %edi, (%esp)
360    testl %eax, %eax              // Branch forward if exception pending.
361    jz    1f
362    // Tail call to intended method.
363    ret
3641:
365    DECREASE_FRAME 4              // Pop code pointer off stack
366    DELIVER_PENDING_EXCEPTION
367END_MACRO
368MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
369    DEFINE_FUNCTION VAR(c_name)
370    INVOKE_TRAMPOLINE_BODY RAW_VAR(cxx_name)
371    END_FUNCTION VAR(c_name)
372END_MACRO
373
374INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck
375
376INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
377INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
378INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
379INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
380
381    /*
382     * Helper for quick invocation stub to set up XMM registers.
383     * Increments shorty and arg_array and clobbers temp_char.
384     * Branches to finished if it encounters the end of the shorty.
385     */
386MACRO5(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, shorty, arg_array, temp_char, finished)
3871: // LOOP
388    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
389    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
390    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
391    je VAR(finished)                               //   goto finished
392    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
393    je 2f                                          //   goto FOUND_DOUBLE
394    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
395    je 3f                                          //   goto FOUND_FLOAT
396    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
397    //  Handle extra space in arg array taken by a long.
398    cmpb MACRO_LITERAL(74), REG_VAR(temp_char)     // if (temp_char != 'J')
399    jne 1b                                         //   goto LOOP
400    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
401    jmp 1b                                         // goto LOOP
4022:  // FOUND_DOUBLE
403    movsd (REG_VAR(arg_array)), REG_VAR(xmm_reg)
404    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
405    jmp 4f
4063:  // FOUND_FLOAT
407    movss (REG_VAR(arg_array)), REG_VAR(xmm_reg)
408    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
4094:
410END_MACRO
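// Illustrative example: for a method with shorty "VFDI" (void return; float, double and
// int arguments), the first invocation of this macro loads the float into the given XMM
// register and advances arg_array by 4, the second loads the double and advances by 8,
// and the third sees 'I', skips 4 bytes, then reads the terminating '\0' and branches to
// `finished`. The integer argument is handled later by the GPR setup code in the stubs.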
411
412    /*
413     * Helper for the quick invocation stubs to skip floating point arguments so the caller
414     * can set up GPR registers. Increments shorty and arg_array, and returns the current
415     * shorty character in temp_char. Branches to finished if it encounters the end of the shorty.
416     */
417MACRO4(SKIP_OVER_FLOATS, shorty, arg_array, temp_char, finished)
4181: // LOOP:
419    movb (REG_VAR(shorty)), REG_VAR(temp_char)     // temp_char := *shorty
420    addl MACRO_LITERAL(1), REG_VAR(shorty)         // shorty++
421    cmpb MACRO_LITERAL(0), REG_VAR(temp_char)      // if (temp_char == '\0')
422    je VAR(finished)                               //   goto finished
423    cmpb MACRO_LITERAL(70), REG_VAR(temp_char)     // if (temp_char == 'F')
424    je 3f                                          //   goto SKIP_FLOAT
425    cmpb MACRO_LITERAL(68), REG_VAR(temp_char)     // if (temp_char == 'D')
426    je 4f                                          //   goto SKIP_DOUBLE
427    jmp 5f                                         // goto end
4283:  // SKIP_FLOAT
429    addl MACRO_LITERAL(4), REG_VAR(arg_array)      // arg_array++
430    jmp 1b                                         // goto LOOP
4314:  // SKIP_DOUBLE
432    addl MACRO_LITERAL(8), REG_VAR(arg_array)      // arg_array+=2
433    jmp 1b                                         // goto LOOP
4345:
435END_MACRO
436
437    /*
438     * Quick invocation stub (non-static).
439     * On entry:
440     *   [sp] = return address
441     *   [sp + 4] = method pointer
442     *   [sp + 8] = argument array or null for no argument methods
443     *   [sp + 12] = size of argument array in bytes
444     *   [sp + 16] = (managed) thread pointer
445     *   [sp + 20] = JValue* result
446     *   [sp + 24] = shorty
447     */
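// (A sketch of the managed x86 calling convention that the stub below materializes, based
//  on the register usage in the code: the ArtMethod* is passed in EAX, the leading GPR
//  arguments in ECX, EDX and EBX -- with "this" in ECX for non-static methods -- the
//  leading FP arguments in XMM0-XMM3, and the full argument array is additionally copied
//  onto the stack just above the Method* slot.)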
448DEFINE_FUNCTION art_quick_invoke_stub
449    // Save the non-volatiles.
450    PUSH ebp                      // save ebp
451    PUSH ebx                      // save ebx
452    PUSH esi                      // save esi
453    PUSH edi                      // save edi
454    // Set up argument XMM registers.
455    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
456    addl LITERAL(1), %esi
457    mov 8+16(%esp), %edi          // EDI := arg_array + 4 ; ie skip this pointer.
458    addl LITERAL(4), %edi
459    // Clobbers ESI, EDI, EAX.
460    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished
461    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished
462    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished
463    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished
464    .balign 16
465.Lxmm_setup_finished:
466    mov %esp, %ebp                // copy value of stack pointer into base pointer
467    CFI_DEF_CFA_REGISTER(ebp)
468    mov 28(%ebp), %ebx            // get arg array size
469    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
470    addl LITERAL(36), %ebx
471    // align frame size to 16 bytes
472    andl LITERAL(0xFFFFFFF0), %ebx
473    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
474    subl %ebx, %esp               // reserve stack space for argument array
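    // Illustrative arithmetic: with a 12-byte argument array, EBX becomes 12 + 36 = 48,
    // stays 48 after rounding down to a multiple of 16, and 48 - 20 = 28 bytes are
    // reserved here; the null Method* stored just below then sits at (%esp) and the
    // copied arguments start at 4(%esp).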
475
476    movl LITERAL(0), (%esp)       // store null for method*
477
478    // Copy arg array into stack.
479    movl 28(%ebp), %ecx           // ECX = size of args
480    movl 24(%ebp), %esi           // ESI = argument array
481    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
482    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
483
484    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
485    addl LITERAL(1), %esi
486    mov 24(%ebp), %edi            // EDI := arg_array
487    mov 0(%edi), %ecx             // ECX := this pointer
488    addl LITERAL(4), %edi         // EDI := arg_array + 4 ; ie skip this pointer.
489
490    // Enumerate the possible cases for loading GPRS.
491    // edx (and maybe ebx):
492    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
493    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
494    je .LfirstLong
495    // Must be an integer value.
496    movl (%edi), %edx
497    addl LITERAL(4), %edi         // arg_array++
498
499    // Now check ebx
500    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished
501    // Must be first word of a long, or an integer. First word of long doesn't
502    // go into EBX, but can be loaded there anyway, as it is harmless.
503    movl (%edi), %ebx
504    jmp .Lgpr_setup_finished
505.LfirstLong:
506    movl (%edi), %edx
507    movl 4(%edi), %ebx
508    // Nothing left to load.
509.Lgpr_setup_finished:
510    mov 20(%ebp), %eax            // move method pointer into eax
511    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
512    mov %ebp, %esp                // restore stack pointer
513    CFI_DEF_CFA_REGISTER(esp)
514    POP edi                       // pop edi
515    POP esi                       // pop esi
516    POP ebx                       // pop ebx
517    POP ebp                       // pop ebp
518    mov 20(%esp), %ecx            // get result pointer
519    mov %eax, (%ecx)              // store the result assuming it's a long, int or Object*
520    mov %edx, 4(%ecx)             // store the other half of the result
521    mov 24(%esp), %edx            // get the shorty
522    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
523    je .Lreturn_double_quick
524    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
525    je .Lreturn_float_quick
526    ret
527.Lreturn_double_quick:
528    movsd %xmm0, (%ecx)           // store the floating point result
529    ret
530.Lreturn_float_quick:
531    movss %xmm0, (%ecx)           // store the floating point result
532    ret
533END_FUNCTION art_quick_invoke_stub
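// Note on the epilogue above: EAX and EDX are unconditionally stored into the JValue and
// then, when the shorty says the return type is 'D' or 'F', overwritten with XMM0; for
// 32-bit results the extra store of EDX into the upper half is harmless since a JValue is
// wide enough to hold a 64-bit value.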
534
535    /*
536     * Quick invocation stub (static).
537     * On entry:
538     *   [sp] = return address
539     *   [sp + 4] = method pointer
540     *   [sp + 8] = argument array or null for no argument methods
541     *   [sp + 12] = size of argument array in bytes
542     *   [sp + 16] = (managed) thread pointer
543     *   [sp + 20] = JValue* result
544     *   [sp + 24] = shorty
545     */
546DEFINE_FUNCTION art_quick_invoke_static_stub
547    // Save the non-volatiles.
548    PUSH ebp                      // save ebp
549    PUSH ebx                      // save ebx
550    PUSH esi                      // save esi
551    PUSH edi                      // save edi
552    // Set up argument XMM registers.
553    mov 24+16(%esp), %esi         // ESI := shorty + 1  ; ie skip return arg character.
554    addl LITERAL(1), %esi
555    mov 8+16(%esp), %edi          // EDI := arg_array
556    // Clobbers ESI, EDI, EAX.
557    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, esi, edi, al, .Lxmm_setup_finished2
558    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, esi, edi, al, .Lxmm_setup_finished2
559    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, esi, edi, al, .Lxmm_setup_finished2
560    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, esi, edi, al, .Lxmm_setup_finished2
561    .balign 16
562.Lxmm_setup_finished2:
563    mov %esp, %ebp                // copy value of stack pointer into base pointer
564    CFI_DEF_CFA_REGISTER(ebp)
565    mov 28(%ebp), %ebx            // get arg array size
566    // reserve space for return addr, method*, ebx, ebp, esi, and edi in frame
567    addl LITERAL(36), %ebx
568    // align frame size to 16 bytes
569    andl LITERAL(0xFFFFFFF0), %ebx
570    subl LITERAL(20), %ebx        // remove space for return address, ebx, ebp, esi and edi
571    subl %ebx, %esp               // reserve stack space for argument array
572
573    movl LITERAL(0), (%esp)       // store null for method*
574
575    // Copy arg array into stack.
576    movl 28(%ebp), %ecx           // ECX = size of args
577    movl 24(%ebp), %esi           // ESI = argument array
578    leal 4(%esp), %edi            // EDI = just after Method* in stack arguments
579    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
580
581    mov 40(%ebp), %esi            // ESI := shorty + 1  ; ie skip return arg character.
582    addl LITERAL(1), %esi
583    mov 24(%ebp), %edi            // EDI := arg_array
584
585    // Enumerate the possible cases for loading GPRS.
586    // ecx (and maybe edx)
587    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
588    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
589    je .LfirstLong2
590    // Must be an integer value.  Load into ECX.
591    movl (%edi), %ecx
592    addl LITERAL(4), %edi         // arg_array++
593
594    // Now check edx (and maybe ebx).
595    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
596    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG
597    je .LSecondLong2
598    // Must be an integer.  Load into EDX.
599    movl (%edi), %edx
600    addl LITERAL(4), %edi         // arg_array++
601
602    // Is there anything for ebx?
603    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
604    // Must be first word of a long, or an integer. First word of long doesn't
605    // go into EBX, but can be loaded there anyway, as it is harmless.
606    movl (%edi), %ebx
607    jmp .Lgpr_setup_finished2
608.LSecondLong2:
609    // EDX:EBX is long.  That is all.
610    movl (%edi), %edx
611    movl 4(%edi), %ebx
612    jmp .Lgpr_setup_finished2
613.LfirstLong2:
614    // ECX:EDX is a long
615    movl (%edi), %ecx
616    movl 4(%edi), %edx
617    addl LITERAL(8), %edi         // arg_array += 2
618
619    // Anything for EBX?
620    SKIP_OVER_FLOATS esi, edi, al, .Lgpr_setup_finished2
621    // Must be first word of a long, or an integer. First word of long doesn't
622    // go into EBX, but can be loaded there anyway, as it is harmless.
623    movl (%edi), %ebx
624    jmp .Lgpr_setup_finished2
625    // Nothing left to load.
626.Lgpr_setup_finished2:
627    mov 20(%ebp), %eax            // move method pointer into eax
628    call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
629    mov %ebp, %esp                // restore stack pointer
630    CFI_DEF_CFA_REGISTER(esp)
631    POP edi                       // pop edi
632    POP esi                       // pop esi
633    POP ebx                       // pop ebx
634    POP ebp                       // pop ebp
635    mov 20(%esp), %ecx            // get result pointer
636    mov %eax, (%ecx)              // store the result assuming it's a long, int or Object*
637    mov %edx, 4(%ecx)             // store the other half of the result
638    mov 24(%esp), %edx            // get the shorty
639    cmpb LITERAL(68), (%edx)      // test if result type char == 'D'
640    je .Lreturn_double_quick2
641    cmpb LITERAL(70), (%edx)      // test if result type char == 'F'
642    je .Lreturn_float_quick2
643    ret
644.Lreturn_double_quick2:
645    movsd %xmm0, (%ecx)           // store the floating point result
646    ret
647.Lreturn_float_quick2:
648    movss %xmm0, (%ecx)           // store the floating point result
649    ret
650END_FUNCTION art_quick_invoke_static_stub
651
652    /*
653     * Long jump stub.
654     * Custom calling convention: On entry EAX is the long jump context. This is expected to
655     * be returned from a previous entrypoint call which threw an exception or deoptimized.
656     */
657DEFINE_FUNCTION art_quick_do_long_jump
658#if defined(__APPLE__)
659    int3
660    int3
661#else
662    // Reserve space for the gprs + fprs; add 16-byte stack alignment padding for call.
663    // (Note that the return address plus 3 args below shall take exactly 16 bytes.)
664    INCREASE_FRAME (X86_LONG_JUMP_CONTEXT_SIZE + 15) & ~15
665
666    lea 0(%esp), %esi                               // GPRS
667    lea X86_LONG_JUMP_GPRS_SIZE(%esp), %edx         // FPRS
668
669    PUSH_ARG edx
670    PUSH_ARG esi
671    PUSH_ARG eax
672    call SYMBOL(artContextCopyForLongJump)          // Context* context,
673                                                    // uintptr_t* gprs,
674                                                    // uintptr_t* fprs
675
676    DECREASE_FRAME 8                                // Remove the context and GPRS arguments.
677    POP_ARG edx                                     // Pop the FPR array pointer; ESP now points to the GPRs.
678
679    // Address base of FPRs.
680    movsd 0(%edx), %xmm0     // Load up XMM0-XMM7.
681    movsd 8(%edx), %xmm1
682    movsd 16(%edx), %xmm2
683    movsd 24(%edx), %xmm3
684    movsd 32(%edx), %xmm4
685    movsd 40(%edx), %xmm5
686    movsd 48(%edx), %xmm6
687    movsd 56(%edx), %xmm7
688    popal            // Load all registers except ESP and EIP with values in gprs.
689    CFI_ADJUST_CFA_OFFSET(-(X86_LONG_JUMP_GPRS_SIZE - /*ESP*/ 4))
690
691    POP_ARG esp      // Load stack pointer.
692    CFI_DEF_CFA(esp, 4)
693    ret              // Pop the target EIP from the new stack and jump to it.
694#endif
695END_FUNCTION art_quick_do_long_jump
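// In outline: the non-Apple path above has artContextCopyForLongJump() fill a scratch
// area on the current stack with the context's GPR and FPR values, reloads XMM0-XMM7 from
// the FPR area, restores the general purpose registers with `popal`, switches to the
// target ESP and finally uses `ret` to pop the target EIP from the new stack.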
696
697MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
698    DEFINE_FUNCTION VAR(c_name)
699    SETUP_SAVE_REFS_ONLY_FRAME ebx               // save ref containing registers for GC
700    // Outgoing argument set up
701    INCREASE_FRAME 8                             // push padding
702    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
703    CFI_ADJUST_CFA_OFFSET(4)
704    PUSH eax                                     // pass arg1
705    call CALLVAR(cxx_name)                       // cxx_name(arg1, Thread*)
706    DECREASE_FRAME 16                            // pop arguments
707    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
708    CALL_MACRO(return_macro)                     // return or deliver exception
709    END_FUNCTION VAR(c_name)
710END_MACRO
711
712MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
713    DEFINE_FUNCTION VAR(c_name)
714    SETUP_SAVE_REFS_ONLY_FRAME ebx               // save ref containing registers for GC
715    // Outgoing argument set up
716    PUSH eax                                     // push padding
717    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
718    CFI_ADJUST_CFA_OFFSET(4)
719    PUSH ecx                                     // pass arg2
720    PUSH eax                                     // pass arg1
721    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, Thread*)
722    DECREASE_FRAME 16                            // pop arguments
723    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
724    CALL_MACRO(return_macro)                     // return or deliver exception
725    END_FUNCTION VAR(c_name)
726END_MACRO
727
728MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
729    DEFINE_FUNCTION VAR(c_name)
730    SETUP_SAVE_REFS_ONLY_FRAME ebx               // save ref containing registers for GC
731    // Outgoing argument set up
732    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
733    CFI_ADJUST_CFA_OFFSET(4)
734    PUSH edx                                     // pass arg3
735    PUSH ecx                                     // pass arg2
736    PUSH eax                                     // pass arg1
737    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, Thread*)
738    DECREASE_FRAME 16                            // pop arguments
739    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
740    CALL_MACRO(return_macro)                     // return or deliver exception
741    END_FUNCTION VAR(c_name)
742END_MACRO
743
744MACRO3(FOUR_ARG_DOWNCALL, c_name, cxx_name, return_macro)
745    DEFINE_FUNCTION VAR(c_name)
746    SETUP_SAVE_REFS_ONLY_FRAME_PRESERVE_TEMP_REG ebx  // save ref containing registers for GC
747
748    // Outgoing argument set up
749    INCREASE_FRAME 12                            // alignment padding
750    pushl %fs:THREAD_SELF_OFFSET                 // pass Thread::Current()
751    CFI_ADJUST_CFA_OFFSET(4)
752    PUSH ebx                                     // pass arg4
753    PUSH edx                                     // pass arg3
754    PUSH ecx                                     // pass arg2
755    PUSH eax                                     // pass arg1
756    call CALLVAR(cxx_name)                       // cxx_name(arg1, arg2, arg3, arg4, Thread*)
757    DECREASE_FRAME 32                            // pop arguments
758    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
759    CALL_MACRO(return_macro)                     // return or deliver exception
760    END_FUNCTION VAR(c_name)
761END_MACRO
762
763    /*
764     * Macro for resolution and initialization of indexed DEX file
765     * constants such as classes and strings.
766     */
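// In outline, each such downcall runs the C++ resolver under a kSaveEverything frame,
// returns the resolved object in EAX when the result is non-null (deoptimizing first if
// the thread has a pending deopt check), and otherwise delivers the pending exception.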
767MACRO3(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET)
768    DEFINE_FUNCTION VAR(c_name)
769    SETUP_SAVE_EVERYTHING_FRAME ebx, \runtime_method_offset  // save ref containing registers for GC
770    // Outgoing argument set up
771    INCREASE_FRAME 8                                  // push padding
772    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
773    CFI_ADJUST_CFA_OFFSET(4)
774    PUSH eax                                          // pass the index of the constant as arg1
775    call CALLVAR(cxx_name)                            // cxx_name(arg1, Thread*)
776    DECREASE_FRAME 16                                 // pop arguments
777    testl %eax, %eax                                  // If result is null deliver pending exception
778    jz 1f
779    DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_EAX ebx,  /* is_ref= */1  // Check for deopt
7801:
781    DELIVER_PENDING_EXCEPTION_FRAME_READY
782    END_FUNCTION VAR(c_name)
783END_MACRO
784
785MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT, c_name, cxx_name)
786    ONE_ARG_SAVE_EVERYTHING_DOWNCALL \c_name, \cxx_name, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
787END_MACRO
788
789MACRO0(RETURN_OR_DEOPT_IF_RESULT_IS_NON_NULL_OR_DELIVER)
790    CFI_REMEMBER_STATE
791    testl %eax, %eax                  // eax == 0 ?
792    jz  1f                            // if eax == 0 goto 1
793    DEOPT_OR_RETURN ebx, /*is_ref=*/1 // check if deopt is required
7941:                                    // deliver exception on current thread
795    CFI_RESTORE_STATE_AND_DEF_CFA esp, 4
796    DELIVER_PENDING_EXCEPTION
797END_MACRO
798
799MACRO1(RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION, is_ref = 0)
800    CFI_REMEMBER_STATE
801    cmpl MACRO_LITERAL(0),%fs:THREAD_EXCEPTION_OFFSET // exception field == 0 ?
802    jne 1f                                            // if exception field != 0 goto 1
803    DEOPT_OR_RETURN ebx, \is_ref                      // check if deopt is required
8041:                                                    // deliver exception on current thread
805    CFI_RESTORE_STATE_AND_DEF_CFA esp, 4
806    DELIVER_PENDING_EXCEPTION
807END_MACRO
808
809MACRO0(RETURN_REF_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION)
810    RETURN_OR_DEOPT_OR_DELIVER_PENDING_EXCEPTION /*is_ref=*/1
811END_MACRO
812
813MACRO2(DEOPT_OR_RETURN, temp, is_ref = 0)
814  cmpl LITERAL(0), %fs:THREAD_DEOPT_CHECK_REQUIRED_OFFSET
815  jne 2f
816  ret
8172:
818  SETUP_SAVE_EVERYTHING_FRAME \temp
819  INCREASE_FRAME 4                  // alignment padding
820  pushl MACRO_LITERAL(\is_ref)      // is_ref
821  CFI_ADJUST_CFA_OFFSET(4)
822  PUSH_ARG eax                      // result
823  pushl %fs:THREAD_SELF_OFFSET      // Pass Thread::Current
824  CFI_ADJUST_CFA_OFFSET(4)
825  call SYMBOL(artDeoptimizeIfNeeded)
826  DECREASE_FRAME(16)                // pop arguments
827
828  CFI_REMEMBER_STATE
829  testl %eax, %eax
830  jnz 3f
831
832  RESTORE_SAVE_EVERYTHING_FRAME
833  ret
834
8353:
836  // Deoptimize
837  CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
838  call SYMBOL(art_quick_do_long_jump)
839  UNREACHABLE
840END_MACRO
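// As used above, artDeoptimizeIfNeeded() is expected to return a non-zero long jump
// context in EAX when the thread actually needs to deoptimize, in which case we long-jump
// instead of returning; otherwise the save-everything frame is restored and we return
// normally with the original result in EAX.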
841
842MACRO2(DEOPT_OR_RESTORE_SAVE_EVERYTHING_FRAME_AND_RETURN_EAX, temp, is_ref = 0)
843  cmpl LITERAL(0), %fs:THREAD_DEOPT_CHECK_REQUIRED_OFFSET
844  CFI_REMEMBER_STATE
845  jne 2f
846  RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX
847  ret
8482:
849  CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
850  movl %eax, SAVE_EVERYTHING_FRAME_EAX_OFFSET(%esp) // update eax in the frame
851  INCREASE_FRAME 4                                  // alignment padding
852  pushl MACRO_LITERAL(\is_ref)                      // is_ref
853  CFI_ADJUST_CFA_OFFSET(4)
854  PUSH_ARG eax                                      // result
855  pushl %fs:THREAD_SELF_OFFSET                      // Pass Thread::Current
856  CFI_ADJUST_CFA_OFFSET(4)
857  call SYMBOL(artDeoptimizeIfNeeded)
858  DECREASE_FRAME(16)                                // pop arguments
859
860  CFI_REMEMBER_STATE
861  testl %eax, %eax
862  jnz 3f
863
864  RESTORE_SAVE_EVERYTHING_FRAME
865  ret
866
8673:
868  // Deoptimize
869  CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
870  call SYMBOL(art_quick_do_long_jump)
871  UNREACHABLE
872END_MACRO
873
874
875MACRO0(RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER)
876    CFI_REMEMBER_STATE
877    testl %eax, %eax               // eax == 0 ?
878    jnz  1f                        // if eax != 0 goto 1
879    DEOPT_OR_RETURN ebx            // check if deopt is needed
8801:                                 // deliver exception on current thread
881    CFI_RESTORE_STATE_AND_DEF_CFA esp, 4
882    DELIVER_PENDING_EXCEPTION
883END_MACRO
884
885// Generate the allocation entrypoints for each allocator.
886GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
887
888// Allocators that have x86-specific assembly implementations below are commented out here.
889// Region TLAB:
890// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
891// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
892GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
893GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(_region_tlab, RegionTLAB)
894// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
895// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_region_tlab, RegionTLAB)
896// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_region_tlab, RegionTLAB)
897// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_region_tlab, RegionTLAB)
898// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_region_tlab, RegionTLAB)
899GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_region_tlab, RegionTLAB)
900GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_region_tlab, RegionTLAB)
901GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_region_tlab, RegionTLAB)
902// Normal TLAB:
903// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
904// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
905GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
906GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(_tlab, TLAB)
907// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
908// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_tlab, TLAB)
909// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_tlab, TLAB)
910// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_tlab, TLAB)
911// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_tlab, TLAB)
912GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_tlab, TLAB)
913GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_tlab, TLAB)
914GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_tlab, TLAB)
915
916// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc).
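// Roughly, the fast path below does the following (pseudo-code; names follow the offset
// macros used in the assembly):
//   if (thread_local_alloc_stack_top >= thread_local_alloc_stack_end) goto slow_path;
//   size = klass->object_size_alloc_fast_path;
//   if (size > ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE) goto slow_path;
//   run = thread->rosalloc_runs[(size >> ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT) - 1];
//   obj = run->free_list.head;
//   if (obj == null) goto slow_path;
//   run->free_list.head = obj->next;  run->free_list.size--;
//   obj->klass = klass;  *thread_local_alloc_stack_top++ = obj;  return obj;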
917MACRO2(ART_QUICK_ALLOC_OBJECT_ROSALLOC, c_name, cxx_name)
918    DEFINE_FUNCTION VAR(c_name)
919    // Fast path rosalloc allocation.
920    // eax: type/return value
921    // ecx, ebx, edx: free
922    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
923                                                        // Check if the thread local allocation
924                                                        // stack has room
925    movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %ecx
926    cmpl THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%ebx), %ecx
927    jae  .Lslow_path\c_name
928
929    movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%eax), %ecx  // Load the object size (ecx)
930                                                        // Check if the size is for a thread
931                                                        // local allocation. Also does the
932                                                        // finalizable and initialization check.
933    cmpl LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %ecx
934    ja   .Lslow_path\c_name
935    shrl LITERAL(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), %ecx // Calculate the rosalloc bracket index
936                                                            // from object size.
937                                                        // Load thread local rosalloc run (ebx)
938                                                        // Subtract __SIZEOF_POINTER__ to subtract
939                                                        // one from ecx as there is no 0 byte run
940                                                        // and the size is already aligned.
941    movl (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)(%ebx, %ecx, __SIZEOF_POINTER__), %ebx
942                                                        // Load free_list head (ecx),
943                                                        // this will be the return value.
944    movl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx), %ecx
945    jecxz   .Lslow_path\c_name
946                                                        // Point of no slow path. Won't go to
947                                                        // the slow path from here on.
948                                                        // Load the next pointer of the head
949                                                        // and update head of free list with
950                                                        // next pointer
951    movl ROSALLOC_SLOT_NEXT_OFFSET(%ecx), %edx
952    movl %edx, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx)
953                                                        // Decrement size of free list by 1
954    decl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)(%ebx)
955                                                        // Store the class pointer in the
956                                                        // header. This also overwrites the
957                                                        // next pointer. The offsets are
958                                                        // asserted to match.
959#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
960#error "Class pointer needs to overwrite next pointer."
961#endif
962    POISON_HEAP_REF eax
963    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%ecx)
964    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
965                                                        // Push the new object onto the thread
966                                                        // local allocation stack and
967                                                        // increment the thread local
968                                                        // allocation stack top.
969    movl THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx), %eax
970    movl %ecx, (%eax)
971    addl LITERAL(COMPRESSED_REFERENCE_SIZE), %eax
972    movl %eax, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET(%ebx)
973                                                        // No fence needed for x86.
974    movl %ecx, %eax                                     // Move object to return register
975    ret
976.Lslow_path\c_name:
977    SETUP_SAVE_REFS_ONLY_FRAME ebx              // save ref containing registers for GC
978    // Outgoing argument set up
979    INCREASE_FRAME(8)                           // alignment padding
980    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
981    CFI_ADJUST_CFA_OFFSET(4)
982    PUSH eax
983    call SYMBOL(artAllocObjectFromCodeResolvedRosAlloc)  // cxx_name(arg0, Thread*)
984    addl LITERAL(16), %esp                       // pop arguments
985    CFI_ADJUST_CFA_OFFSET(-16)
986    RESTORE_SAVE_REFS_ONLY_FRAME                 // restore frame up to return address
987    RETURN_OR_DEOPT_IF_RESULT_IS_NON_NULL_OR_DELIVER      // return or deliver exception
988    END_FUNCTION VAR(c_name)
989END_MACRO
990
991ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc
992ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc
993
994// The common fast path code for art_quick_alloc_object_resolved/initialized_tlab
995// and art_quick_alloc_object_resolved/initialized_region_tlab.
996//
997// EAX: type/return_value
998MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH, slowPathLabel)
999    movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
1000    movl THREAD_LOCAL_END_OFFSET(%ebx), %edi            // Load thread_local_end.
1001    subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi            // Compute the remaining buffer size.
1002    movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%eax), %ecx  // Load the object size.
1003    cmpl %edi, %ecx                                     // Check if it fits.
1004    ja   VAR(slowPathLabel)
1005    movl THREAD_LOCAL_POS_OFFSET(%ebx), %edx            // Load thread_local_pos
1006                                                        // as allocated object.
1007    addl %edx, %ecx                                     // Add the object size.
1008    movl %ecx, THREAD_LOCAL_POS_OFFSET(%ebx)            // Update thread_local_pos.
1009                                                        // Store the class pointer in the header.
1010                                                        // No fence needed for x86.
1011    POISON_HEAP_REF eax
1012    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%edx)
1013    movl %edx, %eax
1014    POP edi
1015    ret                                                 // Fast path succeeded.
1016END_MACRO
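// The fast path above is a simple bump-pointer allocation; in pseudo-code:
//   size = klass->object_size_alloc_fast_path;
//   if (size > thread_local_end - thread_local_pos) goto slow_path;
//   obj = thread_local_pos;  thread_local_pos += size;
//   obj->klass = klass;      // no fence needed on x86
//   return obj;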
1017
1018// The common slow path code for art_quick_alloc_object_resolved/initialized_tlab
1019// and art_quick_alloc_object_resolved/initialized_region_tlab.
1020MACRO1(ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH, cxx_name)
1021    POP edi
1022    SETUP_SAVE_REFS_ONLY_FRAME ebx                      // save ref containing registers for GC
1023    // Outgoing argument set up
1024    subl LITERAL(8), %esp                               // alignment padding
1025    CFI_ADJUST_CFA_OFFSET(8)
1026    pushl %fs:THREAD_SELF_OFFSET                        // pass Thread::Current()
1027    CFI_ADJUST_CFA_OFFSET(4)
1028    PUSH eax
1029    call CALLVAR(cxx_name)                              // cxx_name(arg0, Thread*)
1030    addl LITERAL(16), %esp
1031    CFI_ADJUST_CFA_OFFSET(-16)
1032    RESTORE_SAVE_REFS_ONLY_FRAME                        // restore frame up to return address
1033    RETURN_OR_DEOPT_IF_RESULT_IS_NON_NULL_OR_DELIVER    // return or deliver exception
1034END_MACRO
1035
1036MACRO2(ART_QUICK_ALLOC_OBJECT_TLAB, c_name, cxx_name)
1037    DEFINE_FUNCTION VAR(c_name)
1038    // Fast path tlab allocation.
1039    // EAX: type
1040    // EBX, ECX, EDX: free.
1041    PUSH edi
1042    CFI_REMEMBER_STATE
1043    ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH .Lslow_path\c_name
1044.Lslow_path\c_name:
1045    CFI_RESTORE_STATE_AND_DEF_CFA esp, 8
1046    ALLOC_OBJECT_RESOLVED_TLAB_SLOW_PATH RAW_VAR(cxx_name)
1047    END_FUNCTION VAR(c_name)
1048END_MACRO
1049
1050ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_tlab, artAllocObjectFromCodeResolvedTLAB
1051ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_tlab, artAllocObjectFromCodeInitializedTLAB
1052ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_region_tlab, artAllocObjectFromCodeResolvedRegionTLAB
1053ART_QUICK_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_region_tlab, artAllocObjectFromCodeInitializedRegionTLAB
1054
1055// The fast path code for art_quick_alloc_array_region_tlab.
1056// Inputs: EAX: the class, ECX: int32_t component_count, EDX: total_size
1057// Free temp: EBX
1058// Output: EAX: return value.
1059MACRO1(ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE, slowPathLabel)
1060    mov %fs:THREAD_SELF_OFFSET, %ebx                          // ebx = thread
1061    // Mask out the unaligned part to make sure we are 8 byte aligned.
1062    andl LITERAL(OBJECT_ALIGNMENT_MASK_TOGGLED), %edx
1063    movl THREAD_LOCAL_END_OFFSET(%ebx), %edi
1064    subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi
1065    cmpl %edi, %edx                                           // Check if it fits.
1066    ja   RAW_VAR(slowPathLabel)
1067    movl THREAD_LOCAL_POS_OFFSET(%ebx), %edi
1068    addl %edi, %edx                                            // Add the object size.
1069    movl %edx, THREAD_LOCAL_POS_OFFSET(%ebx)                   // Update thread_local_pos_
1070                                                               // Store the class pointer in the
1071                                                               // header.
1072                                                               // No fence needed for x86.
1073    POISON_HEAP_REF eax
1074    movl %eax, MIRROR_OBJECT_CLASS_OFFSET(%edi)
1075    movl %ecx, MIRROR_ARRAY_LENGTH_OFFSET(%edi)
1076    movl %edi, %eax
1077    POP edi
1078    ret                                                        // Fast path succeeded.
1079END_MACRO
1080
1081MACRO1(COMPUTE_ARRAY_SIZE_UNKNOWN, slow_path)
1082    // Possibly a large object, go slow.
1083    // Also does negative array size check.
1084    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8), %ecx
1085    jae RAW_VAR(slow_path)
1086    PUSH ecx
1087    movl %ecx, %edx
1088    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%eax), %ecx        // Load component type.
1089    UNPOISON_HEAP_REF ecx
1090    movl MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET(%ecx), %ecx // Load primitive type.
1091    shr MACRO_LITERAL(PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT), %ecx        // Get component size shift.
1092    sall %cl, %edx                                              // Calculate array count shifted.
1093    // Add array header + alignment rounding.
1094    add MACRO_LITERAL(MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK), %edx
1095    // Add 4 extra bytes if we are doing a long array.
1096    add MACRO_LITERAL(1), %ecx
1097    and MACRO_LITERAL(4), %ecx
1098#if MIRROR_WIDE_ARRAY_DATA_OFFSET != MIRROR_INT_ARRAY_DATA_OFFSET + 4
1099#error Long array data offset must be 4 greater than int array data offset.
1100#endif
1101    addl %ecx, %edx
1102    POP ecx
1103END_MACRO
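// Worked example for the extra-bytes trick above: for a long[] the component size shift
// is 3, so (3 + 1) & 4 == 4 extra bytes are added and the header offset effectively
// becomes MIRROR_WIDE_ARRAY_DATA_OFFSET; for an int[] the shift is 2 and (2 + 1) & 4 == 0.
// The OBJECT_ALIGNMENT_MASK added here, combined with the masking by
// OBJECT_ALIGNMENT_MASK_TOGGLED in the allocation fast path, rounds the total size up to
// the object alignment.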
1104
1105MACRO1(COMPUTE_ARRAY_SIZE_8, slow_path)
1106    // EAX: mirror::Class* klass, ECX: int32_t component_count
1107    // Possibly a large object, go slow.
1108    // Also does negative array size check.
1109    cmpl LITERAL(MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET), %ecx
1110    jae RAW_VAR(slow_path)
1111    // Add array header + alignment rounding.
1112    leal (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)(%ecx), %edx
1113END_MACRO
1114
1115MACRO1(COMPUTE_ARRAY_SIZE_16, slow_path)
1116    // EAX: mirror::Class* klass, ECX: int32_t component_count
1117    // Possibly a large object, go slow.
1118    // Also does negative array size check.
1119    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 2), %ecx
1120    jae RAW_VAR(slow_path)
1121    // Add array header + alignment rounding.
1122    leal ((MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 2)(%ecx), %edx
1123    sall MACRO_LITERAL(1), %edx
1124END_MACRO
1125
1126MACRO1(COMPUTE_ARRAY_SIZE_32, slow_path)
1127    // EAX: mirror::Class* klass, ECX: int32_t component_count
1128    // Possibly a large object, go slow.
1129    // Also does negative array size check.
1130    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 4), %ecx
1131    jae RAW_VAR(slow_path)
1132    // Add array header + alignment rounding.
1133    leal ((MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 4)(%ecx), %edx
1134    sall MACRO_LITERAL(2), %edx
1135END_MACRO
1136
1137MACRO1(COMPUTE_ARRAY_SIZE_64, slow_path)
1138    // EAX: mirror::Class* klass, ECX: int32_t component_count
1139    // Possibly a large object, go slow.
1140    // Also does negative array size check.
1141    cmpl LITERAL((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8), %ecx
1142    jae RAW_VAR(slow_path)
1143    // Add array header + alignment rounding.
1144    leal ((MIRROR_WIDE_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK) / 8)(%ecx), %edx
1145    sall MACRO_LITERAL(3), %edx
1146END_MACRO
1147
1148MACRO3(GENERATE_ALLOC_ARRAY_TLAB, c_entrypoint, cxx_name, size_setup)
1149    DEFINE_FUNCTION VAR(c_entrypoint)
1150    // EAX: mirror::Class* klass, ECX: int32_t component_count
1151    PUSH edi
1152    CALL_MACRO(size_setup) .Lslow_path\c_entrypoint
1153    CFI_REMEMBER_STATE
1154    ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE .Lslow_path\c_entrypoint
1155.Lslow_path\c_entrypoint:
1156    CFI_RESTORE_STATE_AND_DEF_CFA esp, 8
1157    POP edi
1158    SETUP_SAVE_REFS_ONLY_FRAME ebx                      // save ref containing registers for GC
1159    // Outgoing argument set up
1160    PUSH eax                                            // alignment padding
1161    pushl %fs:THREAD_SELF_OFFSET                        // pass Thread::Current()
1162    CFI_ADJUST_CFA_OFFSET(4)
1163    PUSH ecx
1164    PUSH eax
1165    call CALLVAR(cxx_name)                              // cxx_name(arg0, arg1, Thread*)
1166    addl LITERAL(16), %esp                              // pop arguments
1167    CFI_ADJUST_CFA_OFFSET(-16)
1168    RESTORE_SAVE_REFS_ONLY_FRAME                        // restore frame up to return address
1169    RETURN_OR_DEOPT_IF_RESULT_IS_NON_NULL_OR_DELIVER    // return or deliver exception
1170    END_FUNCTION VAR(c_entrypoint)
1171END_MACRO
1172
1173
1174GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
1175GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_8
1176GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_16
1177GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_32
1178GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_64
1179
1180GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
1181GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_8
1182GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_16
1183GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
1184GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64
1185
1186ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
1187ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_resolve_type, artResolveTypeFromCode
1188ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_type_and_verify_access, artResolveTypeAndVerifyAccessFromCode
1189ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_handle, artResolveMethodHandleFromCode
1190ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_type, artResolveMethodTypeFromCode
1191ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
1192
1193TWO_ARG_DOWNCALL art_quick_handle_fill_data, \
1194                 artHandleFillArrayDataFromCode, \
1195                 RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER
1196
1197    /*
1198     * Entry from managed code that tries to lock the object in a fast path and
1199     * calls `artLockObjectFromCode()` for the difficult cases, may block for GC.
1200     * EAX holds the possibly null object to lock.
1201     */
1202DEFINE_FUNCTION art_quick_lock_object
1203    testl %eax, %eax
1204    jz   SYMBOL(art_quick_lock_object_no_inline)
1205    movl %eax, %ecx                       // Move obj to a different register.
1206    LOCK_OBJECT_FAST_PATH ecx, edx, /*saved_eax*/ none, .Llock_object_slow
1207.Llock_object_slow:
1208    movl %ecx, %eax                       // Move obj back to EAX.
1209    jmp  SYMBOL(art_quick_lock_object_no_inline)
1210END_FUNCTION art_quick_lock_object
1211
1212    /*
1213     * Entry from managed code that calls `artLockObjectFromCode()`, may block for GC.
1214     * EAX holds the possibly null object to lock.
1215     */
1216DEFINE_FUNCTION art_quick_lock_object_no_inline
1217    // This is also the slow path for art_quick_lock_object.
1218    SETUP_SAVE_REFS_ONLY_FRAME ebx        // save ref containing registers for GC
1219    // Outgoing argument set up
1220    INCREASE_FRAME 8                      // alignment padding
1221    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
1222    CFI_ADJUST_CFA_OFFSET(4)
1223    PUSH_ARG eax                          // pass object
1224    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
1225    DECREASE_FRAME 16                     // pop arguments
1226    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
1227    RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER
1228END_FUNCTION art_quick_lock_object_no_inline
1229
1230    /*
1231     * Entry from managed code that tries to unlock the object in a fast path and calls
1232     * `artUnlockObjectFromCode()` for the difficult cases and delivers exception on failure.
1233     * EAX holds the possibly null object to unlock.
1234     */
1235DEFINE_FUNCTION art_quick_unlock_object
1236    testl %eax, %eax
1237    jz   SYMBOL(art_quick_unlock_object_no_inline)
1238    movl %eax, %ecx                       // Move obj to a different register.
1239    UNLOCK_OBJECT_FAST_PATH ecx, edx, /*saved_eax*/ none, .Lunlock_object_slow
1240.Lunlock_object_slow:
1241    movl %ecx, %eax                       // Move obj back to EAX.
1242    jmp  SYMBOL(art_quick_unlock_object_no_inline)
1243END_FUNCTION art_quick_unlock_object
1244
1245    /*
1246     * Entry from managed code that calls `artUnlockObjectFromCode()`
1247     * and delivers exception on failure.
1248     * EAX holds the possibly null object to unlock.
1249     */
1250DEFINE_FUNCTION art_quick_unlock_object_no_inline
1251    // This is also the slow path for art_quick_unlock_object.
1252    SETUP_SAVE_REFS_ONLY_FRAME ebx        // save ref containing registers for GC
1253    // Outgoing argument set up
1254    INCREASE_FRAME 8                      // alignment padding
1255    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
1256    CFI_ADJUST_CFA_OFFSET(4)
1257    PUSH_ARG eax                          // pass object
1258    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
1259    DECREASE_FRAME 16                     // pop arguments
1260    RESTORE_SAVE_REFS_ONLY_FRAME          // restore frame up to return address
1261    RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER
1262END_FUNCTION art_quick_unlock_object_no_inline
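
// Both lock/unlock `_no_inline` stubs above use the same outgoing layout: the
// 8 bytes from INCREASE_FRAME plus the two pushed words keep the stack 16-byte
// aligned for a C call of roughly the shape (sketch)
//   int artUnlockObjectFromCode(mirror::Object* obj, Thread* self);  // 0 on success
// which is why RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER tests the integer
// result against zero.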
1263
1264DEFINE_FUNCTION art_quick_instance_of
1265    PUSH eax                              // alignment padding
1266    PUSH ecx                              // pass arg2 - obj->klass
1267    PUSH eax                              // pass arg1 - checked class
1268    call SYMBOL(artInstanceOfFromCode)    // (Object* obj, Class* ref_klass)
1269    addl LITERAL(12), %esp                // pop arguments
1270    CFI_ADJUST_CFA_OFFSET(-12)
1271    ret
1272END_FUNCTION art_quick_instance_of
1273
1274DEFINE_FUNCTION art_quick_check_instance_of
1275    // Type check using the bit string passes null as the target class. In that case just throw.
1276    testl %ecx, %ecx
1277    jz .Lthrow_class_cast_exception_for_bitstring_check
1278
1279    PUSH eax                              // alignment padding
1280    PUSH ecx                              // pass arg2 - checked class
1281    PUSH eax                              // pass arg1 - obj
1282    call SYMBOL(artInstanceOfFromCode)    // (Object* obj, Class* ref_klass)
1283    testl %eax, %eax
1284    jz .Lthrow_class_cast_exception       // jump forward if not assignable
1285    addl LITERAL(12), %esp                // pop arguments
1286    CFI_ADJUST_CFA_OFFSET(-12)
1287    ret
1288    CFI_ADJUST_CFA_OFFSET(12)             // Reset unwind info so following code unwinds.
1289
1290.Lthrow_class_cast_exception:
1291    POP eax                               // pop arguments
1292    POP ecx
1293    addl LITERAL(4), %esp
1294    CFI_ADJUST_CFA_OFFSET(-4)
1295
1296.Lthrow_class_cast_exception_for_bitstring_check:
1297    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx // save all registers as basis for long jump context
1298    // Outgoing argument set up
1299    PUSH eax                              // alignment padding
1300    pushl %fs:THREAD_SELF_OFFSET          // pass Thread::Current()
1301    CFI_ADJUST_CFA_OFFSET(4)
1302    PUSH ecx                              // pass arg2
1303    PUSH eax                              // pass arg1
1304    call SYMBOL(artThrowClassCastExceptionForObject)  // (Object* src, Class* dest, Thread*)
1305    call SYMBOL(art_quick_do_long_jump)
1306    UNREACHABLE
1307END_FUNCTION art_quick_check_instance_of
1308
1309// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
1310MACRO2(POP_REG_NE, reg, exclude_reg)
1311    .ifc RAW_VAR(reg), RAW_VAR(exclude_reg)
1312      DECREASE_FRAME 4
1313    .else
1314      POP RAW_VAR(reg)
1315    .endif
1316END_MACRO
1317
1318DEFINE_FUNCTION art_quick_aput_obj
1319    test %edx, %edx              // store of null
1320    jz .Laput_obj_null
1321    movl MIRROR_OBJECT_CLASS_OFFSET(%eax), %ebx
1322    UNPOISON_HEAP_REF ebx
1323#ifdef USE_READ_BARRIER
1324    cmpl LITERAL(0), %fs:THREAD_IS_GC_MARKING_OFFSET
1325    CFI_REMEMBER_STATE
1326    jnz .Laput_obj_gc_marking
1327#endif  // USE_READ_BARRIER
1328    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ebx), %ebx
1329    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ebx  // Both poisoned if heap poisoning is enabled.
1330    jne .Laput_obj_check_assignability
1331.Laput_obj_store:
1332    POISON_HEAP_REF edx
1333    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
1334    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
1335    shrl LITERAL(CARD_TABLE_CARD_SHIFT), %eax
1336    movb %dl, (%edx, %eax)
1337    ret
1338
1339.Laput_obj_null:
1340    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)
1341    ret
1342
1343.Laput_obj_check_assignability:
1344    UNPOISON_HEAP_REF ebx         // Unpoison array component type if poisoning is enabled.
1345    PUSH_ARG eax                  // Save `art_quick_aput_obj()` arguments.
1346    PUSH_ARG ecx
1347    PUSH_ARG edx
1348    INCREASE_FRAME 8              // Alignment padding.
1349    // Pass arg2 - type of the value to be stored.
1350#if defined(USE_HEAP_POISONING)
1351    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
1352    UNPOISON_HEAP_REF eax
1353    PUSH_ARG eax
1354#else
1355    pushl MIRROR_OBJECT_CLASS_OFFSET(%edx)
1356    CFI_ADJUST_CFA_OFFSET(4)
1357#endif
1358.Laput_obj_check_assignability_call:
1359    PUSH_ARG ebx                  // Pass arg1 - component type of the array.
1360    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)
1361    DECREASE_FRAME 16             // Pop `artIsAssignableFromCode()` arguments
1362    testl %eax, %eax
1363    POP_ARG edx                   // Pop `art_quick_aput_obj()` arguments; flags unaffected.
1364    POP_ARG ecx
1365    POP_ARG eax
1366    jz   .Lthrow_array_store_exception
1367    POISON_HEAP_REF edx
1368    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%eax, %ecx, 4)  // Do the aput.
1369    movl %fs:THREAD_CARD_TABLE_OFFSET, %edx
1370    shrl LITERAL(CARD_TABLE_CARD_SHIFT), %eax
1371    movb %dl, (%edx, %eax)
1372    ret
1373
1374.Lthrow_array_store_exception:
1375    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx // Save all registers as basis for long jump context.
1376    // Outgoing argument set up.
1377    PUSH_ARG eax                  // Alignment padding.
1378    PUSH_ARG fs:THREAD_SELF_OFFSET  // Pass Thread::Current()
1379    PUSH_ARG edx                  // Pass arg2 - value.
1380    PUSH_ARG eax                  // Pass arg1 - array.
1381    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
1382    call SYMBOL(art_quick_do_long_jump)
1383    UNREACHABLE
1384
1385#ifdef USE_READ_BARRIER
1386.Laput_obj_gc_marking:
1387    CFI_RESTORE_STATE_AND_DEF_CFA esp, 4
1388    PUSH_ARG eax                  // Save `art_quick_aput_obj()` arguments.
1389    PUSH_ARG ecx                  // We need to align stack for `art_quick_read_barrier_mark_regNN`
1390    PUSH_ARG edx                  // and use a register (EAX) as a temporary for the object class.
1391    call SYMBOL(art_quick_read_barrier_mark_reg03)  // Mark EBX.
1392    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ebx), %ebx
1393    UNPOISON_HEAP_REF ebx
1394    call SYMBOL(art_quick_read_barrier_mark_reg03)  // Mark EBX.
1395    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %eax
1396    UNPOISON_HEAP_REF eax
1397    call SYMBOL(art_quick_read_barrier_mark_reg00)  // Mark EAX.
1398    cmpl %eax, %ebx
1399    CFI_REMEMBER_STATE
1400    jne .Laput_obj_check_assignability_gc_marking
1401    POP_ARG edx                   // Restore `art_quick_aput_obj()` arguments.
1402    POP_ARG ecx
1403    POP_ARG eax
1404    jmp .Laput_obj_store
1405
1406.Laput_obj_check_assignability_gc_marking:
1407    CFI_RESTORE_STATE_AND_DEF_CFA esp, 16
1408    // Prepare arguments in line with `.Laput_obj_check_assignability_call` and jump there.
1409    // (EAX, ECX and EDX were already saved in the right stack slots.)
1410    INCREASE_FRAME 8              // Alignment padding.
1411    PUSH_ARG eax                  // Pass arg2 - type of the value to be stored.
1412    // The arg1 shall be pushed at `.Laput_obj_check_assignability_call`.
1413    jmp .Laput_obj_check_assignability_call
1414#endif  // USE_READ_BARRIER
1415END_FUNCTION art_quick_aput_obj
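
// The two `shrl`/`movb` sequences above are the GC write barrier's card marking.
// C sketch of the same operation (hypothetical helper; CARD_TABLE_CARD_SHIFT is
// the assembler constant used above):
//
//   static inline void mark_card(uint8_t* card_table_base, uintptr_t array_addr) {
//     // `movb %dl, (%edx, %eax)`: the dirty value stored is the low byte of the
//     // card table base register itself.
//     card_table_base[array_addr >> CARD_TABLE_CARD_SHIFT] =
//         (uint8_t)(uintptr_t)card_table_base;
//   }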
1416
1417DEFINE_FUNCTION art_quick_memcpy
1418    PUSH edx                      // pass arg3
1419    PUSH ecx                      // pass arg2
1420    PUSH eax                      // pass arg1
1421    // PLT call requires EBX initialized to the $_GLOBAL_OFFSET_TABLE_.
1422    SETUP_PC_REL_BASE_0 ebx
14231:
1424    addl $_GLOBAL_OFFSET_TABLE_ + (1b - 0b), %ebx
1425    call PLT_SYMBOL(memcpy)       // (void*, const void*, size_t)
1426    addl LITERAL(12), %esp        // pop arguments
1427    CFI_ADJUST_CFA_OFFSET(-12)
1428    ret
1429END_FUNCTION art_quick_memcpy
1430
1431DEFINE_FUNCTION art_quick_test_suspend
1432    SETUP_SAVE_EVERYTHING_FRAME ebx, RUNTIME_SAVE_EVERYTHING_FOR_SUSPEND_CHECK_METHOD_OFFSET  // save everything for GC
1433    // Outgoing argument set up
1434    INCREASE_FRAME 12                                 // push padding
1435    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
1436    CFI_ADJUST_CFA_OFFSET(4)
1437    call SYMBOL(artTestSuspendFromCode)               // (Thread*)
1438    DECREASE_FRAME 16                                 // pop arguments
1439
1440    CFI_REMEMBER_STATE
1441    testl %eax, %eax
1442    jnz .Ltest_suspend_deoptimize
1443
1444    RESTORE_SAVE_EVERYTHING_FRAME                     // restore frame up to return address
1445    ret                                               // return
1446
1447.Ltest_suspend_deoptimize:
1448    // Deoptimize
1449    CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
1450    call SYMBOL(art_quick_do_long_jump)
1451    UNREACHABLE
1452END_FUNCTION art_quick_test_suspend
1453
1454DEFINE_FUNCTION art_quick_d2l
1455    subl LITERAL(12), %esp        // alignment padding, room for argument
1456    CFI_ADJUST_CFA_OFFSET(12)
1457    movsd %xmm0, 0(%esp)          // arg a
1458    call SYMBOL(art_d2l)          // (jdouble a)
1459    addl LITERAL(12), %esp        // pop arguments
1460    CFI_ADJUST_CFA_OFFSET(-12)
1461    ret
1462END_FUNCTION art_quick_d2l
1463
1464DEFINE_FUNCTION art_quick_f2l
1465    subl LITERAL(12), %esp        // alignment padding
1466    CFI_ADJUST_CFA_OFFSET(12)
1467    movss %xmm0, 0(%esp)          // arg a
1468    call SYMBOL(art_f2l)          // (jfloat a)
1469    addl LITERAL(12), %esp        // pop arguments
1470    CFI_ADJUST_CFA_OFFSET(-12)
1471    ret
1472END_FUNCTION art_quick_f2l
1473
1474DEFINE_FUNCTION art_quick_ldiv
1475    subl LITERAL(12), %esp        // alignment padding
1476    CFI_ADJUST_CFA_OFFSET(12)
1477    PUSH ebx                      // pass arg4 b.hi
1478    PUSH edx                      // pass arg3 b.lo
1479    PUSH ecx                      // pass arg2 a.hi
1480    PUSH eax                      // pass arg1 a.lo
1481    call SYMBOL(artLdiv)          // (jlong a, jlong b)
1482    addl LITERAL(28), %esp        // pop arguments
1483    CFI_ADJUST_CFA_OFFSET(-28)
1484    ret
1485END_FUNCTION art_quick_ldiv
1486
1487DEFINE_FUNCTION art_quick_lmod
1488    subl LITERAL(12), %esp        // alignment padding
1489    CFI_ADJUST_CFA_OFFSET(12)
1490    PUSH ebx                      // pass arg4 b.hi
1491    PUSH edx                      // pass arg3 b.lo
1492    PUSH ecx                      // pass arg2 a.hi
1493    PUSH eax                      // pass arg1 a.lo
1494    call SYMBOL(artLmod)          // (jlong a, jlong b)
1495    addl LITERAL(28), %esp        // pop arguments
1496    CFI_ADJUST_CFA_OFFSET(-28)
1497    ret
1498END_FUNCTION art_quick_lmod
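
// art_quick_ldiv/art_quick_lmod above call C helpers of roughly the shape
//   int64_t artLdiv(int64_t a, int64_t b);   // likewise artLmod
// Each jlong arrives split across two GPRs (a in ECX:EAX, b in EBX:EDX), and the
// four pushes recreate the little-endian memory layout the cdecl callee expects:
//   (%esp) = a.lo, 4(%esp) = a.hi, 8(%esp) = b.lo, 12(%esp) = b.hi
// The 12-byte padding keeps the outgoing area 16-byte aligned.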
1499
1500DEFINE_FUNCTION art_quick_lmul
1501    imul %eax, %ebx               // ebx = a.lo(eax) * b.hi(ebx)
1502    imul %edx, %ecx               // ecx = b.lo(edx) * a.hi(ecx)
1503    mul  %edx                     // edx:eax = a.lo(eax) * b.lo(edx)
1504    add  %ebx, %ecx
1505    add  %ecx, %edx               // edx += (a.lo * b.hi) + (b.lo * a.hi)
1506    ret
1507END_FUNCTION art_quick_lmul
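
// art_quick_lmul uses the standard 32x32-bit decomposition: only the low 64 bits
// of the product are needed, so with a = a_hi:a_lo and b = b_hi:b_lo,
//   (a * b) mod 2^64 = a_lo*b_lo + ((a_lo*b_hi + a_hi*b_lo) << 32).
// Illustrative C sketch of the same computation:
//
//   static inline uint64_t lmul_sketch(uint32_t a_lo, uint32_t a_hi,
//                                      uint32_t b_lo, uint32_t b_hi) {
//     uint64_t low = (uint64_t)a_lo * b_lo;        // `mul %edx` -> EDX:EAX
//     uint32_t cross = a_lo * b_hi + a_hi * b_lo;  // the two `imul` + `add`
//     return low + ((uint64_t)cross << 32);        // `add %ecx, %edx`
//   }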
1508
1509DEFINE_FUNCTION art_quick_lshl
1510    // ecx:eax << edx
1511    xchg %edx, %ecx
1512    shld %cl,%eax,%edx
1513    shl  %cl,%eax
1514    test LITERAL(32), %cl
1515    jz  1f
1516    mov %eax, %edx
1517    xor %eax, %eax
15181:
1519    ret
1520END_FUNCTION art_quick_lshl
1521
1522DEFINE_FUNCTION art_quick_lshr
1523    // ecx:eax >> edx
1524    xchg %edx, %ecx
1525    shrd %cl,%edx,%eax
1526    sar  %cl,%edx
1527    test LITERAL(32),%cl
1528    jz  1f
1529    mov %edx, %eax
1530    sar LITERAL(31), %edx
15311:
1532    ret
1533END_FUNCTION art_quick_lshr
1534
1535DEFINE_FUNCTION art_quick_lushr
1536    // ecx:eax >>> edx
1537    xchg %edx, %ecx
1538    shrd %cl,%edx,%eax
1539    shr  %cl,%edx
1540    test LITERAL(32),%cl
1541    jz  1f
1542    mov %edx, %eax
1543    xor %edx, %edx
15441:
1545    ret
1546END_FUNCTION art_quick_lushr
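
// The three long-shift stubs above share one pattern: `shld`/`shrd` handle shift
// amounts 0-31, and the `test LITERAL(32), %cl` fix-up handles 32-63, because x86
// 32-bit shifts only use the low five bits of CL. C sketch for the unsigned right
// shift (illustrative; the arithmetic and left variants differ only in the fix-up
// values):
//
//   static inline uint64_t lushr_sketch(uint64_t x, uint32_t shift /* 0..63 */) {
//     uint32_t lo = (uint32_t)x, hi = (uint32_t)(x >> 32);
//     uint32_t n = shift & 31;
//     uint32_t new_lo = (lo >> n) | (n != 0 ? hi << (32 - n) : 0);  // shrd
//     uint32_t new_hi = hi >> n;                                    // shr
//     if (shift & 32) { new_lo = new_hi; new_hi = 0; }              // test/jz fix-up
//     return ((uint64_t)new_hi << 32) | new_lo;
//   }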
1547
1548GENERATE_STATIC_FIELD_GETTERS
1549
1550GENERATE_INSTANCE_FIELD_GETTERS
1551
1552GENERATE_STATIC_FIELD_SETTERS /*emit64=*/0
1553
1554THREE_ARG_DOWNCALL art_quick_set64_static, \
1555                   artSet64StaticFromCompiledCode, \
1556                   RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER
1557
1558GENERATE_INSTANCE_FIELD_SETTERS /*emit64=*/0
1559
1560// Call artSet64InstanceFromCompiledCode with 4 word-size arguments.
1561DEFINE_FUNCTION art_quick_set64_instance
1562    movd %ebx, %xmm0
1563    SETUP_SAVE_REFS_ONLY_FRAME ebx  // save ref containing registers for GC
1564    movd %xmm0, %ebx
1565    // Outgoing argument set up
1566    subl LITERAL(12), %esp         // alignment padding
1567    CFI_ADJUST_CFA_OFFSET(12)
1568    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1569    CFI_ADJUST_CFA_OFFSET(4)
1570    PUSH ebx                      // pass high half of new_val
1571    PUSH edx                      // pass low half of new_val
1572    PUSH ecx                      // pass object
1573    PUSH eax                      // pass field_idx
1574    call SYMBOL(artSet64InstanceFromCompiledCode)  // (field_idx, Object*, new_val, Thread*)
1575    addl LITERAL(32), %esp        // pop arguments
1576    CFI_ADJUST_CFA_OFFSET(-32)
1577    RESTORE_SAVE_REFS_ONLY_FRAME  // restore frame up to return address
1578    RETURN_OR_DEOPT_IF_INT_RESULT_IS_ZERO_OR_DELIVER  // return or deliver exception
1579END_FUNCTION art_quick_set64_instance
1580
1581DEFINE_FUNCTION art_quick_proxy_invoke_handler
1582    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX
1583    PUSH esp                      // pass SP
1584    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1585    CFI_ADJUST_CFA_OFFSET(4)
1586    PUSH ecx                      // pass receiver
1587    PUSH eax                      // pass proxy method
1588    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
1589    movd %eax, %xmm0              // place return value also into floating point return value
1590    movd %edx, %xmm1
1591    punpckldq %xmm1, %xmm0
1592    addl LITERAL(16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY), %esp
1593    CFI_ADJUST_CFA_OFFSET(-(16 + FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY))
1594    RESTORE_SAVE_REFS_ONLY_FRAME
1595    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1596END_FUNCTION art_quick_proxy_invoke_handler
1597
1598    /*
1599     * Called to resolve an imt conflict.
1600     * eax is the conflict ArtMethod.
1601     * xmm7 is a hidden argument that holds the target interface method.
1602     *
1603     * Note that this stub writes to eax.
1604     * Due to the lack of free registers, it also saves and restores esi.
1605     */
1606DEFINE_FUNCTION art_quick_imt_conflict_trampoline
1607    PUSH ESI
1608    movd %xmm7, %esi            // Get the target interface method stored in xmm7, remember it in ESI.
1609    movl ART_METHOD_JNI_OFFSET_32(%eax), %eax  // Load ImtConflictTable.
1610.Limt_table_iterate:
1611    cmpl %esi, 0(%eax)
1612    CFI_REMEMBER_STATE
1613    jne .Limt_table_next_entry
1614    // We successfully hit an entry in the table. Load the target method
1615    // and jump to it.
1616    movl __SIZEOF_POINTER__(%eax), %eax
1617    POP ESI
1618    jmp *ART_METHOD_QUICK_CODE_OFFSET_32(%eax)
1619.Limt_table_next_entry:
1620    CFI_RESTORE_STATE_AND_DEF_CFA esp, 8
1621    // If the entry is null, the interface method is not in the ImtConflictTable.
1622    cmpl LITERAL(0), 0(%eax)
1623    jz .Lconflict_trampoline
1624    // Iterate over the entries of the ImtConflictTable.
1625    addl LITERAL(2 * __SIZEOF_POINTER__), %eax
1626    jmp .Limt_table_iterate
1627.Lconflict_trampoline:
1628    // Call the runtime stub to populate the ImtConflictTable and jump to the
1629    // resolved method.
1630    // Pass the interface method in first argument.
1631    movl %esi, %eax
1632    POP ESI
1633    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
1634END_FUNCTION art_quick_imt_conflict_trampoline
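
// The loop above scans the ImtConflictTable: an array of
// { interface_method, implementation_method } pointer pairs terminated by a null
// interface method. C sketch (illustrative names):
//
//   static inline void* imt_conflict_lookup(void** table, void* interface_method) {
//     for (; table[0] != NULL; table += 2) {
//       if (table[0] == interface_method) return table[1];  // jump to its quick code
//     }
//     return NULL;  // not found: artInvokeInterfaceTrampoline fills the table
//   }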
1635
1636DEFINE_FUNCTION art_quick_resolution_trampoline
1637    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx
1638    movl %esp, %edi
1639    PUSH EDI                      // pass SP. do not just PUSH ESP; that messes up unwinding
1640    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1641    CFI_ADJUST_CFA_OFFSET(4)
1642    PUSH ecx                      // pass receiver
1643    PUSH eax                      // pass method
1644    call SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
1645    movl %eax, %edi               // remember code pointer in EDI
1646    addl LITERAL(16), %esp        // pop arguments
1647    CFI_ADJUST_CFA_OFFSET(-16)
1648    test %eax, %eax               // if the code pointer is null, go deliver the OOME.
1649    CFI_REMEMBER_STATE
1650    jz 1f
1651    RESTORE_SAVE_REFS_AND_ARGS_FRAME_AND_JUMP
16521:
1653    CFI_RESTORE_STATE_AND_DEF_CFA esp, 64
1654    RESTORE_SAVE_REFS_AND_ARGS_FRAME
1655    DELIVER_PENDING_EXCEPTION
1656END_FUNCTION art_quick_resolution_trampoline
1657
1658DEFINE_FUNCTION art_quick_generic_jni_trampoline
1659    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_EAX
1660    movl %esp, %ebp               // save SP at callee-save frame
1661    CFI_DEF_CFA_REGISTER(ebp)
1662    subl LITERAL(GENERIC_JNI_TRAMPOLINE_RESERVED_AREA), %esp
1663    // prepare for artQuickGenericJniTrampoline call
1664    // (Thread*, managed_sp, reserved_area)
1665    //   (esp)    4(esp)        8(esp)  <= C calling convention
1666    //  fs:...      ebp           esp   <= where they are
1667
1668    movl %esp, %eax
1669    subl LITERAL(4), %esp         // Padding for 16B alignment.
1670    pushl %eax                    // Pass reserved area.
1671    pushl %ebp                    // Pass managed frame SP.
1672    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1673    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
1674
1675    // The C call will have registered the complete save-frame on success.
1676    // The result of the call is:
1677    //     eax: pointer to native code, 0 on error.
1678    //     The bottom of the reserved area contains values for arg registers,
1679    //     hidden arg register and SP for out args for the call.
1680
1681    // Check for error (class init check or locking for synchronized native method can throw).
1682    test %eax, %eax
1683    jz .Lexception_in_native
1684
1685    // On x86 there are no registers passed, so no native call args to pop here.
1686
1687    // Save code pointer in EDX.
1688    movl %eax, %edx
1689    // Load hidden arg (EAX) for @CriticalNative.
1690    movl 16(%esp), %eax
1691    // Load SP for out args, releasing unneeded reserved area.
1692    movl 20(%esp), %esp
1693
1694    // Native call.
1695    call *%edx
1696
1697    // result sign extension is handled in C code
1698    // prepare for artQuickGenericJniEndTrampoline call
1699    // (Thread*, result, result_f)
1700    //  (esp)    4(esp)  12(esp)    <= C calling convention
1701    //  fs:...  eax:edx   fp0      <= where they are
1702
1703    subl LITERAL(20), %esp        // Padding & pass float result.
1704    fstpl (%esp)
1705    pushl %edx                    // Pass int result.
1706    pushl %eax
1707    pushl %fs:THREAD_SELF_OFFSET  // Pass Thread::Current().
1708    call SYMBOL(artQuickGenericJniEndTrampoline)
1709
1710    // Pending exceptions possible.
1711    mov %fs:THREAD_EXCEPTION_OFFSET, %ebx
1712    testl %ebx, %ebx
1713    jnz .Lexception_in_native
1714
1715    // Tear down the alloca.
1716    movl %ebp, %esp
1717
1718    // Quick expects the return value to be in xmm0.
1719    movd %eax, %xmm0
1720    movd %edx, %xmm1
1721    punpckldq %xmm1, %xmm0
1722
1723    LOAD_RUNTIME_INSTANCE ebx
1724    cmpb MACRO_LITERAL(0), RUN_EXIT_HOOKS_OFFSET_FROM_RUNTIME_INSTANCE(%ebx)
1725    CFI_REMEMBER_STATE
1726    jne .Lcall_method_exit_hook
1727.Lcall_method_exit_hook_done:
1728
1729    // Tear down the callee-save frame.
1730    CFI_DEF_CFA_REGISTER(esp)
1731    // Remove space for the method, FPR and GPR args
1732    DECREASE_FRAME 4 + 4 * 8 + 3*4
1733    // Restore callee saves and return.
1734    POP ebp
1735    POP esi
1736    POP edi
1737    ret
1738
1739.Lcall_method_exit_hook:
1740    CFI_RESTORE_STATE_AND_DEF_CFA ebp, 64
1741    movl LITERAL(FRAME_SIZE_SAVE_REFS_AND_ARGS), %ebx
1742    call art_quick_method_exit_hook
1743    jmp .Lcall_method_exit_hook_done
1744
1745.Lexception_in_native:
1746    pushl %fs:THREAD_TOP_QUICK_FRAME_OFFSET
1747    addl LITERAL(-1), (%esp)  // Remove the GenericJNI tag.
1748    movl (%esp), %esp
1749    call art_deliver_pending_exception
1750END_FUNCTION art_quick_generic_jni_trampoline
1751
1752DEFINE_FUNCTION art_deliver_pending_exception
1753    // This will create a new save-all frame, required by the runtime.
1754    DELIVER_PENDING_EXCEPTION
1755END_FUNCTION art_deliver_pending_exception
1756
1757DEFINE_FUNCTION art_quick_to_interpreter_bridge
1758    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx  // save frame
1759    mov %esp, %edx                // remember SP
1760    PUSH eax                      // alignment padding
1761    PUSH edx                      // pass SP
1762    pushl %fs:THREAD_SELF_OFFSET  // pass Thread::Current()
1763    CFI_ADJUST_CFA_OFFSET(4)
1764    PUSH eax                      // pass method
1765    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
1766    addl LITERAL(16), %esp        // pop arguments
1767    CFI_ADJUST_CFA_OFFSET(-16)
1768
1769    // Return eax:edx in xmm0 also.
1770    movd %eax, %xmm0
1771    movd %edx, %xmm1
1772    punpckldq %xmm1, %xmm0
1773
1774    addl LITERAL(48), %esp        // Remove FPRs and EAX, ECX, EDX, EBX.
1775    CFI_ADJUST_CFA_OFFSET(-48)
1776
1777    POP ebp                       // Restore callee saves
1778    POP esi
1779    POP edi
1780
1781    RETURN_OR_DELIVER_PENDING_EXCEPTION    // return or deliver exception
1782END_FUNCTION art_quick_to_interpreter_bridge
1783
1784    /*
1785     * Called by managed code, saves callee saves and then calls artInvokeObsoleteMethod
1786     */
1787ONE_ARG_RUNTIME_EXCEPTION art_invoke_obsolete_method_stub, artInvokeObsoleteMethod
1788
1789    /*
1790     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
1791     * will long jump to the interpreter bridge.
1792     */
1793DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
1794    SETUP_SAVE_EVERYTHING_FRAME ebx
1795    subl LITERAL(8), %esp                       // Align stack.
1796    CFI_ADJUST_CFA_OFFSET(8)
1797    pushl %fs:THREAD_SELF_OFFSET                // Pass Thread::Current().
1798    CFI_ADJUST_CFA_OFFSET(4)
1799    PUSH eax
1800    call SYMBOL(artDeoptimizeFromCompiledCode)  // (DeoptimizationKind, Thread*)
1801    call SYMBOL(art_quick_do_long_jump)
1802    UNREACHABLE
1803END_FUNCTION art_quick_deoptimize_from_compiled_code
1804
1805    /*
1806     * String's compareTo.
1807     *
1808     * On entry:
1809     *    eax:   this string object (known non-null)
1810     *    ecx:   comp string object (known non-null)
1811     */
1812DEFINE_FUNCTION art_quick_string_compareto
1813    PUSH esi                      // push callee save reg
1814    PUSH edi                      // push callee save reg
1815    mov MIRROR_STRING_COUNT_OFFSET(%eax), %edx
1816    mov MIRROR_STRING_COUNT_OFFSET(%ecx), %ebx
1817    lea MIRROR_STRING_VALUE_OFFSET(%eax), %esi
1818    lea MIRROR_STRING_VALUE_OFFSET(%ecx), %edi
1819#if (STRING_COMPRESSION_FEATURE)
1820    /* Distinguish the compression cases. */
1821    shrl    LITERAL(1), %edx
1822    jnc     .Lstring_compareto_this_is_compressed
1823    shrl    LITERAL(1), %ebx
1824    jnc     .Lstring_compareto_that_is_compressed
1825    jmp     .Lstring_compareto_both_not_compressed
1826.Lstring_compareto_this_is_compressed:
1827    shrl    LITERAL(1), %ebx
1828    jnc     .Lstring_compareto_both_compressed
1829    /* If (this->IsCompressed() && that->IsCompressed() == false) */
1830    mov     %edx, %eax
1831    subl    %ebx, %eax
1832    mov     %edx, %ecx
1833    cmovg   %ebx, %ecx
1834    /* Going into loop to compare each character */
1835    jecxz   .Lstring_compareto_keep_length            // check loop counter (if 0, don't compare)
1836.Lstring_compareto_loop_comparison_this_compressed:
1837    movzbl  (%esi), %edx                              // move *(this_cur_char) byte to long
1838    movzwl  (%edi), %ebx                              // move *(that_cur_char) word to long
1839    addl    LITERAL(1), %esi                          // ++this_cur_char (8-bit)
1840    addl    LITERAL(2), %edi                          // ++that_cur_char (16-bit)
1841    subl    %ebx, %edx
1842    loope   .Lstring_compareto_loop_comparison_this_compressed
1843    cmovne  %edx, %eax                        // return eax = *(this_cur_char) - *(that_cur_char)
1844    jmp     .Lstring_compareto_return
1845.Lstring_compareto_that_is_compressed:
1846    mov     %edx, %eax
1847    subl    %ebx, %eax
1848    mov     %edx, %ecx
1849    cmovg   %ebx, %ecx
1850    /* If (this->IsCompressed() == false && that->IsCompressed()) */
1851    jecxz   .Lstring_compareto_keep_length            // check loop counter, if 0, don't compare
1852.Lstring_compareto_loop_comparison_that_compressed:
1853    movzwl  (%esi), %edx                              // move *(this_cur_char) word to long
1854    movzbl  (%edi), %ebx                              // move *(that_cur_char) byte to long
1855    addl    LITERAL(2), %esi                          // ++this_cur_char (16-bit)
1856    addl    LITERAL(1), %edi                          // ++that_cur_char (8-bit)
1857    subl    %ebx, %edx
1858    loope   .Lstring_compareto_loop_comparison_that_compressed
1859    cmovne  %edx, %eax
1860    jmp     .Lstring_compareto_return         // return eax = *(this_cur_char) - *(that_cur_char)
1861.Lstring_compareto_both_compressed:
1862    /* Calculate min length and count diff */
1863    mov     %edx, %ecx
1864    mov     %edx, %eax
1865    subl    %ebx, %eax
1866    cmovg   %ebx, %ecx
1867    jecxz   .Lstring_compareto_keep_length
1868    repe    cmpsb
1869    je      .Lstring_compareto_keep_length
1870    movzbl  -1(%esi), %eax        // get last compared char from this string (8-bit)
1871    movzbl  -1(%edi), %ecx        // get last compared char from comp string (8-bit)
1872    jmp     .Lstring_compareto_count_difference
1873#endif // STRING_COMPRESSION_FEATURE
1874.Lstring_compareto_both_not_compressed:
1875    /* Calculate min length and count diff */
1876    mov     %edx, %ecx
1877    mov     %edx, %eax
1878    subl    %ebx, %eax
1879    cmovg   %ebx, %ecx
1880    /*
1881     * At this point we have:
1882     *   eax: value to return if first part of strings are equal
1883     *   ecx: minimum among the lengths of the two strings
1884     *   esi: pointer to this string data
1885     *   edi: pointer to comp string data
1886     */
1887    jecxz .Lstring_compareto_keep_length
1888    repe  cmpsw                   // find nonmatching chars in [%esi] and [%edi], up to length %ecx
1889    je    .Lstring_compareto_keep_length
1890    movzwl  -2(%esi), %eax        // get last compared char from this string (16-bit)
1891    movzwl  -2(%edi), %ecx        // get last compared char from comp string (16-bit)
1892.Lstring_compareto_count_difference:
1893    subl    %ecx, %eax
1894.Lstring_compareto_keep_length:
1895.Lstring_compareto_return:
1896    POP edi                       // pop callee save reg
1897    POP esi                       // pop callee save reg
1898    ret
1899END_FUNCTION art_quick_string_compareto
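
// With STRING_COMPRESSION_FEATURE, the `count` field packs the length in the
// upper 31 bits and the compression flag in bit 0 (0 = compressed 8-bit chars,
// 1 = uncompressed 16-bit chars); that is what the `shrl LITERAL(1)` + `jnc`
// pairs above decode. Sketch (illustrative helpers):
//
//   static inline uint32_t string_length(uint32_t count_field) { return count_field >> 1; }
//   static inline int string_is_compressed(uint32_t count_field) { return (count_field & 1) == 0; }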
1900
1901DEFINE_FUNCTION art_quick_string_builder_append
1902    SETUP_SAVE_REFS_ONLY_FRAME ebx            // save ref containing registers for GC
1903    // Outgoing argument set up
1904    leal FRAME_SIZE_SAVE_REFS_ONLY + __SIZEOF_POINTER__(%esp), %edi  // prepare args
1905    push %eax                                         // push padding
1906    CFI_ADJUST_CFA_OFFSET(4)
1907    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
1908    CFI_ADJUST_CFA_OFFSET(4)
1909    push %edi                                         // pass args
1910    CFI_ADJUST_CFA_OFFSET(4)
1911    push %eax                                         // pass format
1912    CFI_ADJUST_CFA_OFFSET(4)
1913    call SYMBOL(artStringBuilderAppend)               // (uint32_t, const uint32_t*, Thread*)
1914    DECREASE_FRAME 16                                 // pop arguments
1915    RESTORE_SAVE_REFS_ONLY_FRAME                      // restore frame up to return address
1916    RETURN_OR_DEOPT_IF_RESULT_IS_NON_NULL_OR_DELIVER  // return or deliver exception
1917END_FUNCTION art_quick_string_builder_append
1918
1919// Create a function `name` calling the ReadBarrier::Mark routine,
1920// getting its argument and returning its result through register
1921// `reg`, saving and restoring all caller-save registers.
1922//
1923// The generated function follows a non-standard runtime calling convention:
1924// - register `reg` (which may differ from EAX) is used to pass the (sole) argument,
1925// - register `reg` (which may differ from EAX) is used to return the result,
1926// - all other registers are callee-save (the values they hold are preserved).
1927MACRO2(READ_BARRIER_MARK_REG, name, reg)
1928    DEFINE_FUNCTION VAR(name)
1929    // Null check so that we can load the lock word.
1930    test REG_VAR(reg), REG_VAR(reg)
1931    jz .Lret_rb_\name
1932.Lnot_null_\name:
1933    // Check the mark bit, if it is 1 return.
1934    testl LITERAL(LOCK_WORD_MARK_BIT_MASK_SHIFTED), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(reg))
1935    jz .Lslow_rb_\name
1936    ret
1937.Lslow_rb_\name:
1938    PUSH eax
1939    mov MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(reg)), %eax
1940    add LITERAL(LOCK_WORD_STATE_FORWARDING_ADDRESS_OVERFLOW), %eax
1941    // Jump on overflow; the only case that overflows here should be the forwarding address one.
1942    // Taken ~25% of the time.
1943    CFI_REMEMBER_STATE
1944    jnae .Lret_forwarding_address\name
1945
1946    // Save all potentially live caller-save core registers.
1947    mov 0(%esp), %eax
1948    PUSH ecx
1949    PUSH edx
1950    PUSH ebx
1951    // 8-byte align the stack to improve (8-byte) XMM register saving and restoring.
1952    // and create space for caller-save floating-point registers.
1953    INCREASE_FRAME 4 + 8 * 8
1954    // Save all potentially live caller-save floating-point registers.
1955    movsd %xmm0, 0(%esp)
1956    movsd %xmm1, 8(%esp)
1957    movsd %xmm2, 16(%esp)
1958    movsd %xmm3, 24(%esp)
1959    movsd %xmm4, 32(%esp)
1960    movsd %xmm5, 40(%esp)
1961    movsd %xmm6, 48(%esp)
1962    movsd %xmm7, 56(%esp)
1963
1964    subl LITERAL(4), %esp            // alignment padding
1965    CFI_ADJUST_CFA_OFFSET(4)
1966    PUSH RAW_VAR(reg)                // pass arg1 - obj from `reg`
1967    call SYMBOL(artReadBarrierMark)  // artReadBarrierMark(obj)
1968    .ifnc RAW_VAR(reg), eax
1969      movl %eax, REG_VAR(reg)        // return result into `reg`
1970    .endif
1971    addl LITERAL(8), %esp            // pop argument and remove padding
1972    CFI_ADJUST_CFA_OFFSET(-8)
1973
1974    // Restore floating-point registers.
1975    movsd 0(%esp), %xmm0
1976    movsd 8(%esp), %xmm1
1977    movsd 16(%esp), %xmm2
1978    movsd 24(%esp), %xmm3
1979    movsd 32(%esp), %xmm4
1980    movsd 40(%esp), %xmm5
1981    movsd 48(%esp), %xmm6
1982    movsd 56(%esp), %xmm7
1983    // Remove floating-point registers and padding.
1984    DECREASE_FRAME 8 * 8 + 4
1985    // Restore core regs, except `reg`, as it is used to return the
1986    // result of this function (simply remove it from the stack instead).
1987    POP_REG_NE ebx, RAW_VAR(reg)
1988    POP_REG_NE edx, RAW_VAR(reg)
1989    POP_REG_NE ecx, RAW_VAR(reg)
1990    POP_REG_NE eax, RAW_VAR(reg)
1991.Lret_rb_\name:
1992    ret
1993.Lret_forwarding_address\name:
1994    CFI_RESTORE_STATE_AND_DEF_CFA esp, 8
1995    // The overflow cleared the top bits.
1996    sall LITERAL(LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT), %eax
1997    mov %eax, REG_VAR(reg)
1998    POP_REG_NE eax, RAW_VAR(reg)
1999    ret
2000    END_FUNCTION VAR(name)
2001END_MACRO
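
// The fast paths above inspect the object's lock word directly: a set mark bit
// means the reference is already up to date, and the deliberate overflow of the
// `add` detects the forwarding-address state, whose address is recovered by the
// shift. C sketch (illustrative; the null check happens before the lock word is
// loaded, and a NULL return here stands for the artReadBarrierMark() call):
//
//   static inline void* read_barrier_mark_fast(void* ref, uint32_t lock_word) {
//     if (lock_word & LOCK_WORD_MARK_BIT_MASK_SHIFTED) return ref;
//     uint32_t w = lock_word + LOCK_WORD_STATE_FORWARDING_ADDRESS_OVERFLOW;  // `add`
//     if (w < lock_word) {  // carry set: forwarding address state
//       return (void*)(w << LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT);       // `sall`
//     }
//     return NULL;  // slow path: artReadBarrierMark(ref)
//   }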
2002
2003READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg00, eax
2004READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, ecx
2005READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, edx
2006READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, ebx
2007READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, ebp
2008// Note: There is no art_quick_read_barrier_mark_reg04, as register 4 (ESP)
2009// cannot be used to pass arguments.
2010READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, esi
2011READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, edi
2012
2013DEFINE_FUNCTION art_quick_read_barrier_slow
2014    PUSH edx                         // pass arg3 - offset
2015    PUSH ecx                         // pass arg2 - obj
2016    PUSH eax                         // pass arg1 - ref
2017    call SYMBOL(artReadBarrierSlow)  // artReadBarrierSlow(ref, obj, offset)
2018    addl LITERAL(12), %esp           // pop arguments
2019    CFI_ADJUST_CFA_OFFSET(-12)
2020    ret
2021END_FUNCTION art_quick_read_barrier_slow
2022
2023DEFINE_FUNCTION art_quick_read_barrier_for_root_slow
2024    subl LITERAL(8), %esp                   // alignment padding
2025    CFI_ADJUST_CFA_OFFSET(8)
2026    PUSH eax                                // pass arg1 - root
2027    call SYMBOL(artReadBarrierForRootSlow)  // artReadBarrierForRootSlow(root)
2028    addl LITERAL(12), %esp                  // pop argument and remove padding
2029    CFI_ADJUST_CFA_OFFSET(-12)
2030    ret
2031END_FUNCTION art_quick_read_barrier_for_root_slow
2032
2033    /*
2034     * On stack replacement stub.
2035     * On entry:
2036     *   [sp] = return address
2037     *   [sp + 4] = stack to copy
2038     *   [sp + 8] = size of stack
2039     *   [sp + 12] = pc to call
2040     *   [sp + 16] = JValue* result
2041     *   [sp + 20] = shorty
2042     *   [sp + 24] = thread
2043     */
2044DEFINE_FUNCTION art_quick_osr_stub
2045    // Save native callee saves.
2046    PUSH ebp
2047    PUSH ebx
2048    PUSH esi
2049    PUSH edi
2050    SAVE_SIZE=20                   // 4 registers and the return address
2051    mov 4+16(%esp), %esi           // ESI = argument array
2052    mov 8+16(%esp), %ecx           // ECX = size of args
2053    mov 12+16(%esp), %ebx          // EBX = pc to call
2054    mov %esp, %ebp                 // Save stack pointer
2055    CFI_DEF_CFA(ebp, SAVE_SIZE)    // CFA = ebp + SAVE_SIZE
2056    CFI_REMEMBER_STATE
2057    andl LITERAL(0xFFFFFFF0), %esp // Align stack
2058    pushl %ebp                     // Save old stack pointer
2059    subl LITERAL(12), %esp         // Align stack
2060    movl LITERAL(0), (%esp)        // Store null for ArtMethod* slot
2061    // EBP isn't properly spilled in the OSR method, so we need to use a DWARF expression.
2062    // NB: the CFI must come before the call, since that is the address gdb will look up.
2063    // NB: gdb expects the cfa_expression to yield the CFA value itself (not an address to it).
2064    CFI_ESCAPE(                    /* cfa = [sp + 12] + SAVE_SIZE */ \
2065      0x0f, 6,                     /* DW_CFA_def_cfa_expression(len) */ \
2066      0x92, 4, 12,                 /* DW_OP_bregx(reg,offset) */ \
2067      0x06,                        /* DW_OP_deref */ \
2068      0x23, SAVE_SIZE)             /* DW_OP_plus_uconst(val) */
2069    call .Losr_entry
2070    mov 12(%esp), %esp             // Restore stack pointer.
2071    CFI_DEF_CFA(esp, SAVE_SIZE)    // CFA = esp + SAVE_SIZE
2072
2073    // Restore callee saves.
2074    POP edi
2075    POP esi
2076    POP ebx
2077    POP ebp
2078    mov 16(%esp), %ecx            // Get JValue result
2079    mov %eax, (%ecx)              // Store the result.
2080    mov %edx, 4(%ecx)             // Store the other half of the result.
2081    ret
2082.Losr_entry:
2083    CFI_RESTORE_STATE_AND_DEF_CFA ebp, SAVE_SIZE   // CFA = ebp + SAVE_SIZE
2084    subl LITERAL(4), %ecx         // The given stack size includes the pushed frame pointer, so subtract it.
2085    subl %ecx, %esp
2086    mov %esp, %edi                // EDI = beginning of stack
2087    rep movsb                     // while (ecx--) { *edi++ = *esi++ }
2088    jmp *%ebx
2089END_FUNCTION art_quick_osr_stub
2090
2091DEFINE_FUNCTION art_quick_invoke_polymorphic
2092                                                   // On entry: EAX := unused, ECX := receiver
2093    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx             // Save frame.
2094    mov %esp, %edx                                 // Remember SP
2095    sub LITERAL(4), %esp                           // Alignment padding
2096    CFI_ADJUST_CFA_OFFSET(4)
2097    push %edx                                      // Push SP
2098    CFI_ADJUST_CFA_OFFSET(4)
2099    pushl %fs:THREAD_SELF_OFFSET                   // Push Thread::Current()
2100    CFI_ADJUST_CFA_OFFSET(4)
2101    push %ecx                                      // Push receiver (method handle)
2102    CFI_ADJUST_CFA_OFFSET(4)
2103    call SYMBOL(artInvokePolymorphic)              // invoke with (receiver, thread, SP)
2104    addl LITERAL(16), %esp                         // Pop arguments.
2105    CFI_ADJUST_CFA_OFFSET(-16)
2106    mov %eax, 4(%esp)                              // Result is in EAX:EDX. Copy to saved FP state.
2107    mov %edx, 8(%esp)
2108    mov %edx, 40(%esp)                             // Copy EDX to saved context
2109    RESTORE_SAVE_REFS_AND_ARGS_FRAME
2110    RETURN_OR_DELIVER_PENDING_EXCEPTION
2111END_FUNCTION art_quick_invoke_polymorphic
2112
2113DEFINE_FUNCTION art_quick_invoke_custom
2114    SETUP_SAVE_REFS_AND_ARGS_FRAME ebx             // Save frame.
2115                                                   // EAX := call_site_index
2116    mov %esp, %ecx                                 // Remember SP.
2117    subl LITERAL(4), %esp                          // Alignment padding.
2118    CFI_ADJUST_CFA_OFFSET(4)
2119    push %ecx                                      // pass SP
2120    CFI_ADJUST_CFA_OFFSET(4)
2121    pushl %fs:THREAD_SELF_OFFSET                   // pass Thread::Current()
2122    CFI_ADJUST_CFA_OFFSET(4)
2123    push %eax                                      // pass call_site_index
2124    CFI_ADJUST_CFA_OFFSET(4)
2125    call SYMBOL(artInvokeCustom)                   // artInvokeCustom(call_site_index, Thread*, SP)
2126    addl LITERAL(16), %esp                         // Pop arguments.
2127    CFI_ADJUST_CFA_OFFSET(-16)
2128    mov %eax, 4(%esp)                              // Result is in EAX:EDX. Copy to saved FP state.
2129    mov %edx, 8(%esp)
2130    mov %edx, 40(%esp)                             // Copy EDX to saved context
2131    RESTORE_SAVE_REFS_AND_ARGS_FRAME
2132    RETURN_OR_DELIVER_PENDING_EXCEPTION
2133END_FUNCTION art_quick_invoke_custom
2134
2135// On entry: eax is the class, ebp is the inline cache.
2136DEFINE_FUNCTION art_quick_update_inline_cache
2137#if (INLINE_CACHE_SIZE != 5)
2138#error "INLINE_CACHE_SIZE not as expected."
2139#endif
2140    // Don't update the cache if we are marking.
2141    cmpl LITERAL(0), %fs:THREAD_IS_GC_MARKING_OFFSET
2142    jnz .Lret
2143    PUSH ecx
2144    movl %eax, %ecx // eax will be used for cmpxchg
2145.Lentry1:
2146    movl INLINE_CACHE_CLASSES_OFFSET(%ebp), %eax
2147    cmpl %ecx, %eax
2148    je .Ldone
2149    testl %eax, %eax
2150    jnz .Lentry2
2151    lock cmpxchg %ecx, INLINE_CACHE_CLASSES_OFFSET(%ebp)
2152    jz .Ldone
2153    jmp .Lentry1
2154.Lentry2:
2155    movl (INLINE_CACHE_CLASSES_OFFSET+4)(%ebp), %eax
2156    cmpl %ecx, %eax
2157    je .Ldone
2158    testl %eax, %eax
2159    jnz .Lentry3
2160    lock cmpxchg %ecx, (INLINE_CACHE_CLASSES_OFFSET+4)(%ebp)
2161    jz .Ldone
2162    jmp .Lentry2
2163.Lentry3:
2164    movl (INLINE_CACHE_CLASSES_OFFSET+8)(%ebp), %eax
2165    cmpl %ecx, %eax
2166    je .Ldone
2167    testl %eax, %eax
2168    jnz .Lentry4
2169    lock cmpxchg %ecx, (INLINE_CACHE_CLASSES_OFFSET+8)(%ebp)
2170    jz .Ldone
2171    jmp .Lentry3
2172.Lentry4:
2173    movl (INLINE_CACHE_CLASSES_OFFSET+12)(%ebp), %eax
2174    cmpl %ecx, %eax
2175    je .Ldone
2176    testl %eax, %eax
2177    jnz .Lentry5
2178    lock cmpxchg %ecx, (INLINE_CACHE_CLASSES_OFFSET+12)(%ebp)
2179    jz .Ldone
2180    jmp .Lentry4
2181.Lentry5:
2182    // Unconditionally store, the cache is megamorphic.
2183    movl %ecx, (INLINE_CACHE_CLASSES_OFFSET+16)(%ebp)
2184.Ldone:
2185    // Restore registers
2186    movl %ecx, %eax
2187    POP ecx
2188.Lret:
2189    ret
2190END_FUNCTION art_quick_update_inline_cache
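
// art_quick_update_inline_cache above is an unrolled version of: scan the five
// class slots, stop on a match, claim the first empty slot with a compare-and-
// swap (retrying that slot on contention), and use the last slot as the
// megamorphic bucket written with a plain store. C sketch (illustrative; the
// early-out while the GC is marking is omitted):
//
//   static inline void update_inline_cache(uint32_t* classes /* [5] */, uint32_t cls) {
//     for (int i = 0; i < 4; ++i) {
//       for (;;) {
//         uint32_t seen = classes[i];
//         if (seen == cls) return;                 // already cached
//         if (seen != 0) break;                    // slot taken by another class
//         if (__sync_bool_compare_and_swap(&classes[i], 0u, cls)) return;  // `lock cmpxchg`
//       }
//     }
//     classes[4] = cls;                            // megamorphic
//   }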
2191
2192    // TODO: implement these!
2193UNIMPLEMENTED art_quick_memcmp16
2194
2195// On entry, the method is at the bottom of the stack.
2196DEFINE_FUNCTION art_quick_compile_optimized
2197    SETUP_SAVE_EVERYTHING_FRAME ebx
2198    mov FRAME_SIZE_SAVE_EVERYTHING(%esp), %eax // Fetch ArtMethod
2199    sub LITERAL(8), %esp                       // Alignment padding
2200    CFI_ADJUST_CFA_OFFSET(8)
2201    pushl %fs:THREAD_SELF_OFFSET               // pass Thread::Current()
2202    CFI_ADJUST_CFA_OFFSET(4)
2203    pushl %eax
2204    CFI_ADJUST_CFA_OFFSET(4)
2205    call SYMBOL(artCompileOptimized)           // (ArtMethod*, Thread*)
2206    addl LITERAL(16), %esp                     // Pop arguments.
2207    CFI_ADJUST_CFA_OFFSET(-16)
2208    RESTORE_SAVE_EVERYTHING_FRAME
2209    ret
2210END_FUNCTION art_quick_compile_optimized
2211
2212DEFINE_FUNCTION art_quick_method_entry_hook
2213    SETUP_SAVE_EVERYTHING_FRAME edx
2214    mov FRAME_SIZE_SAVE_EVERYTHING(%esp), %eax // Fetch ArtMethod
2215    mov %esp, %edx  // Store esp before pushing anything on stack.
2216    subl LITERAL(4), %esp
2217    CFI_ADJUST_CFA_OFFSET(4)
2218
2219    push %edx                       // Pass SP
2220    CFI_ADJUST_CFA_OFFSET(4)
2221    pushl %fs:THREAD_SELF_OFFSET    // Pass Thread::Current().
2222    CFI_ADJUST_CFA_OFFSET(4)
2223    pushl %eax                      // Pass Method*.
2224    CFI_ADJUST_CFA_OFFSET(4)
2225
2226    call SYMBOL(artMethodEntryHook) // (Method*, Thread*, SP)
2227
2228    addl LITERAL(16), %esp          // Pop arguments.
2229    CFI_ADJUST_CFA_OFFSET(-16)
2230
2231    CFI_REMEMBER_STATE
2232    testl %eax, %eax
2233    jnz .Lentryhook_deopt
2234
2235    // Normal return.
2236    RESTORE_SAVE_EVERYTHING_FRAME
2237    ret
2238
2239.Lentryhook_deopt:
2240    // Deoptimize.
2241    CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
2242    call SYMBOL(art_quick_do_long_jump)
2243    UNREACHABLE
2244END_FUNCTION art_quick_method_entry_hook
2245
2246DEFINE_FUNCTION art_quick_method_exit_hook
2247    PUSH edi
2248    SETUP_SAVE_EVERYTHING_FRAME_EDI_SAVED edi
2249
2250    leal FRAME_SIZE_SAVE_EVERYTHING(%esp), %edi // Remember ArtMethod**
2251    subl LITERAL(4), %esp                       // Align stack.
2252    CFI_ADJUST_CFA_OFFSET(4)
2253
2254    PUSH_ARG edx                   // Save gpr return value. EDX and EAX need to be adjacent,
2255                                   // which isn't the case in the kSaveEverything frame.
2256    PUSH_ARG eax
2257    movl %esp, %edx                // Get pointer to gpr_result
2258    leal 28(%esp), %eax            // Get pointer to fpr_result in the kSaveEverything frame
2259    PUSH_ARG ebx                   // push frame_size
2260    PUSH_ARG eax                   // Pass fpr_result
2261    PUSH_ARG edx                   // Pass gpr_result
2262    PUSH_ARG edi                   // Pass ArtMethod**
2263    pushl %fs:THREAD_SELF_OFFSET   // Pass Thread::Current.
2264    CFI_ADJUST_CFA_OFFSET(4)
2265    call SYMBOL(artMethodExitHook) // (Thread*, ArtMethod**, gpr_result*, fpr_result*,
2266                                   // frame_size)
2267
2268    // Keep gpr_result in case the return result was changed.
2269    movl 20(%esp), %ecx
2270
2271    addl LITERAL(32), %esp         // Pop arguments and gpr_result.
2272    CFI_ADJUST_CFA_OFFSET(-32)
2273
2274    CFI_REMEMBER_STATE
2275    testl %eax, %eax
2276    jnz .Lexithook_deopt_or_exception
2277
2278    // Return result could have been changed if it's a reference.
2279    movl %ecx, (80)(%esp)
2280
2281    // Normal return.
2282    RESTORE_SAVE_EVERYTHING_FRAME
2283    ret
2284
2285.Lexithook_deopt_or_exception:
2286    // Deoptimize or exception thrown.
2287    CFI_RESTORE_STATE_AND_DEF_CFA esp, FRAME_SIZE_SAVE_EVERYTHING
2288    call SYMBOL(art_quick_do_long_jump)
2289    UNREACHABLE
2290END_FUNCTION art_quick_method_exit_hook
2291