/* -----------------------------------------------------------------------
   sysv.h - Copyright (c) 2003 Jakub Jelinek <[email protected]>
	    Copyright (c) 2008 Red Hat, Inc.

   PowerPC64 Assembly glue.

   Permission is hereby granted, free of charge, to any person obtaining
   a copy of this software and associated documentation files (the
   ``Software''), to deal in the Software without restriction, including
   without limitation the rights to use, copy, modify, merge, publish,
   distribute, sublicense, and/or sell copies of the Software, and to
   permit persons to whom the Software is furnished to do so, subject to
   the following conditions:

   The above copyright notice and this permission notice shall be included
   in all copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,
   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
   NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
   HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
   WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
   DEALINGS IN THE SOFTWARE.
   ----------------------------------------------------------------------- */
#define LIBFFI_ASM
#include <fficonfig.h>
#include <ffi.h>

	.file	"linux64_closure.S"

#ifdef POWERPC64
	FFI_HIDDEN (ffi_closure_LINUX64)
	.globl	ffi_closure_LINUX64
	.text
	.cfi_startproc
# if _CALL_ELF == 2
ffi_closure_LINUX64:
	addis	%r2, %r12, .TOC.-ffi_closure_LINUX64@ha
	addi	%r2, %r2, .TOC.-ffi_closure_LINUX64@l
	.localentry ffi_closure_LINUX64, . - ffi_closure_LINUX64
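	# ELFv2 global entry: the caller leaves our own entry address in
	# r12, so the two instructions above derive the TOC pointer from it.
	# The local entry point (entered with r2 already valid) starts here.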
# else
	.section	".opd","aw"
	.align	3
ffi_closure_LINUX64:
# ifdef _CALL_LINUX
	.quad	.L.ffi_closure_LINUX64,.TOC.@tocbase,0
	.type	ffi_closure_LINUX64,@function
	.text
.L.ffi_closure_LINUX64:
# else
	FFI_HIDDEN (.ffi_closure_LINUX64)
	.globl	.ffi_closure_LINUX64
	.quad	.ffi_closure_LINUX64,.TOC.@tocbase,0
	.size	ffi_closure_LINUX64,24
	.type	.ffi_closure_LINUX64,@function
	.text
.ffi_closure_LINUX64:
# endif
# endif

# if _CALL_ELF == 2
# ifdef __VEC__
# 32 byte special reg save area + 64 byte parm save area
# + 128 byte retval area + 13*8 fpr save area + 12*16 vec save area + round to 16
#  define STACKFRAME 528
# else
# 32 byte special reg save area + 64 byte parm save area
# + 64 byte retval area + 13*8 fpr save area + round to 16
#  define STACKFRAME 272
# endif
#  define PARMSAVE 32
#  define RETVAL PARMSAVE+64
# else
# 48 bytes special reg save area + 64 bytes parm save area
# + 16 bytes retval area + 13*8 bytes fpr save area + round to 16
#  define STACKFRAME 240
#  define PARMSAVE 48
#  define RETVAL PARMSAVE+64
# endif
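# Frame-size arithmetic (informational): with __VEC__,
# 32 + 64 + 128 + 13*8 + 12*16 = 520, rounded up to a 16-byte multiple = 528;
# without __VEC__, 32 + 64 + 64 + 13*8 = 264 -> 272; for ELFv1,
# 48 + 64 + 16 + 13*8 = 232 -> 240.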

# if _CALL_ELF == 2
	ld	%r12, FFI_TRAMPOLINE_SIZE(%r11)		# closure->cif
	mflr	%r0
	lwz	%r12, 28(%r12)				# cif->flags
	mtcrf	0x40, %r12
	addi	%r12, %r1, PARMSAVE
	bt	7, 0f
	# Our caller has not allocated a parameter save area.
	# We need to allocate one here and use it to pass gprs to
	# ffi_closure_helper_LINUX64.
	addi	%r12, %r1, -STACKFRAME+PARMSAVE
0:
	# Save general regs into parm save area
	std	%r3, 0(%r12)
	std	%r4, 8(%r12)
	std	%r5, 16(%r12)
	std	%r6, 24(%r12)
	std	%r7, 32(%r12)
	std	%r8, 40(%r12)
	std	%r9, 48(%r12)
	std	%r10, 56(%r12)

	# load up the pointer to the parm save area
	mr	%r7, %r12
# else
	# copy r2 to r11 and load TOC into r2
	mr	%r11, %r2
	ld	%r2, 16(%r2)

	mflr	%r0
	# Save general regs into parm save area
	# This is the parameter save area set up by our caller.
	std	%r3, PARMSAVE+0(%r1)
	std	%r4, PARMSAVE+8(%r1)
	std	%r5, PARMSAVE+16(%r1)
	std	%r6, PARMSAVE+24(%r1)
	std	%r7, PARMSAVE+32(%r1)
	std	%r8, PARMSAVE+40(%r1)
	std	%r9, PARMSAVE+48(%r1)
	std	%r10, PARMSAVE+56(%r1)

	# load up the pointer to the parm save area
	addi	%r7, %r1, PARMSAVE
# endif
	std	%r0, 16(%r1)

	# closure->cif
	ld	%r3, FFI_TRAMPOLINE_SIZE(%r11)
	# closure->fun
	ld	%r4, FFI_TRAMPOLINE_SIZE+8(%r11)
	# closure->user_data
	ld	%r5, FFI_TRAMPOLINE_SIZE+16(%r11)
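	# r3/r4/r5 now hold cif, fun and user_data.  .Ldoclosure below is
	# shared with ffi_go_closure_linux64, which branches here after
	# setting up the same three arguments.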

.Ldoclosure:
	# next save fpr 1 to fpr 13
	stfd	%f1, -104+(0*8)(%r1)
	stfd	%f2, -104+(1*8)(%r1)
	stfd	%f3, -104+(2*8)(%r1)
	stfd	%f4, -104+(3*8)(%r1)
	stfd	%f5, -104+(4*8)(%r1)
	stfd	%f6, -104+(5*8)(%r1)
	stfd	%f7, -104+(6*8)(%r1)
	stfd	%f8, -104+(7*8)(%r1)
	stfd	%f9, -104+(8*8)(%r1)
	stfd	%f10, -104+(9*8)(%r1)
	stfd	%f11, -104+(10*8)(%r1)
	stfd	%f12, -104+(11*8)(%r1)
	stfd	%f13, -104+(12*8)(%r1)

	# load up the pointer to the saved fpr registers
	addi	%r8, %r1, -104

# ifdef __VEC__
	# load up the pointer to the saved vector registers
	# 8 bytes padding for 16-byte alignment at -112(%r1)
	addi	%r9, %r8, -24
	stvx	%v13, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v12, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v11, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v10, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v9, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v8, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v7, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v6, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v5, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v4, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v3, 0, %r9
	addi	%r9, %r9, -16
	stvx	%v2, 0, %r9
# endif

	# load up the pointer to the result storage
	addi	%r6, %r1, -STACKFRAME+RETVAL

	stdu	%r1, -STACKFRAME(%r1)
	.cfi_def_cfa_offset STACKFRAME
	.cfi_offset 65, 16

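	# At this point the helper's arguments are set up:
	#   r3 = closure->cif, r4 = closure->fun, r5 = user_data,
	#   r6 = return value buffer, r7 = parameter save area,
	#   r8 = saved FPRs, r9 = saved vector regs (only with __VEC__).
	# The helper hands back the return-type code in r3.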
	# make the call
# if defined _CALL_LINUX || _CALL_ELF == 2
	bl	ffi_closure_helper_LINUX64
# else
	bl	.ffi_closure_helper_LINUX64
# endif
.Lret:

	# now r3 contains the return type
	# so use it to look up in a table
	# so we know how to deal with each type

	# look up the proper starting point in table
	# by using return type as offset
	ld	%r0, STACKFRAME+16(%r1)
	cmpldi	%r3, FFI_V2_TYPE_SMALL_STRUCT
	bge	.Lsmall
	mflr	%r4		# move address of .Lret to r4
	sldi	%r3, %r3, 4	# now multiply return type by 16
	addi	%r4, %r4, .Lret_type0 - .Lret
	add	%r3, %r3, %r4	# add contents of table to table address
	mtctr	%r3
	bctr			# jump to it

# Each of the ret_typeX code fragments has to be exactly 16 bytes long
# (4 instructions).  For cache effectiveness we align to a 16 byte boundary
# first.
	.align 4

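# The fragments below are indexed by the libffi return-type code in r3,
# in ascending FFI_TYPE_* order starting with FFI_TYPE_VOID (0).  The
# .cfi_def_cfa_offset 0 / STACKFRAME pair around each blr keeps the unwind
# info correct both at the return itself and for the fragments that follow.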
.Lret_type0:
# case FFI_TYPE_VOID
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
	nop
# case FFI_TYPE_INT
# ifdef __LITTLE_ENDIAN__
	lwa %r3, RETVAL+0(%r1)
# else
	lwa %r3, RETVAL+4(%r1)
# endif
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_FLOAT
	lfs %f1, RETVAL+0(%r1)
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_DOUBLE
	lfd %f1, RETVAL+0(%r1)
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_LONGDOUBLE
	lfd %f1, RETVAL+0(%r1)
	mtlr %r0
	lfd %f2, RETVAL+8(%r1)
	b .Lfinish
# case FFI_TYPE_UINT8
# ifdef __LITTLE_ENDIAN__
	lbz %r3, RETVAL+0(%r1)
# else
	lbz %r3, RETVAL+7(%r1)
# endif
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_SINT8
# ifdef __LITTLE_ENDIAN__
	lbz %r3, RETVAL+0(%r1)
# else
	lbz %r3, RETVAL+7(%r1)
# endif
	extsb %r3,%r3
	mtlr %r0
	b .Lfinish
# case FFI_TYPE_UINT16
# ifdef __LITTLE_ENDIAN__
	lhz %r3, RETVAL+0(%r1)
# else
	lhz %r3, RETVAL+6(%r1)
# endif
	mtlr %r0
.Lfinish:
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_SINT16
# ifdef __LITTLE_ENDIAN__
	lha %r3, RETVAL+0(%r1)
# else
	lha %r3, RETVAL+6(%r1)
# endif
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_UINT32
# ifdef __LITTLE_ENDIAN__
	lwz %r3, RETVAL+0(%r1)
# else
	lwz %r3, RETVAL+4(%r1)
# endif
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_SINT32
# ifdef __LITTLE_ENDIAN__
	lwa %r3, RETVAL+0(%r1)
# else
	lwa %r3, RETVAL+4(%r1)
# endif
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_UINT64
	ld %r3, RETVAL+0(%r1)
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_SINT64
	ld %r3, RETVAL+0(%r1)
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
# case FFI_TYPE_STRUCT
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
	nop
# case FFI_TYPE_POINTER
	ld %r3, RETVAL+0(%r1)
	mtlr %r0
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
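# The FFI_V2_TYPE_* cases below are PowerPC-specific return codes
# (see the FFI_V2_TYPE_* defines in ffitarget.h) used by the ELFv2 ABI
# for vector returns, homogeneous float/double aggregates and small
# structs returned in registers.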
# case FFI_V2_TYPE_VECTOR
	addi %r3, %r1, RETVAL
	lvx %v2, 0, %r3
	mtlr %r0
	b .Lfinish
# case FFI_V2_TYPE_VECTOR_HOMOG
	addi %r3, %r1, RETVAL
	lvx %v2, 0, %r3
	addi %r3, %r3, 16
	b .Lmorevector
# case FFI_V2_TYPE_FLOAT_HOMOG
	lfs %f1, RETVAL+0(%r1)
	lfs %f2, RETVAL+4(%r1)
	lfs %f3, RETVAL+8(%r1)
	b .Lmorefloat
# case FFI_V2_TYPE_DOUBLE_HOMOG
	lfd %f1, RETVAL+0(%r1)
	lfd %f2, RETVAL+8(%r1)
	lfd %f3, RETVAL+16(%r1)
	lfd %f4, RETVAL+24(%r1)
	mtlr %r0
	lfd %f5, RETVAL+32(%r1)
	lfd %f6, RETVAL+40(%r1)
	lfd %f7, RETVAL+48(%r1)
	lfd %f8, RETVAL+56(%r1)
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
.Lmorevector:
	lvx %v3, 0, %r3
	addi %r3, %r3, 16
	lvx %v4, 0, %r3
	addi %r3, %r3, 16
	lvx %v5, 0, %r3
	mtlr %r0
	addi %r3, %r3, 16
	lvx %v6, 0, %r3
	addi %r3, %r3, 16
	lvx %v7, 0, %r3
	addi %r3, %r3, 16
	lvx %v8, 0, %r3
	addi %r3, %r3, 16
	lvx %v9, 0, %r3
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
.Lmorefloat:
	lfs %f4, RETVAL+12(%r1)
	mtlr %r0
	lfs %f5, RETVAL+16(%r1)
	lfs %f6, RETVAL+20(%r1)
	lfs %f7, RETVAL+24(%r1)
	lfs %f8, RETVAL+28(%r1)
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
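# Small structs: on little-endian they are already right-justified in
# memory, so r3 (and r4) can be loaded directly; on big-endian a struct
# smaller than a doubleword has to be shifted right below so that it ends
# up right-justified in r3.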
.Lsmall:
# ifdef __LITTLE_ENDIAN__
	ld %r3,RETVAL+0(%r1)
	mtlr %r0
	ld %r4,RETVAL+8(%r1)
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
# else
	# A struct smaller than a dword is returned in the low bits of r3
	# ie. right justified.  Larger structs are passed left justified
	# in r3 and r4.  The return value area on the stack will have
	# the structs as they are usually stored in memory.
	cmpldi %r3, FFI_V2_TYPE_SMALL_STRUCT + 7 # size 8 bytes?
	neg %r5, %r3
	ld %r3,RETVAL+0(%r1)
	blt .Lsmalldown
	mtlr %r0
	ld %r4,RETVAL+8(%r1)
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	blr
	.cfi_def_cfa_offset STACKFRAME
.Lsmalldown:
	addi %r5, %r5, FFI_V2_TYPE_SMALL_STRUCT + 7
	mtlr %r0
	sldi %r5, %r5, 3
	addi %r1, %r1, STACKFRAME
	.cfi_def_cfa_offset 0
	srd %r3, %r3, %r5
	blr
# endif

	.cfi_endproc
# if _CALL_ELF == 2
	.size	ffi_closure_LINUX64,.-ffi_closure_LINUX64
# else
# ifdef _CALL_LINUX
	.size	ffi_closure_LINUX64,.-.L.ffi_closure_LINUX64
# else
	.long 0
	.byte 0,12,0,1,128,0,0,0
	.size	.ffi_closure_LINUX64,.-.ffi_closure_LINUX64
# endif
# endif

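# ffi_go_closure_linux64 is the Go-closure variant of the entry point:
# r11 points at an ffi_go_closure rather than an ffi_closure, so cif and
# fun live at fixed offsets 8 and 16, and the closure pointer itself is
# passed through as user_data.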
	FFI_HIDDEN (ffi_go_closure_linux64)
	.globl	ffi_go_closure_linux64
	.text
	.cfi_startproc
# if _CALL_ELF == 2
ffi_go_closure_linux64:
	addis	%r2, %r12, .TOC.-ffi_go_closure_linux64@ha
	addi	%r2, %r2, .TOC.-ffi_go_closure_linux64@l
	.localentry ffi_go_closure_linux64, . - ffi_go_closure_linux64
# else
	.section	".opd","aw"
	.align	3
ffi_go_closure_linux64:
# ifdef _CALL_LINUX
	.quad	.L.ffi_go_closure_linux64,.TOC.@tocbase,0
	.type	ffi_go_closure_linux64,@function
	.text
.L.ffi_go_closure_linux64:
# else
	FFI_HIDDEN (.ffi_go_closure_linux64)
	.globl	.ffi_go_closure_linux64
	.quad	.ffi_go_closure_linux64,.TOC.@tocbase,0
	.size	ffi_go_closure_linux64,24
	.type	.ffi_go_closure_linux64,@function
	.text
.ffi_go_closure_linux64:
# endif
# endif

# if _CALL_ELF == 2
	ld	%r12, 8(%r11)			# closure->cif
	mflr	%r0
	lwz	%r12, 28(%r12)			# cif->flags
	mtcrf	0x40, %r12
	addi	%r12, %r1, PARMSAVE
	bt	7, 0f
	# Our caller has not allocated a parameter save area.
	# We need to allocate one here and use it to pass gprs to
	# ffi_closure_helper_LINUX64.
	addi	%r12, %r1, -STACKFRAME+PARMSAVE
0:
	# Save general regs into parm save area
	std	%r3, 0(%r12)
	std	%r4, 8(%r12)
	std	%r5, 16(%r12)
	std	%r6, 24(%r12)
	std	%r7, 32(%r12)
	std	%r8, 40(%r12)
	std	%r9, 48(%r12)
	std	%r10, 56(%r12)

	# load up the pointer to the parm save area
	mr	%r7, %r12
# else
	mflr	%r0
	# Save general regs into parm save area
	# This is the parameter save area set up by our caller.
	std	%r3, PARMSAVE+0(%r1)
	std	%r4, PARMSAVE+8(%r1)
	std	%r5, PARMSAVE+16(%r1)
	std	%r6, PARMSAVE+24(%r1)
	std	%r7, PARMSAVE+32(%r1)
	std	%r8, PARMSAVE+40(%r1)
	std	%r9, PARMSAVE+48(%r1)
	std	%r10, PARMSAVE+56(%r1)

	# load up the pointer to the parm save area
	addi	%r7, %r1, PARMSAVE
# endif
	std	%r0, 16(%r1)

	# closure->cif
	ld	%r3, 8(%r11)
	# closure->fun
	ld	%r4, 16(%r11)
	# user_data
	mr	%r5, %r11
	b	.Ldoclosure
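	# Control rejoins the common path at .Ldoclosure above; the return
	# value is then handled by the same return-type table as the
	# ordinary closure entry.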

	.cfi_endproc
# if _CALL_ELF == 2
	.size	ffi_go_closure_linux64,.-ffi_go_closure_linux64
# else
# ifdef _CALL_LINUX
	.size	ffi_go_closure_linux64,.-.L.ffi_go_closure_linux64
# else
	.long 0
	.byte 0,12,0,1,128,0,0,0
	.size	.ffi_go_closure_linux64,.-.ffi_go_closure_linux64
# endif
# endif
#endif

#if (defined __ELF__ && defined __linux__) || _CALL_ELF == 2
	.section	.note.GNU-stack,"",@progbits
#endif