/* runtime/ffi/call_x86_64.S - ktap */

/*
 * call_x86_64.S - assembly code to call C function and handle return value
 *
 * This file is part of ktap by Jovi Zhangwei
 *
 * Copyright (C) 2012-2013 Jovi Zhangwei <jovi.zhangwei@gmail.com>.
 *
 * ktap is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * ktap is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 */


  21. #ifdef __x86_64

  22.     .file "call_x86_64.S"
  23.     .text

  24. /*    ffi_call_assem_x86_64(void *stack, void *temp_stack,
  25. *        void *rvalue, void *func_addr, ffi_type rftype)
  26. *    @stack: base address of register values and new stack
  27. *    @temp_stack: stack to store temporary values
  28. *    @func_addr: Function address
  29. *    @rvalue: where to put return value
  30. *    @rftype: FFI type of return value
  31. */
  32.     .align 2
  33.     .globl    ffi_call_assem_x86_64
  34.     .type    ffi_call_assem_x86_64,@function

  35. ffi_call_assem_x86_64:
  36.     movq    (%rsp), %rax    /* save return address */
  37.     /* move stuffs to temp memory region(void *temp_stack) */
  38.     movq    %rcx, (%rsi)    /* save pointer to return value */
  39.     movq    %r8, 8(%rsi)    /* save return_ffi_type */
  40.     movq    %rbp, 16(%rsi)    /* save %rbp */
  41.     movq    %rax, 24(%rsi)    /* save return address */
  42.     movq    %rsp, 32(%rsi)    /* save %rsp */
  43.     movq    %rsi, %rbp    /* point %rbp to temp memory region */

  44.     movq    %rdx, %r11    /* move function address to %r11 */

  45.     movq    %rdi, %r10    /* set %r10 point to register region */
  46.     movq    (%r10), %rdi    /* load registers */
  47.     movq    8(%r10), %rsi
  48.     movq    16(%r10), %rdx
  49.     movq    24(%r10), %rcx
  50.     movq    32(%r10), %r8
  51.     movq    40(%r10), %r9
  52.     xorq    %rax, %rax

  53.     leaq    48(%r10), %rsp

  54.     callq    *%r11

  55.     movq    32(%rbp), %rsp    /* restore %rsp */
  56.     movq    24(%rbp), %rcx    /* restore return address */
  57.     movq    %rcx, (%rsp)

  58.     movq    (%rbp), %rcx    /* get pointer to return value */
  59.     movq    8(%rbp), %r8    /* get return_ffi_type */
  60.     movq    16(%rbp), %rbp    /* restore rbp */

  61.     leaq    .Lreturn_table(%rip), %r11    /* start address of return_table */
  62.     movslq    (%r11, %r8, 8), %r11    /* fetch target address from table */
  63.     jmpq    *%r11            /* jump according to value in table */

  64.     .align 8
  65. .Lreturn_table:
  66.     .quad    .Lreturn_void        /* FFI_VOID */
  67.     .quad    .Lreturn_uint8        /* FFI_UINT8 */
  68.     .quad    .Lreturn_int8        /* FFI_INT8 */
  69.     .quad    .Lreturn_uint16        /* FFI_UINT16 */
  70.     .quad    .Lreturn_int16        /* FFI_INT16 */
  71.     .quad    .Lreturn_uint32        /* FFI_UINT32 */
  72.     .quad    .Lreturn_int32        /* FFI_INT32 */
  73.     .quad    .Lreturn_uint64        /* FFI_UINT64 */
  74.     .quad    .Lreturn_int64        /* FFI_INT64 */
  75.     .quad    .Lreturn_ptr        /* FFI_PTR */
  76.     .quad    .Lreturn_func        /* FFI_FUNC */
  77.     .quad    .Lreturn_struct        /* FFI_STRUCT */
  78.     .quad    .Lreturn_unknown    /* FFI_UNKNOWN */

  79.     .align 8
  80. .Lreturn_void:
  81. .Lreturn_func:
  82. .Lreturn_unknown:
  83.     retq
  84.     .align 8
  85. .Lreturn_uint8:
  86.     movzbq    %al, %rax
  87.     movq    %rax, (%rcx)
  88.     retq
  89.     .align 8
  90. .Lreturn_int8:
  91.     movsbq    %al, %rax
  92.     movq    %rax, (%rcx)
  93.     retq
  94.     .align 8
  95. .Lreturn_uint16:
  96.     movzwq    %ax, %rax
  97.     movq    %rax, (%rcx)
  98.     retq
  99.     .align 8
  100. .Lreturn_int16:
  101.     movswq    %ax, %rax
  102.     movq    %rax, (%rcx)
  103.     retq
  104.     .align 8
  105. .Lreturn_uint32:
  106.     movl    %eax, %eax
  107.     movq    %rax, (%rcx)
  108.     retq
  109.     .align 8
  110. .Lreturn_int32:
  111.     movslq    %eax, %rax
  112.     movq    %rax, (%rcx)
  113.     retq
  114.     .align 8
  115. .Lreturn_uint64:
  116. .Lreturn_int64:
  117. .Lreturn_ptr:
  118.     movq    %rax, (%rcx)
  119.     retq
  120. /* Struct type indicates that struct is put into at most two registers,
  121. * and 16 bytes space is always available
  122. */
  123.     .align 8
  124. .Lreturn_struct:
  125.     movq    %rax, (%rcx)
  126.     movq    %rdx, 8(%rcx)
  127.     retq

  128. #endif /* end for __x86_64 */