switch_s390_unix.h

/*
 * this is the internal transfer function.
 *
 * HISTORY
 * 25-Jan-12  Alexey Borzenkov  <snaury@gmail.com>
 *      Fixed Linux/S390 port to work correctly with
 *      different optimization options both on 31-bit
 *      and 64-bit. Thanks to Stefan Raabe for lots
 *      of testing.
 * 24-Nov-02  Christian Tismer  <tismer@tismer.com>
 *      needed to add another magic constant to ensure
 *      that f in slp_eval_frame(PyFrameObject *f)
 *      gets included into the saved stack area.
 *      STACK_REFPLUS will probably be 1 in most cases.
 * 06-Oct-02  Gustavo Niemeyer <niemeyer@conectiva.com>
 *      Ported to Linux/S390.
 */

#define STACK_REFPLUS 1

#ifdef SLP_EVAL

#ifdef __s390x__
#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */
#else
#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */
#endif
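
/*
 * Added note (not part of the original header): these sizes match the
 * register save area that the zSeries ELF ABI reserves at the bottom of
 * every stack frame -- 160 bytes on 64-bit, 96 bytes on 31-bit.  A hedged
 * compile-time sanity check, assuming a C11 compiler, could read:
 *
 *   #ifdef __s390x__
 *   _Static_assert(STACK_MAGIC * 8 == 160, "s390x call area is 160 bytes");
 *   #else
 *   _Static_assert(STACK_MAGIC * 4 == 96, "s390 call area is 96 bytes");
 *   #endif
 */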

/* Technically, r11-r13 also need saving, but function prolog starts
   with stm(g) and since there are so many saved registers already
   it won't be optimized, resulting in all r6-r15 being saved */
#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \
                     "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \
                     "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15"

static int
slp_switch(void)
{
    register int ret;
    register long *stackref, stsizediff;
    __asm__ volatile ("" : : : REGS_TO_SAVE);
#ifdef __s390x__
    __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : );
#else
    __asm__ volatile ("lr %0, 15" : "=r" (stackref) : );
#endif
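    /* Added note: in the zSeries ELF ABI r15 is the stack pointer, so the
       lgr/lr above simply copies the current stack top into stackref for
       SLP_SAVE_STATE to work with. */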
    {
        SLP_SAVE_STATE(stackref, stsizediff);
        /* N.B.
           r11 may be used as the frame pointer, and in that case it cannot be
           clobbered and needs offsetting just like the stack pointer (but in
           cases where the frame pointer isn't used we might clobber it
           accidentally). What's scary is that r11 is the 2nd (and even the 1st
           when the GOT is used) callee-saved register that gcc would choose
           for surviving function calls. However, since r6-r10 are clobbered
           above, their cost for reuse is reduced, so gcc's IRA will choose
           them over r11 (not seeing that r11 is implicitly saved), making it
           relatively safe to offset in all cases. :) */
        __asm__ volatile (
#ifdef __s390x__
            "agr 15, %0\n\t"
            "agr 11, %0"
#else
            "ar 15, %0\n\t"
            "ar 11, %0"
#endif
            : /* no outputs */
            : "r" (stsizediff)
            );
        SLP_RESTORE_STATE();
    }
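    /* Added note: clobbering REGS_TO_SAVE a second time after the restore
       keeps the compiler from reusing any value it had cached in those
       registers before the stack was moved. */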
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    __asm__ volatile ("lhi %0, 0" : "=r" (ret) : );
    return ret;
}
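
/*
 * Illustrative sketch (not part of the original header): slp_switch()
 * expects the including file to define SLP_SAVE_STATE and
 * SLP_RESTORE_STATE.  Roughly, SLP_SAVE_STATE(stackref, stsizediff) saves
 * the current stack from stackref upward (STACK_MAGIC extra slots
 * included) and sets stsizediff to the byte distance to the target stack;
 * SLP_RESTORE_STATE() copies the target stack contents back.  A hedged,
 * hypothetical shape of such macros (save_stack(), restore_stack() and
 * target_stack_base are made-up names, not real greenlet API):
 *
 *   #define SLP_SAVE_STATE(stackref, stsizediff)            \
 *       save_stack((char *)((stackref) + STACK_MAGIC));     \
 *       (stsizediff) = target_stack_base - (char *)(stackref)
 *
 *   #define SLP_RESTORE_STATE() restore_stack()
 */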
#endif /* SLP_EVAL */

/*
 * further self-processing support
 */

/*
 * if you want to add self-inspection tools, place them
 * here. See the x86_msvc for the necessary defines.
 * These features are highly experimental and not
 * essential yet.
 */