/* switch_x86_unix.h */
  1. /*
  2. * this is the internal transfer function.
  3. *
  4. * HISTORY
  5. * 3-May-13 Ralf Schmitt <ralf@systemexit.de>
  6. * Add support for strange GCC caller-save decisions
  7. * (ported from switch_aarch64_gcc.h)
  8. * 19-Aug-11 Alexey Borzenkov <snaury@gmail.com>
  9. * Correctly save ebp, ebx and cw
  10. * 07-Sep-05 (py-dev mailing list discussion)
  11. * removed 'ebx' from the register-saved. !!!! WARNING !!!!
  12. * It means that this file can no longer be compiled statically!
  13. * It is now only suitable as part of a dynamic library!
  14. * 24-Nov-02 Christian Tismer <tismer@tismer.com>
  15. * needed to add another magic constant to ensure
  16. * that f in slp_eval_frame(PyFrameObject *f)
  17. * gets included into the saved stack area.
  18. * STACK_REFPLUS will probably be 1 in most cases.
  19. * 17-Sep-02 Christian Tismer <tismer@tismer.com>
  20. * after virtualizing stack save/restore, the
  21. * stack size shrunk a bit. Needed to introduce
  22. * an adjustment STACK_MAGIC per platform.
  23. * 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
  24. * slightly changed framework for spark
  25. * 31-Avr-02 Armin Rigo <arigo@ulb.ac.be>
  26. * Added ebx, esi and edi register-saves.
  27. * 01-Mar-02 Samual M. Rushing <rushing@ironport.com>
  28. * Ported from i386.
  29. */
#define STACK_REFPLUS 1
#ifdef SLP_EVAL
/* #define STACK_MAGIC 3 */
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
#define STACK_MAGIC 0
/* gcc >= 4.5 can clone functions during interprocedural optimization;
 * a clone of slp_switch with a different prologue would break the
 * hand-written stack manipulation, so mark it noclone where supported. */
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
# define ATTR_NOCLONE __attribute__((noclone))
#else
# define ATTR_NOCLONE
#endif
  40. static int
  41. slp_switch(void)
  42. {
  43. int err;
  44. #ifdef _WIN32
  45. void *seh;
  46. #endif
  47. void *ebp, *ebx;
  48. unsigned short cw;
  49. register int *stackref, stsizediff;
  50. __asm__ volatile ("" : : : "esi", "edi");
  51. __asm__ volatile ("fstcw %0" : "=m" (cw));
  52. __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp));
  53. __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx));
  54. #ifdef _WIN32
  55. __asm__ volatile (
  56. "movl %%fs:0x0, %%eax\n"
  57. "movl %%eax, %0\n"
  58. : "=m" (seh)
  59. :
  60. : "eax");
  61. #endif
  62. __asm__ ("movl %%esp, %0" : "=g" (stackref));
  63. {
  64. SLP_SAVE_STATE(stackref, stsizediff);
  65. __asm__ volatile (
  66. "addl %0, %%esp\n"
  67. "addl %0, %%ebp\n"
  68. :
  69. : "r" (stsizediff)
  70. );
  71. SLP_RESTORE_STATE();
  72. __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err));
  73. }
  74. #ifdef _WIN32
  75. __asm__ volatile (
  76. "movl %0, %%eax\n"
  77. "movl %%eax, %%fs:0x0\n"
  78. :
  79. : "m" (seh)
  80. : "eax");
  81. #endif
  82. __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx));
  83. __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp));
  84. __asm__ volatile ("fldcw %0" : : "m" (cw));
  85. __asm__ volatile ("" : : : "esi", "edi");
  86. return err;
  87. }
  88. #endif
  89. /*
  90. * further self-processing support
  91. */
  92. /*
  93. * if you want to add self-inspection tools, place them
  94. * here. See the x86_msvc for the necessary defines.
  95. * These features are highly experimental and not
  96. * essential yet.
  97. */