switch_x32_unix.h

/*
 * this is the internal transfer function.
 *
 * HISTORY
 * 17-Aug-12  Fantix King <fantix.king@gmail.com>
 *      Ported from amd64.
 */

#define STACK_REFPLUS 1

#ifdef SLP_EVAL

#define STACK_MAGIC 0

#define REGS_TO_SAVE "r12", "r13", "r14", "r15"

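/*
 * slp_switch: under the x32 ABI, pointers and the stack are 32 bits
 * wide (hence the movl instructions on %esp/%ebp) while the full
 * amd64 register set is live, so r12-r15 still must be preserved.
 */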
static int
slp_switch(void)
{
    void* ebp;
    void* ebx;
    unsigned int csr;
    unsigned short cw;
    register int err;
    register int *stackref, stsizediff;
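    /*
     * Naming REGS_TO_SAVE as clobbers forces the compiler to spill
     * r12-r15 onto this stack; then save the x87 control word, the
     * SSE control/status register, and %ebp/%ebx by hand, and grab
     * the current stack pointer into stackref.
     */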
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    __asm__ volatile ("fstcw %0" : "=m" (cw));
    __asm__ volatile ("stmxcsr %0" : "=m" (csr));
    __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp));
    __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx));
    __asm__ ("movl %%esp, %0" : "=g" (stackref));
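    /*
     * SLP_SAVE_STATE is expected to stash the old stack contents and
     * set stsizediff to the offset of the target stack; shifting
     * %esp and %ebp by that amount switches stacks before
     * SLP_RESTORE_STATE copies the saved contents back.
     */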
    {
        SLP_SAVE_STATE(stackref, stsizediff);
        __asm__ volatile (
            "addl %0, %%esp\n"
            "addl %0, %%ebp\n"
            :
            : "r" (stsizediff)
            );
        SLP_RESTORE_STATE();
    }
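    /* Undo the manual saves in reverse order, then zero %eax so the
     * function reports success. */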
    __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx));
    __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp));
    __asm__ volatile ("ldmxcsr %0" : : "m" (csr));
    __asm__ volatile ("fldcw %0" : : "m" (cw));
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err));
    return err;
}

#endif

/*
 * further self-processing support
 */

/*
 * if you want to add self-inspection tools, place them
 * here. See the x86_msvc for the necessary defines.
 * These features are highly experimental and not
 * essential yet.
 */