/*
 * this is the internal transfer function.
 *
 * HISTORY
 * 3-May-13   Ralf Schmitt <ralf@systemexit.de>
 *      Add support for strange GCC caller-save decisions
 *      (ported from switch_aarch64_gcc.h)
 * 18-Aug-11  Alexey Borzenkov <snaury@gmail.com>
 *      Correctly save rbp, csr and cw
 * 01-Apr-04  Hye-Shik Chang <perky@FreeBSD.org>
 *      Ported from i386 to amd64.
 * 24-Nov-02  Christian Tismer <tismer@tismer.com>
 *      needed to add another magic constant to ensure
 *      that f in slp_eval_frame(PyFrameObject *f)
 *      gets included into the saved stack area.
 *      STACK_REFPLUS will probably be 1 in most cases.
 * 17-Sep-02  Christian Tismer <tismer@tismer.com>
 *      after virtualizing stack save/restore, the
 *      stack size shrunk a bit. Needed to introduce
 *      an adjustment STACK_MAGIC per platform.
 * 15-Sep-02  Gerd Woetzel <gerd.woetzel@GMD.DE>
 *      slightly changed framework for spark
 * 31-Apr-02  Armin Rigo <arigo@ulb.ac.be>
 *      Added ebx, esi and edi register-saves.
 * 01-Mar-02  Samual M. Rushing <rushing@ironport.com>
 *      Ported from i386.
 */
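
/* Extra stack words included above the stack reference point, so that
 * the f argument of slp_eval_frame(PyFrameObject *f) gets included in
 * the saved stack area (see the 24-Nov-02 HISTORY entry above). */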
#define STACK_REFPLUS 1

#ifdef SLP_EVAL
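
/* Everything below is compiled only when the including file defines
 * SLP_EVAL; without it, only the constants above are picked up. */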
/* #define STACK_MAGIC 3 */
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
#define STACK_MAGIC 0
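
/* STACK_MAGIC is a per-platform adjustment to the computed stack size,
 * introduced after stack save/restore was virtualized (see the 17-Sep-02
 * HISTORY entry above). */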

#define REGS_TO_SAVE "r12", "r13", "r14", "r15"
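
/* REGS_TO_SAVE lists the remaining callee-saved registers of the System V
 * AMD64 ABI (rbp and rbx are saved explicitly below).  Naming them as
 * clobbers in the empty asm statements at the top and bottom of
 * slp_switch() forces GCC to spill and reload them around the stack
 * switch, whatever caller-save decisions it makes (see the 3-May-13
 * HISTORY entry above). */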

static int
slp_switch(void)
{
    int err;
    void* rbp;
    void* rbx;
    unsigned int csr;      /* SSE control/status register (MXCSR) */
    unsigned short cw;     /* x87 FPU control word */
    register long *stackref, stsizediff;
    __asm__ volatile ("" : : : REGS_TO_SAVE);          /* spill callee-saved regs */
    __asm__ volatile ("fstcw %0" : "=m" (cw));         /* save x87 control word */
    __asm__ volatile ("stmxcsr %0" : "=m" (csr));      /* save MXCSR */
    __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp));  /* save frame pointer */
    __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx));
    __asm__ ("movq %%rsp, %0" : "=g" (stackref));      /* take a stack reference */
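    /* SLP_SAVE_STATE and SLP_RESTORE_STATE are supplied by the file that
     * includes this header with SLP_EVAL defined.  SLP_SAVE_STATE is
     * expected to save the stack starting at stackref and to set
     * stsizediff to the offset between the old and the new stack;
     * SLP_RESTORE_STATE restores the target stack's contents. */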
    {
        SLP_SAVE_STATE(stackref, stsizediff);
        /* shift rsp and rbp over to the new stack */
        __asm__ volatile (
            "addq %0, %%rsp\n"
            "addq %0, %%rbp\n"
            :
            : "r" (stsizediff)
            );
        SLP_RESTORE_STATE();
        __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err));  /* err = 0 */
    }
    __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx));
    __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp));  /* restore frame pointer */
    __asm__ volatile ("ldmxcsr %0" : : "m" (csr));      /* restore MXCSR */
    __asm__ volatile ("fldcw %0" : : "m" (cw));         /* restore x87 control word */
    __asm__ volatile ("" : : : REGS_TO_SAVE);           /* reload callee-saved regs */
    return err;
}

#endif

/*
 * further self-processing support
 */

/*
 * if you want to add self-inspection tools, place them
 * here. See the x86_msvc port for the necessary defines.
 * These features are highly experimental and not
 * yet essential.
 */