  6 |   6 | // prolog when we run out.
  7 |   7 |
  8 |   8 | #if defined(__APPLE__) || defined(_WIN32)
  9 |     | -#define RUST_NEW_STACK _rust_new_stack
    |   9 | +#define RUST_NEW_STACK2 _rust_new_stack2
 10 |  10 | #define RUST_DEL_STACK _rust_del_stack
 11 |  11 | #define RUST_GET_PREV_STACK _rust_get_prev_stack
 12 |  12 | #define RUST_GET_TASK _rust_get_task
 13 |     | -#define UPCALL_ALLOC_C_STACK _upcall_alloc_c_stack
 14 |     | -#define UPCALL_CALL_C_STACK _upcall_call_c_stack
    |  13 | +#define UPCALL_CALL_C _upcall_call_shim_on_c_stack
 15 |  14 | #define MORESTACK ___morestack
 16 |  15 | #else
 17 |     | -#define RUST_NEW_STACK rust_new_stack
    |  16 | +#define RUST_NEW_STACK2 rust_new_stack2
 18 |  17 | #define RUST_DEL_STACK rust_del_stack
 19 |  18 | #define RUST_GET_PREV_STACK rust_get_prev_stack
 20 |  19 | #define RUST_GET_TASK rust_get_task
 21 |     | -#define UPCALL_ALLOC_C_STACK upcall_alloc_c_stack
 22 |     | -#define UPCALL_CALL_C_STACK upcall_call_c_stack
    |  20 | +#define UPCALL_CALL_C upcall_call_shim_on_c_stack
 23 |  21 | #define MORESTACK __morestack
 24 |  22 | #endif
 25 |  23 |
 26 |     | -#ifdef __APPLE__
 27 |     | -#define ALIGNMENT 4
 28 |     | -#else
 29 |     | -#define ALIGNMENT 8
 30 |     | -#endif
 31 |     | -
 32 |     | -#if defined (__APPLE__)
 33 |     | -#define NEW_STACK_ADDR rust_new_stack_sym-.L$pic_ref_pt_0(%eax)
 34 |     | -#define DEL_STACK_ADDR rust_del_stack_sym-.L$pic_ref_pt_1(%edx)
 35 |     | -#else
 36 |     | -#if defined (_WIN32)
 37 |     | -#define NEW_STACK_ADDR $_rust_new_stack
 38 |     | -#define DEL_STACK_ADDR $_rust_del_stack
 39 |     | -#else
 40 |     | -#define NEW_STACK_ADDR $rust_new_stack
 41 |     | -#define DEL_STACK_ADDR $rust_del_stack
 42 |     | -#endif
 43 |     | -#endif
 44 |     | -
    |  24 | +#define ALIGNMENT 8
 45 |  25 | #define RETURN_OFFSET 7
 46 |  26 |
 47 |  27 | .globl RUST_NEW_STACK
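
The macro change above collapses the old two-step interface (UPCALL_ALLOC_C_STACK to reserve a block on the C stack, then UPCALL_CALL_C_STACK to call through it) into a single UPCALL_CALL_C, which expands to upcall_call_shim_on_c_stack. From the way the new MORESTACK body below sets up the call (a pointer to an argument block at (%esp) and the shim function at 4(%esp)), the upcall appears to take an argument-block pointer plus a shim to run on the C stack. The following is a minimal C sketch of that pattern only; the struct, its field names, and the stub that stands in for the real stack-switching upcall are assumptions inferred from the assembly, not the runtime's actual definitions.

    #include <stdio.h>

    /* Hypothetical shim signature: one opaque pointer to an argument block
     * that the caller laid out on its own stack. */
    typedef void (*shim_fn)(void *args);

    /* Stand-in for upcall_call_shim_on_c_stack(args, fn). The real upcall
     * presumably switches to the task's C stack before invoking fn; this
     * model simply calls it on the current stack. */
    static void call_shim_on_c_stack(void *args, shim_fn fn) {
        fn(args);
    }

    /* The argument block built at 8(%esp) in the new MORESTACK; the field
     * names and types are guesses read off the offsets and comments below. */
    struct new_stack_args {
        unsigned  stk_sz;    /*  8(%esp): the amount of stack needed     */
        void     *args_addr; /* 12(%esp): address of the stack arguments */
        unsigned  args_sz;   /* 16(%esp): size of the stack arguments    */
        void     *sp;        /* 20(%esp): the saved stack pointer        */
    };

    /* A shim standing in for rust_new_stack2: it only shows how such a
     * shim would unpack the block it is handed. */
    static void new_stack_shim(void *args) {
        struct new_stack_args *a = args;
        printf("grow by %u bytes, %u bytes of args at %p, sp=%p\n",
               a->stk_sz, a->args_sz, a->args_addr, a->sp);
    }

    int main(void) {
        int dummy_args[2] = { 1, 2 };
        struct new_stack_args a = { 4096, dummy_args, sizeof dummy_args, &a };
        call_shim_on_c_stack(&a, new_stack_shim);
        return 0;
    }

As far as this diff shows, the net effect is that __morestack no longer allocates and fills a separate block on the C stack; it builds the arguments in its own frame and passes a single pointer across.
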
 68 |  48 | #if defined(__linux__)
 69 |  49 | MORESTACK:
 70 |  50 |
 71 |     | - // Sanity check to make sure that there is a currently-running task.
 72 |     | - subl $12,%esp
 73 |     | - calll RUST_GET_TASK
 74 |     | - testl %eax,%eax
 75 |     | - jz .L$bail
 76 |     | -
 77 |     | - movl $16, (%esp)
 78 |     | - calll UPCALL_ALLOC_C_STACK
 79 |     | - movl %eax,%edx
 80 |     | -
 81 |     | - movl %esp, 12(%edx)
 82 |     | - // C stack | esp+12
 83 |     | - // ---------------------+-------------------------
 84 |     | - movl 20(%esp),%eax // | ra stksz argsz x ra args
 85 |     | - movl %eax,8(%edx) // argsz > | ra stksz argsz x ra args
 86 |     | - leal 28+ALIGNMENT(%esp),%eax // argsz | ra stksz argsz x ra args
 87 |     | - movl %eax,4(%edx) // argp > argsz | ra stksz argsz x ra args
 88 |     | - movl 16(%esp),%eax // argp argsz | ra stksz argsz x ra args
 89 |     | - movl %eax,(%edx) // stksz > argp argsz | ra stksz argsz x ra args
 90 |     | -
 91 |     | - calll .L$pic_ref_pt_0
 92 |     | -.L$pic_ref_pt_0:
 93 |     | - popl %eax
 94 |     | -
 95 |     | - movl NEW_STACK_ADDR,%eax
 96 |     | - movl %eax,(%esp)
 97 |     | - movl %edx,4(%esp)
 98 |     | - // FIXME: Don't understand why just _here_ I have to say @PLT
 99 |     | - calll UPCALL_CALL_C_STACK@PLT
100 |     | -
101 |     | - movl 12(%esp),%edx // Grab the return pointer.
102 |     | - addl $RETURN_OFFSET,%edx // Skip past the `add esp,4` and the `ret`.
103 |     | -
104 |     | - movl %eax,%esp // Switch stacks.
105 |     | - calll *%edx // Re-enter the function that called us.
106 |     | -
107 |     | - // Now the function that called us has returned, so we need to delete the
108 |     | - // old stack space.
109 |     | -
110 |     | - calll RUST_GET_PREV_STACK
111 |     | - movl %eax,%esp // Switch back to the old stack.
112 |     | -
113 |     | - movl $0,(%esp)
114 |     | - calll UPCALL_ALLOC_C_STACK
115 |     | -
116 |     | - calll .L$pic_ref_pt_1
117 |     | -.L$pic_ref_pt_1:
118 |     | - popl %edx
119 |     | -
120 |     | - movl DEL_STACK_ADDR,%edx
121 |     | - movl %edx,(%esp)
122 |     | - movl %eax,4(%esp)
123 |     | - calll UPCALL_CALL_C_STACK
124 |     | -
125 |     | - addl $12,%esp
126 |     | - retl $8 // ra stksz argsz x ra args
    |  51 | + pushl %ebp
    |  52 | + movl %esp, %ebp
    |  53 | +
    |  54 | + // FIXME (1226): main is compiled with the split-stack prologue,
    |  55 | + // causing it to call __morestack, so we have to jump back out
    |  56 | + subl $24,%esp
    |  57 | + calll RUST_GET_TASK
    |  58 | + testl %eax,%eax
    |  59 | + jz .L$bail
    |  60 | +
    |  61 | + // The arguments to rust_new_stack2
    |  62 | + movl %esp, 20(%esp) // Save the stack pointer
    |  63 | + movl 36(%esp),%eax // Size of stack arguments
    |  64 | + movl %eax,16(%esp)
    |  65 | + leal 44+ALIGNMENT(%esp),%eax // Address of stack arguments
    |  66 | + movl %eax,12(%esp)
    |  67 | + movl 32(%esp),%eax // The amount of stack needed
    |  68 | + movl %eax,8(%esp)
    |  69 | +
    |  70 | + movl $RUST_NEW_STACK2,4(%esp)
    |  71 | + leal 8(%esp), %eax
    |  72 | + movl %eax,(%esp)
    |  73 | + call UPCALL_CALL_C
    |  74 | +
    |  75 | + movl 28(%esp),%edx // Grab the return pointer.
    |  76 | + addl $RETURN_OFFSET,%edx // Skip past the `add esp,4` and the `ret`.
    |  77 | +
    |  78 | + movl %eax,%esp // Switch stacks.
    |  79 | + call *%edx // Re-enter the function that called us.
    |  80 | +
    |  81 | + // Now the function that called us has returned, so we need to delete the
    |  82 | + // old stack space.
    |  83 | +
    |  84 | + // NB: This is assuming we already have at least 2 words
    |  85 | + // pushed onto the C stack. This is always true because
    |  86 | + // Rust functions have implicit arguments.
    |  87 | + movl $RUST_GET_PREV_STACK,4(%esp)
    |  88 | + movl $0, (%esp)
    |  89 | + call UPCALL_CALL_C
    |  90 | +
    |  91 | + // Switch back to the rust stack
    |  92 | + movl %eax, %esp
    |  93 | +
    |  94 | + movl $RUST_DEL_STACK,4(%esp)
    |  95 | + movl $0, (%esp)
    |  96 | + call UPCALL_CALL_C
    |  97 | +
    |  98 | + addl $24,%esp
    |  99 | + popl %ebp
    | 100 | + retl $8
127 | 101 |
128 | 102 | .L$bail:
129 |     | - movl 12(%esp),%edx
130 |     | - addl $RETURN_OFFSET,%edx
131 |     | - addl $12+4+8+ALIGNMENT,%esp
132 |     | - jmpl *%edx
    | 103 | + movl 28(%esp),%edx
    | 104 | + addl $RETURN_OFFSET,%edx
    | 105 | +
    | 106 | + addl $24, %esp
    | 107 | + popl %ebp
    | 108 | + addl $4+8+ALIGNMENT,%esp
    | 109 | +
    | 110 | + jmpl *%edx
133 | 111 |
134 | 112 |
135 | 113 | #else
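
For reference, the offsets the new code uses (28, 32, 36, 44+ALIGNMENT) all follow from the frame set up by `pushl %ebp; subl $24,%esp` sitting on top of the `ra stksz argsz x ra args` layout documented in the removed code. A small C sketch of that arithmetic, with names of our own choosing (the runtime defines no such enum), assuming the padding slot is ALIGNMENT bytes as in the old code:

    #include <stdio.h>

    /* Offsets from %esp inside the new MORESTACK after
     * "pushl %ebp; subl $24,%esp", reconstructed from the
     * "ra stksz argsz x ra args" layout noted in the removed code. */
    enum {
        ALIGNMENT = 8,               /* matches the new #define above              */
        FRAME     = 4 + 24,          /* pushl %ebp + subl $24,%esp                 */
        RA        = FRAME,           /* 28(%esp): return address into the prologue */
        STK_SZ    = FRAME + 4,       /* 32(%esp): the amount of stack needed       */
        ARGS_SZ   = FRAME + 8,       /* 36(%esp): size of the stack arguments      */
        PAD       = FRAME + 12,      /* 40(%esp): ALIGNMENT bytes of padding ("x") */
        CALLER_RA = PAD + ALIGNMENT, /* 48(%esp): the caller's return address      */
        ARGS      = CALLER_RA + 4    /* 52(%esp) == 44+ALIGNMENT: the arguments    */
    };

    int main(void) {
        /* retl $8 pops stksz and argsz; .L$bail instead unwinds the frame and
         * then drops ra, stksz, argsz and the padding: 4 + 8 + ALIGNMENT bytes. */
        printf("args start at %d(%%esp), i.e. 44+ALIGNMENT\n", ARGS);
        return 0;
    }
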