4bd0997db76f26e7682554d88ce506140cf96d3d
1 #ifndef UST_PROCESSOR_H
2 #define UST_PROCESSOR_H
/*
 * Per-thread private stack used by save_registers() below to capture
 * the CPU register state without disturbing the thread's real stack.
 * NOTE(review): declared as 500 longs, yet the asm advances the byte
 * pointer by only $500 -- looks inconsistent (2000/4000 bytes vs 500);
 * confirm the intended stack size against the full file.
 */
7 extern __thread
long ust_reg_stack
[500];
/*
 * Current top of the private register stack for this thread.  Presumably
 * zero until the first save_registers() call lazily initializes it (the
 * asm tests it for zero before use) -- confirm against the full listing.
 */
8 extern volatile __thread
long *ust_reg_stack_ptr
;
26 #ifdef CONFIG_UST_GDB_INTEGRATION
/*
 * save_registers(regsptr) -- i386 variant.
 * Snapshots the calling thread's general-purpose registers, eflags, cs
 * and ss into the thread-local ust_reg_stack, then copies the captured
 * "struct registers" frame out to *regsptr and pops it.
 * NOTE(review): this listing is incomplete -- the asm() statement
 * delimiters, the initial pushes of esp/eflags/eax/ebx/ecx/edi, the
 * branch labels taken after the initialization test, and the register
 * restore sequence are not visible here; review against the full file.
 */
29 #define save_registers(regsptr) \
31 /* save original esp */ \
33 /* push original eflags */ \
35 /* eax will hold the ptr to the private stack bottom */ \
37 /* ebx is used for TLS access */ \
39 /* ecx will be used to temporarily hold the stack bottom addr */ \
41 /* rdi is the input to __tls_get_addr, and also a temp var */ \
43 /* Get GOT address */ \
44 "call __i686.get_pc_thunk.bx\n\t" \
45 "addl $_GLOBAL_OFFSET_TABLE_, %%ebx\n\t" \
46 /* Save registers before call (not using ecx yet but we must preserve \
47 the original value of edx. */ \
49 /* Start TLS access of private reg stack pointer */ \
50 "leal ust_reg_stack_ptr@tlsgd(,%%ebx,1),%%eax\n\t" \
51 "call ___tls_get_addr@plt\n\t" \
52 /* --- End TLS access */ \
54 /* check if ust_reg_stack_ptr has been initialized */ \
55 "movl (%%eax),%%ecx\n\t" \
56 "testl %%ecx,%%ecx\n\t" \
58 "movl %%eax,%%ecx\n\t" \
59 /* Save registers before call (using ecx and we must preserve \
60 the original value of edx. */ \
63 /* Start TLS access of private reg stack */ \
64 "leal ust_reg_stack@tlsgd(,%%ebx,1),%%eax\n\t" \
65 "call ___tls_get_addr@plt\n\t" \
66 /* --- End TLS access */ \
/* NOTE(review): $500 is a byte offset, but ust_reg_stack is declared \
   as 500 longs (2000 bytes on i386) -- confirm the intended \
   stack-bottom offset. */ \
69 "addl $500,%%eax\n\t" \
70 "movl %%eax,(%%ecx)\n\t" \
71 "movl %%ecx,%%eax\n\t" \
72 /* now the pointer to the private stack is in eax. \
73 must add stack size so the ptr points to the stack bottom. */ \
75 /* Manually push esp to private stack */ \
76 "addl $-4,(%%eax)\n\t" \
77 "movl 20(%%esp), %%edi\n\t" \
78 "movl (%%eax), %%ebx\n\t" \
79 "movl %%edi, (%%ebx)\n\t" \
80 /* Manually push eflags to private stack */ \
81 "addl $-4,(%%eax)\n\t" \
82 "movl 16(%%esp), %%edi\n\t" \
83 "movl (%%eax), %%ebx\n\t" \
84 "movl %%edi, (%%ebx)\n\t" \
85 /* Manually push eax to private stack */ \
86 "addl $-4,(%%eax)\n\t" \
87 "movl 12(%%esp), %%edi\n\t" \
88 "movl (%%eax), %%ebx\n\t" \
89 "movl %%edi, (%%ebx)\n\t" \
90 /* Manually push ebx to private stack */ \
91 "addl $-4,(%%eax)\n\t" \
92 "movl 8(%%esp), %%edi\n\t" \
93 "movl (%%eax), %%ebx\n\t" \
94 "movl %%edi, (%%ebx)\n\t" \
95 /* Manually push ecx to private stack */ \
96 "addl $-4,(%%eax)\n\t" \
97 "movl 4(%%esp), %%edi\n\t" \
98 "movl (%%eax), %%ebx\n\t" \
99 "movl %%edi, (%%ebx)\n\t" \
100 /* Manually push edi to private stack */ \
101 "addl $-4,(%%eax)\n\t" \
102 "movl 0(%%esp), %%edi\n\t" \
103 "movl (%%eax), %%ebx\n\t" \
104 "movl %%edi, (%%ebx)\n\t" \
105 /* now push regs to tls */ \
106 /* -- esp already pushed -- */ \
107 /* -- eax already pushed -- */ \
108 /* -- ebx already pushed -- */ \
109 /* -- ecx already pushed -- */ \
110 /* -- edi already pushed -- */ \
111 "addl $-4,(%%eax)\n\t" \
112 "movl (%%eax), %%ebx\n\t" \
113 "movl %%edx,(%%ebx)\n\t" \
114 "addl $-4,(%%eax)\n\t" \
115 "movl (%%eax), %%ebx\n\t" \
116 "movl %%ebp,(%%ebx)\n\t" \
117 "addl $-4,(%%eax)\n\t" \
118 "movl (%%eax), %%ebx\n\t" \
119 "movl %%esi,(%%ebx)\n\t" \
/* cs and ss are 16-bit selectors; stored in 2-byte slots via movw */ \
121 "addl $-2,(%%eax)\n\t" \
122 "movl (%%eax), %%ebx\n\t" \
123 "movw %%cs, (%%ebx)\n\t" \
125 "addl $-2,(%%eax)\n\t" \
126 "movl (%%eax), %%ebx\n\t" \
127 "movw %%ss, (%%ebx)\n\t" \
128 /* restore original values of regs that were used internally */ \
133 /* cancel push of rsp */ \
134 "addl $4,%%esp\n\t" \
135 /* cancel push of eflags */ \
136 "addl $4,%%esp\n\t" \
/* Copy the captured frame out to the caller, then pop it. */ \
138 memcpy(regsptr, (void *)ust_reg_stack_ptr, sizeof(struct registers)); \
139 ust_reg_stack_ptr = (void *)(((long)ust_reg_stack_ptr) + sizeof(struct registers));
141 #else /* CONFIG_UST_GDB_INTEGRATION */
/* GDB integration disabled: save_registers() expands to nothing. */
143 #define save_registers(a)
145 #endif /* CONFIG_UST_GDB_INTEGRATION */
/* On i386 a relative jump/data label needs no special notation
 * (contrast with the x86-64 variant below, which appends (%%rip)). */
147 #define RELATIVE_ADDRESS(__rel_label__) __rel_label__
/* Assembler directive emitting a pointer-sized word: 4 bytes on i386. */
149 #define _ASM_PTR ".long "
151 #else /* below is code for x86-64 */
/* NOTE(review): the lines below appear to be fields of the x86-64
 * "struct registers" whose opening brace and remaining members are not
 * visible in this listing -- verify against the full file. */
/* explicit padding, matching the "addq $-4" frame adjustment in the
 * save_registers() asm below */
154 int padding
; /* 4 bytes */
/* saved CPU flags word */
172 unsigned long rflags
;
176 #ifdef CONFIG_UST_GDB_INTEGRATION
/*
 * save_registers(regsptr) -- x86-64 variant.
 * Snapshots the calling thread's general-purpose registers (including
 * r8-r15), rflags, cs and ss into the thread-local ust_reg_stack, then
 * copies the captured "struct registers" frame out to *regsptr and
 * pops it.
 * NOTE(review): this listing is incomplete -- the asm() statement
 * delimiters, the initial pushes of rsp/rflags/rax/rbx/rdi, the branch
 * labels taken after the initialization test, and the register restore
 * sequence are not visible here; review against the full file.
 */
178 #define save_registers(regsptr) \
180 /* save original rsp */ \
182 /* push original rflags */ \
184 /* rax will hold the ptr to the private stack bottom */ \
186 /* rbx will be used to temporarily hold the stack bottom addr */ \
188 /* rdi is the input to __tls_get_addr, and also a temp var */ \
190 /* Start TLS access of private reg stack pointer */ \
192 "leaq ust_reg_stack_ptr@tlsgd(%%rip), %%rdi\n\t" \
195 "call __tls_get_addr@plt\n\t" \
196 /* --- End TLS access */ \
197 /* check if ust_reg_stack_ptr has been initialized */ \
198 "movq (%%rax),%%rbx\n\t" \
199 "testq %%rbx,%%rbx\n\t" \
201 "movq %%rax,%%rbx\n\t" \
202 /* Start TLS access of private reg stack */ \
204 "leaq ust_reg_stack@tlsgd(%%rip), %%rdi\n\t" \
207 "call __tls_get_addr@plt\n\t" \
208 /* --- End TLS access */ \
/* NOTE(review): $500 is a byte offset, but ust_reg_stack is declared \
   as 500 longs (4000 bytes on x86-64) -- confirm the intended \
   stack-bottom offset. */ \
209 "addq $500,%%rax\n\t" \
210 "movq %%rax,(%%rbx)\n\t" \
211 "movq %%rbx,%%rax\n\t" \
212 /* now the pointer to the private stack is in rax.
213 must add stack size so the ptr points to the stack bottom. */ \
215 /* Manually push rsp to private stack */ \
216 "addq $-8,(%%rax)\n\t" \
217 "movq 32(%%rsp), %%rdi\n\t" \
218 "movq (%%rax), %%rbx\n\t" \
219 "movq %%rdi, (%%rbx)\n\t" \
220 /* Manually push eflags to private stack */ \
221 "addq $-8,(%%rax)\n\t" \
222 "movq 24(%%rsp), %%rdi\n\t" \
223 "movq (%%rax), %%rbx\n\t" \
224 "movq %%rdi, (%%rbx)\n\t" \
225 /* Manually push rax to private stack */ \
226 "addq $-8,(%%rax)\n\t" \
227 "movq 16(%%rsp), %%rdi\n\t" \
228 "movq (%%rax), %%rbx\n\t" \
229 "movq %%rdi, (%%rbx)\n\t" \
230 /* Manually push rbx to private stack */ \
231 "addq $-8,(%%rax)\n\t" \
232 "movq 8(%%rsp), %%rdi\n\t" \
233 "movq (%%rax), %%rbx\n\t" \
234 "movq %%rdi, (%%rbx)\n\t" \
235 /* Manually push rdi to private stack */ \
236 "addq $-8,(%%rax)\n\t" \
237 "movq 0(%%rsp), %%rdi\n\t" \
238 "movq (%%rax), %%rbx\n\t" \
239 "movq %%rdi, (%%rbx)\n\t" \
240 /* now push regs to tls */ \
241 /* -- rsp already pushed -- */ \
242 /* -- rax already pushed -- */ \
243 /* -- rbx already pushed -- */ \
244 /* -- rdi already pushed -- */ \
245 "addq $-8,(%%rax)\n\t" \
246 "movq (%%rax), %%rbx\n\t" \
247 "movq %%rcx,(%%rbx)\n\t" \
248 "addq $-8,(%%rax)\n\t" \
249 "movq (%%rax), %%rbx\n\t" \
250 "movq %%rdx,(%%rbx)\n\t" \
251 "addq $-8,(%%rax)\n\t" \
252 "movq (%%rax), %%rbx\n\t" \
253 "movq %%rbp,(%%rbx)\n\t" \
254 "addq $-8,(%%rax)\n\t" \
255 "movq (%%rax), %%rbx\n\t" \
256 "movq %%rsi,(%%rbx)\n\t" \
257 "addq $-8,(%%rax)\n\t" \
258 "movq (%%rax), %%rbx\n\t" \
259 "movq %%r8,(%%rbx)\n\t" \
260 "addq $-8,(%%rax)\n\t" \
261 "movq (%%rax), %%rbx\n\t" \
262 "movq %%r9,(%%rbx)\n\t" \
263 "addq $-8,(%%rax)\n\t" \
264 "movq (%%rax), %%rbx\n\t" \
265 "movq %%r10,(%%rbx)\n\t" \
266 "addq $-8,(%%rax)\n\t" \
267 "movq (%%rax), %%rbx\n\t" \
268 "movq %%r11,(%%rbx)\n\t" \
269 "addq $-8,(%%rax)\n\t" \
270 "movq (%%rax), %%rbx\n\t" \
271 "movq %%r12,(%%rbx)\n\t" \
272 "addq $-8,(%%rax)\n\t" \
273 "movq (%%rax), %%rbx\n\t" \
274 "movq %%r13,(%%rbx)\n\t" \
275 "addq $-8,(%%rax)\n\t" \
276 "movq (%%rax), %%rbx\n\t" \
277 "movq %%r14,(%%rbx)\n\t" \
278 "addq $-8,(%%rax)\n\t" \
279 "movq (%%rax), %%rbx\n\t" \
280 "movq %%r15,(%%rbx)\n\t" \
/* cs and ss are 16-bit selectors; stored in 2-byte slots via movw */ \
282 "addq $-2,(%%rax)\n\t" \
283 "movq (%%rax), %%rbx\n\t" \
284 "movw %%cs, (%%rbx)\n\t" \
286 "addq $-2,(%%rax)\n\t" \
287 "movq (%%rax), %%rbx\n\t" \
288 "movw %%ss, (%%rbx)\n\t" \
289 /* add padding for struct registers */ \
290 "addq $-4,(%%rax)\n\t" \
291 /* restore original values of regs that were used internally */ \
295 /* cancel push of rsp */ \
296 "addq $8,%%rsp\n\t" \
297 /* cancel push of rflags */ \
298 "addq $8,%%rsp\n\t" \
/* Copy the captured frame out to the caller, then pop it. */ \
300 memcpy(regsptr, (void *)ust_reg_stack_ptr, sizeof(struct registers)); \
301 ust_reg_stack_ptr = (void *)(((long)ust_reg_stack_ptr) + sizeof(struct registers));
303 #else /* CONFIG_UST_GDB_INTEGRATION */
/* GDB integration disabled: save_registers() expands to nothing. */
305 #define save_registers(a)
307 #endif /* CONFIG_UST_GDB_INTEGRATION */
309 /* Macro to insert the address of a relative jump in an assembly stub,
310 * in a relocatable way. On x86-64, this uses a special (%rip) notation. */
311 #define RELATIVE_ADDRESS(__rel_label__) __rel_label__(%%rip)
/* Assembler directive emitting a pointer-sized word: 8 bytes on x86-64. */
313 #define _ASM_PTR ".quad "
317 #endif /* UST_PROCESSOR_H */
This page took 0.051604 seconds and 4 git commands to generate.