remove save_ip stub
ust.git: include/ust/processor.h
#ifndef UST_PROCESSOR_H
#define UST_PROCESSOR_H

#include <stddef.h>
#include <string.h>

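/*
 * Per-thread scratch stack used by save_registers() below.  ust_reg_stack_ptr
 * starts out NULL and is lazily initialized, on first use, to the bottom
 * (highest address) of ust_reg_stack; register values are then pushed
 * downwards, mirroring a hardware stack.
 */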
extern __thread long ust_reg_stack[500];
extern volatile __thread long *ust_reg_stack_ptr;

#ifndef __x86_64

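/*
 * Snapshot of the CPU registers on x86-32.  The member order is the reverse
 * of the order in which save_registers() pushes the values onto the private
 * stack: the value pushed last (ss) ends up at the lowest address and is
 * therefore the first member.
 */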
struct registers {
        short ss;
        short cs;
        long esi;
        long ebp;
        long edx;
        long ecx;
        long edi;
        long ebx;
        long eax;
        long eflags;
        long esp;
};

#ifdef CONFIG_UST_GDB_INTEGRATION

//#error "GDB integration not supported for x86-32 yet."

#define save_registers(regsptr) \
        asm volatile ( \
                /* save original esp */ \
                "pushl %%esp\n\t" \
                /* push original eflags */ \
                "pushfl\n\t" \
                /* eax will hold the ptr to the private stack bottom */ \
                "pushl %%eax\n\t" \
                /* ebx will be used to temporarily hold the stack bottom addr */ \
                "pushl %%ebx\n\t" \
                /* edi will be used as a temp var (on x86-32 the TLS argument is passed in eax) */ \
                "pushl %%edi\n\t" \
                /* Start TLS access of private reg stack pointer */ \
                "leal ust_reg_stack_ptr@tlsgd(,%%ebx,1),%%eax\n\t" \
                "call ___tls_get_addr@plt\n\t" \
                /* --- End TLS access */ \
                /* check if ust_reg_stack_ptr has been initialized */ \
                "movl (%%eax),%%ebx\n\t" \
                "testl %%ebx,%%ebx\n\t" \
                "jne 1f\n\t" \
                "movl %%eax,%%ebx\n\t" \
                /* Start TLS access of private reg stack */ \
                "leal ust_reg_stack@tlsgd(,%%ebx,1),%%eax\n\t" \
                "call ___tls_get_addr@plt\n\t" \
                /* --- End TLS access */ \
                /* sizeof(ust_reg_stack) == 500 longs * 4 bytes on x86-32 */ \
                "addl $2000,%%eax\n\t" \
                "movl %%eax,(%%ebx)\n\t" \
                "movl %%ebx,%%eax\n\t" \
                /* eax now holds the address of ust_reg_stack_ptr; the value at \
                   (%eax) points to the bottom of the private stack. */ \
                "1:\n\t" \
                /* Manually push esp to private stack */ \
                "addl $-4,(%%eax)\n\t" \
                "movl 16(%%esp), %%edi\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%edi, (%%ebx)\n\t" \
                /* Manually push eflags to private stack */ \
                "addl $-4,(%%eax)\n\t" \
                "movl 12(%%esp), %%edi\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%edi, (%%ebx)\n\t" \
                /* Manually push eax to private stack */ \
                "addl $-4,(%%eax)\n\t" \
                "movl 8(%%esp), %%edi\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%edi, (%%ebx)\n\t" \
                /* Manually push ebx to private stack */ \
                "addl $-4,(%%eax)\n\t" \
                "movl 4(%%esp), %%edi\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%edi, (%%ebx)\n\t" \
                /* Manually push edi to private stack */ \
                "addl $-4,(%%eax)\n\t" \
                "movl 0(%%esp), %%edi\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%edi, (%%ebx)\n\t" \
                /* now push regs to tls */ \
                /* -- esp already pushed -- */ \
                /* -- eax already pushed -- */ \
                /* -- ebx already pushed -- */ \
                /* -- edi already pushed -- */ \
                "addl $-4,(%%eax)\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%ecx,(%%ebx)\n\t" \
                "addl $-4,(%%eax)\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%edx,(%%ebx)\n\t" \
                "addl $-4,(%%eax)\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%ebp,(%%ebx)\n\t" \
                "addl $-4,(%%eax)\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movl %%esi,(%%ebx)\n\t" \
                /* push cs */ \
                "addl $-2,(%%eax)\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movw %%cs, (%%ebx)\n\t" \
                /* push ss */ \
                "addl $-2,(%%eax)\n\t" \
                "movl (%%eax), %%ebx\n\t" \
                "movw %%ss, (%%ebx)\n\t" \
                /* restore original values of regs that were used internally */ \
                "popl %%edi\n\t" \
                "popl %%ebx\n\t" \
                "popl %%eax\n\t" \
                /* cancel push of esp */ \
                "addl $4,%%esp\n\t" \
                /* cancel push of eflags */ \
                "addl $4,%%esp\n\t" \
                ::: "memory"); \
        /* copy the snapshot to the caller's buffer, then pop it off the private stack */ \
        memcpy(regsptr, (void *)ust_reg_stack_ptr, sizeof(struct registers)); \
        ust_reg_stack_ptr = (void *)(((long)ust_reg_stack_ptr) + sizeof(struct registers));

#else /* CONFIG_UST_GDB_INTEGRATION */

/* save_registers() is a no-op when GDB integration is disabled. */
#define save_registers(a)

#endif /* CONFIG_UST_GDB_INTEGRATION */

/* See the explanatory comment at the x86-64 definition of this macro below. */
#define RELATIVE_ADDRESS(__rel_label__) __rel_label__

#define _ASM_PTR ".long "

#else /* below is code for x86-64 */

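/*
 * Snapshot of the CPU registers on x86-64.  As on x86-32, the members appear
 * in the reverse of the push order used by save_registers(); the leading
 * 4-byte padding matches the final "addq $-4" alignment adjustment done by
 * the macro, so that the 64-bit members stay naturally aligned.
 */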
struct registers {
        int padding; /* 4 bytes */
        short ss;
        short cs;
        unsigned long r15;
        unsigned long r14;
        unsigned long r13;
        unsigned long r12;
        unsigned long r11;
        unsigned long r10;
        unsigned long r9;
        unsigned long r8;
        unsigned long rsi;
        unsigned long rbp;
        unsigned long rdx;
        unsigned long rcx;
        unsigned long rdi;
        unsigned long rbx;
        unsigned long rax;
        unsigned long rflags;
        unsigned long rsp;
};

#ifdef CONFIG_UST_GDB_INTEGRATION

#define save_registers(regsptr) \
        asm volatile ( \
                /* save original rsp */ \
                "pushq %%rsp\n\t" \
                /* push original rflags */ \
                "pushfq\n\t" \
                /* rax will hold the ptr to the private stack bottom */ \
                "pushq %%rax\n\t" \
                /* rbx will be used to temporarily hold the stack bottom addr */ \
                "pushq %%rbx\n\t" \
                /* rdi is the input to __tls_get_addr, and also a temp var */ \
                "pushq %%rdi\n\t" \
                /* Start TLS access of private reg stack pointer.  The \
                   0x66/0x6666/rex64 bytes pad this to the canonical general \
                   dynamic TLS sequence that the linker knows how to relax. */ \
                ".byte 0x66\n\t" \
                "leaq ust_reg_stack_ptr@tlsgd(%%rip), %%rdi\n\t" \
                ".word 0x6666\n\t" \
                "rex64\n\t" \
                "call __tls_get_addr@plt\n\t" \
                /* --- End TLS access */ \
                /* check if ust_reg_stack_ptr has been initialized */ \
                "movq (%%rax),%%rbx\n\t" \
                "testq %%rbx,%%rbx\n\t" \
                "jne 1f\n\t" \
                "movq %%rax,%%rbx\n\t" \
                /* Start TLS access of private reg stack */ \
                ".byte 0x66\n\t" \
                "leaq ust_reg_stack@tlsgd(%%rip), %%rdi\n\t" \
                ".word 0x6666\n\t" \
                "rex64\n\t" \
                "call __tls_get_addr@plt\n\t" \
                /* --- End TLS access */ \
                /* sizeof(ust_reg_stack) == 500 longs * 8 bytes on x86-64 */ \
                "addq $4000,%%rax\n\t" \
                "movq %%rax,(%%rbx)\n\t" \
                "movq %%rbx,%%rax\n\t" \
                /* rax now holds the address of ust_reg_stack_ptr; the value at \
                   (%rax) points to the bottom of the private stack. */ \
                "1:\n\t" \
                /* Manually push rsp to private stack */ \
                "addq $-8,(%%rax)\n\t" \
                "movq 32(%%rsp), %%rdi\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rdi, (%%rbx)\n\t" \
                /* Manually push rflags to private stack */ \
                "addq $-8,(%%rax)\n\t" \
                "movq 24(%%rsp), %%rdi\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rdi, (%%rbx)\n\t" \
                /* Manually push rax to private stack */ \
                "addq $-8,(%%rax)\n\t" \
                "movq 16(%%rsp), %%rdi\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rdi, (%%rbx)\n\t" \
                /* Manually push rbx to private stack */ \
                "addq $-8,(%%rax)\n\t" \
                "movq 8(%%rsp), %%rdi\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rdi, (%%rbx)\n\t" \
                /* Manually push rdi to private stack */ \
                "addq $-8,(%%rax)\n\t" \
                "movq 0(%%rsp), %%rdi\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rdi, (%%rbx)\n\t" \
                /* now push regs to tls */ \
                /* -- rsp already pushed -- */ \
                /* -- rax already pushed -- */ \
                /* -- rbx already pushed -- */ \
                /* -- rdi already pushed -- */ \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rcx,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rdx,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rbp,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%rsi,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r8,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r9,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r10,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r11,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r12,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r13,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r14,(%%rbx)\n\t" \
                "addq $-8,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movq %%r15,(%%rbx)\n\t" \
                /* push cs */ \
                "addq $-2,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movw %%cs, (%%rbx)\n\t" \
                /* push ss */ \
                "addq $-2,(%%rax)\n\t" \
                "movq (%%rax), %%rbx\n\t" \
                "movw %%ss, (%%rbx)\n\t" \
                /* add padding for struct registers */ \
                "addq $-4,(%%rax)\n\t" \
                /* restore original values of regs that were used internally */ \
                "popq %%rdi\n\t" \
                "popq %%rbx\n\t" \
                "popq %%rax\n\t" \
                /* cancel push of rsp */ \
                "addq $8,%%rsp\n\t" \
                /* cancel push of rflags */ \
                "addq $8,%%rsp\n\t" \
                ::: "memory"); \
        /* copy the snapshot to the caller's buffer, then pop it off the private stack */ \
        memcpy(regsptr, (void *)ust_reg_stack_ptr, sizeof(struct registers)); \
        ust_reg_stack_ptr = (void *)(((long)ust_reg_stack_ptr) + sizeof(struct registers));

#else /* CONFIG_UST_GDB_INTEGRATION */

/* save_registers() is a no-op when GDB integration is disabled. */
#define save_registers(a)

#endif /* CONFIG_UST_GDB_INTEGRATION */

/* Macro to insert the address of a relative jump in an assembly stub,
 * in a relocatable way. On x86-64, this uses %rip-relative addressing. */
#define RELATIVE_ADDRESS(__rel_label__) __rel_label__(%%rip)
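
/*
 * Illustration (hypothetical label name): RELATIVE_ADDRESS(my_label) expands
 * to my_label on x86-32 and to my_label(%%rip) here on x86-64, so a stub
 * written once can reference the label in a position-independent way on both
 * targets.  _ASM_PTR emits the matching pointer-sized data directive
 * (".long " on x86-32, ".quad " here).
 */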

#define _ASM_PTR ".quad "

#endif /* __x86_64 */

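/*
 * Minimal usage sketch (illustrative only; 'regs' is a hypothetical local
 * variable, not part of this header):
 *
 *     struct registers regs;
 *
 *     save_registers(&regs);
 *     // regs now holds a snapshot of the caller's registers, copied out of
 *     // the per-thread scratch stack by the macro itself.  Note that when
 *     // CONFIG_UST_GDB_INTEGRATION is not defined, save_registers() expands
 *     // to nothing and regs is left uninitialized.
 */
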
#endif /* UST_PROCESSOR_H */