1 #ifndef __X86_64_ELF_H__
2 #define __X86_64_ELF_H__
3
/*
 * General-purpose register set as written into an ELF core note.
 *
 * NOTE(review): the field order appears to mirror the x86-64 core-dump
 * (PRSTATUS/pt_regs) register layout, which is an on-disk format — do
 * not reorder or resize fields without confirming against the tools
 * that consume the core file (e.g. the `crash` utility).
 */
typedef struct {
    unsigned long r15;
    unsigned long r14;
    unsigned long r13;
    unsigned long r12;
    unsigned long rbp;
    unsigned long rbx;
    unsigned long r11;
    unsigned long r10;
    unsigned long r9;
    unsigned long r8;
    unsigned long rax;
    unsigned long rcx;
    unsigned long rdx;
    unsigned long rsi;
    unsigned long rdi;
    unsigned long orig_rax;   /* not populated by elf_core_save_regs() */
    unsigned long rip;
    unsigned long cs;
    unsigned long rflags;
    unsigned long rsp;
    unsigned long ss;
    unsigned long thread_fs;  /* not populated by elf_core_save_regs() */
    unsigned long thread_gs;  /* not populated by elf_core_save_regs() */
    unsigned long ds;
    unsigned long es;
    unsigned long fs;
    unsigned long gs;
} ELF_Gregset;
33
/*
 * Snapshot the current CPU's general-purpose, segment and control
 * registers for inclusion in a crash/core dump.
 *
 * core_regs:     receives the general-purpose and segment registers.
 * xen_core_regs: receives control registers CR0/CR2/CR3/CR4.
 *
 * Each register is captured by a separate asm statement, so this is not
 * an atomic snapshot: compiler-generated code may run between the
 * statements.  Statement order matters — e.g. rax must be saved before
 * the segment-register reads below, since those use "=a" and therefore
 * clobber (e)ax.  Do not reorder.
 */
static inline void elf_core_save_regs(ELF_Gregset *core_regs,
                                      crash_xen_core_t *xen_core_regs)
{
    unsigned long tmp;

    asm volatile("movq %%r15,%0" : "=m"(core_regs->r15));
    asm volatile("movq %%r14,%0" : "=m"(core_regs->r14));
    asm volatile("movq %%r13,%0" : "=m"(core_regs->r13));
    asm volatile("movq %%r12,%0" : "=m"(core_regs->r12));
    asm volatile("movq %%rbp,%0" : "=m"(core_regs->rbp));
    asm volatile("movq %%rbx,%0" : "=m"(core_regs->rbx));
    asm volatile("movq %%r11,%0" : "=m"(core_regs->r11));
    asm volatile("movq %%r10,%0" : "=m"(core_regs->r10));
    asm volatile("movq %%r9,%0" : "=m"(core_regs->r9));
    asm volatile("movq %%r8,%0" : "=m"(core_regs->r8));
    asm volatile("movq %%rax,%0" : "=m"(core_regs->rax));
    asm volatile("movq %%rcx,%0" : "=m"(core_regs->rcx));
    asm volatile("movq %%rdx,%0" : "=m"(core_regs->rdx));
    asm volatile("movq %%rsi,%0" : "=m"(core_regs->rsi));
    asm volatile("movq %%rdi,%0" : "=m"(core_regs->rdi));
    /* orig_rax not filled in for now */
    /* Use this function's own address as an approximate program counter. */
    core_regs->rip = (unsigned long)elf_core_save_regs;
    asm volatile("movl %%cs, %%eax;" :"=a"(core_regs->cs));
    /* rflags cannot be moved directly; push it and pop into memory. */
    asm volatile("pushfq; popq %0" :"=m"(core_regs->rflags));
    asm volatile("movq %%rsp,%0" : "=m"(core_regs->rsp));
    asm volatile("movl %%ss, %%eax;" :"=a"(core_regs->ss));
    /* thread_fs not filled in for now */
    /* thread_gs not filled in for now */
    asm volatile("movl %%ds, %%eax;" :"=a"(core_regs->ds));
    asm volatile("movl %%es, %%eax;" :"=a"(core_regs->es));
    asm volatile("movl %%fs, %%eax;" :"=a"(core_regs->fs));
    asm volatile("movl %%gs, %%eax;" :"=a"(core_regs->gs));

    /* Control registers can only be read into a general register,
     * so bounce each one through tmp. */
    asm volatile("mov %%cr0, %0" : "=r" (tmp) : );
    xen_core_regs->cr0 = tmp;

    asm volatile("mov %%cr2, %0" : "=r" (tmp) : );
    xen_core_regs->cr2 = tmp;

    asm volatile("mov %%cr3, %0" : "=r" (tmp) : );
    xen_core_regs->cr3 = tmp;

    asm volatile("mov %%cr4, %0" : "=r" (tmp) : );
    xen_core_regs->cr4 = tmp;
}
79
80 #endif /* __X86_64_ELF_H__ */
81
82 /*
83 * Local variables:
84 * mode: C
85 * c-file-style: "BSD"
86 * c-basic-offset: 4
87 * tab-width: 4
88 * indent-tabs-mode: nil
89 * End:
90 */
91