/* d56bcf5f323612626e72a99af21348f89fefa700
 * [eradicate] / src / util.h
 */
#ifndef UTIL_H_
#define UTIL_H_

#include <string.h>	/* memcpy, used for type punning in cround64 */
#include "inttypes.h"

#ifdef __GNUC__
#define INLINE __inline
#define PACKED __attribute__((packed))

#elif defined(__WATCOMC__)
#define INLINE __inline
#define PACKED

#else
#define INLINE
#define PACKED
#endif

/* fast conversion of double -> 32bit int
 * for details see:
 *  - http://chrishecker.com/images/f/fb/Gdmfp.pdf
 *  - http://stereopsis.com/FPU.html#convert
 */
static INLINE int32_t cround64(double val)
{
	int32_t res;
	/* 6755399441055744.0 == 2^52 + 2^51: adding it forces the integer
	 * part of val into the low mantissa bits.  Uses the FPU's current
	 * rounding mode (round-to-nearest-even by default, so e.g.
	 * cround64(2.5) == 2). */
	val += 6755399441055744.0;
	/* read the low 32 bits of the double's representation; memcpy
	 * compiles to the same single load as *(int32_t*)&val but avoids
	 * the strict-aliasing undefined behavior of the pointer cast.
	 * Assumes a little-endian target (true for every platform this
	 * header's asm targets). */
	memcpy(&res, &val, sizeof res);
	return res;
}

/* cycle counts captured by the perf_start()/perf_end() pairs below;
 * must be defined in exactly one .c file */
extern uint32_t perf_start_count, perf_interval_count;

#ifdef __WATCOMC__
/* copy count 8-byte quadwords (NOT bytes) from src to dest using MMX moves.
 * count must be > 0 and the regions must not overlap.
 * NOTE(review): the loop trashes the parm registers ebx/edx/ecx and mm0;
 * Watcom treats parm registers as modified, but verify on other wcc
 * versions that no extra modify clause is required. */
void memcpy64(void *dest, void *src, int count);
#pragma aux memcpy64 = \
	"cploop:" \
	"movq mm0, [edx]" \
	"movq [ebx], mm0" \
	"add edx, 8" \
	"add ebx, 8" \
	"dec ecx" \
	"jnz cploop" \
	"emms" \
	parm[ebx][edx][ecx];

/* serialize with cpuid(0), then latch the low 32 bits of the TSC
 * into perf_start_count */
void perf_start(void);
#pragma aux perf_start = \
	"xor eax, eax" \
	"cpuid" \
	"rdtsc" \
	"mov [perf_start_count], eax" \
	modify[eax ebx ecx edx];

/* serialize, read the TSC again and store the elapsed cycle count
 * into perf_interval_count (32-bit wrap-around arithmetic) */
void perf_end(void);
#pragma aux perf_end = \
	"xor eax, eax" \
	"cpuid" \
	"rdtsc" \
	"sub eax, [perf_start_count]" \
	"mov [perf_interval_count], eax" \
	modify [eax ebx ecx edx];

/* software breakpoint (int 3) */
void debug_break(void);
#pragma aux debug_break = "int 3";
#endif

#ifdef __GNUC__
/* copy count 8-byte quadwords (NOT bytes) from src to dest using MMX moves.
 * count must be > 0 and the regions must not overlap.
 * Fixed vs. the original: the asm modified its "r" *input* operands and
 * wrote to memory without a "memory" clobber, which is undefined behavior
 * with GCC extended asm and can miscompile under optimization.  The
 * arguments are now copied into locals constrained "+r", and mm0/memory/cc
 * are declared clobbered.  The __asm__/__volatile__ spellings keep the
 * header usable under strict -std=c11 (where the plain `asm` keyword is
 * disabled). */
#define memcpy64(dest, src, count) do { \
	void *mc64_dst_ = (dest); \
	const void *mc64_src_ = (src); \
	unsigned long mc64_cnt_ = (unsigned long)(count); \
	__asm__ __volatile__ ( \
		"0:\n\t" \
		"movq (%1), %%mm0\n\t" \
		"movq %%mm0, (%0)\n\t" \
		"add $8, %1\n\t" \
		"add $8, %0\n\t" \
		"dec %2\n\t" \
		"jnz 0b\n\t" \
		"emms\n\t" \
		: "+r"(mc64_dst_), "+r"(mc64_src_), "+r"(mc64_cnt_) \
		: \
		: "mm0", "memory", "cc"); \
	} while (0)

/* serialize with cpuid(0), then latch the low 32 bits of the TSC
 * into perf_start_count */
#define perf_start()  __asm__ __volatile__ ( \
	"xor %%eax, %%eax\n" \
	"cpuid\n" \
	"rdtsc\n" \
	"mov %%eax, %0\n" \
	: "=m"(perf_start_count) \
	:: "%eax", "%ebx", "%ecx", "%edx")

/* serialize, read the TSC again and store the elapsed cycle count
 * into perf_interval_count (32-bit wrap-around arithmetic) */
#define perf_end() __asm__ __volatile__ ( \
	"xor %%eax, %%eax\n" \
	"cpuid\n" \
	"rdtsc\n" \
	"sub %1, %%eax\n" \
	"mov %%eax, %0\n" \
	: "=m"(perf_interval_count) \
	: "m"(perf_start_count) \
	: "%eax", "%ebx", "%ecx", "%edx")

/* software breakpoint (int3) */
#define debug_break() \
	__asm__ __volatile__ ("int $3")
#endif

#ifdef _MSC_VER
/* serialize with cpuid(0), then latch the low 32 bits of the TSC into
 * perf_start_count (32-bit MSVC inline assembly only).
 * NOTE(review): no memcpy64 is provided for MSVC builds -- confirm its
 * callers are compiled with Watcom or GCC only. */
#define perf_start() \
	do { \
		__asm { \
			xor eax, eax \
			cpuid \
			rdtsc \
			mov [perf_start_count], eax \
		} \
	} while(0)

/* serialize, read the TSC again and store the elapsed cycle count
 * into perf_interval_count (32-bit wrap-around arithmetic) */
#define perf_end() \
	do { \
		__asm { \
			xor eax, eax \
			cpuid \
			rdtsc \
			sub eax, [perf_start_count] \
			mov [perf_interval_count], eax \
		} \
	} while(0)

/* software breakpoint (int 3) */
#define debug_break() \
	do { \
		__asm { int 3 } \
	} while(0)
#endif

#endif	/* UTIL_H_ */