#ifdef __GNUC__
#define INLINE __inline
#define PACKED __attribute__((packed))

#elif defined(__WATCOMC__)
#define INLINE __inline
#define PACKED

#else
#define INLINE
#define PACKED
#endif

/* fast conversion of double -> 32bit int
 * for details see:
 *  - http://chrishecker.com/images/f/fb/Gdmfp.pdf
 *  - http://stereopsis.com/FPU.html#convert
 */
static INLINE int32_t cround64(double val)
{
	/* 2^52 + 2^51: the rounded integer lands in the low 32 bits of the
	 * mantissa (assumes little-endian and round-to-nearest FPU mode) */
	val += 6755399441055744.0;
	return *(int32_t*)&val;
}
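
/*
 * Usage sketch (hypothetical values, not part of the original header):
 * unlike a C cast, which truncates toward zero and forces a slow FPU
 * rounding-mode switch, cround64 rounds to nearest.
 *
 *   cround64(3.7)    -> 4
 *   cround64(-3.7)   -> -4
 *   (int32_t)3.7     -> 3
 */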

/* cycle counters written by the perf_start/perf_end pairs below */
extern uint32_t perf_start_count, perf_interval_count;

#ifdef __WATCOMC__
/* copies count quadwords (8 bytes each) from src to dest */
void memcpy64(void *dest, void *src, int count);
#pragma aux memcpy64 = \
	"cploop:" \
	"mov eax, [edx]" \
	"mov ebx, [edx + 4]" \
	"mov [edi], eax" \
	"mov [edi + 4], ebx" \
	"add edx, 8" \
	"add edi, 8" \
	"dec ecx" \
	"jnz cploop" \
	parm[edi][edx][ecx] \
	modify[eax ebx];
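
/*
 * Usage sketch (hypothetical buffers): count is in 8-byte quadwords, not
 * bytes, and must be at least 1 since the counter is decremented only
 * after the first copy. For example, blitting a 64000 byte mode 13h frame:
 *
 *   uint32_t backbuf[64000 / 4];
 *   memcpy64((void*)0xa0000, backbuf, 64000 / 8);
 */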

void perf_start(void);
#pragma aux perf_start = \
	"xor eax, eax" \
	"cpuid" \
	"rdtsc" \
	"mov [perf_start_count], eax" \
	modify[eax ebx ecx edx];

void perf_end(void);
#pragma aux perf_end = \
	"xor eax, eax" \
	"cpuid" \
	"rdtsc" \
	"sub eax, [perf_start_count]" \
	"mov [perf_interval_count], eax" \
	modify[eax ebx ecx edx];
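
/*
 * Usage sketch (draw_frame is a hypothetical function under measurement):
 *
 *   perf_start();
 *   draw_frame();
 *   perf_end();
 *   printf("%u cycles\n", (unsigned int)perf_interval_count);
 *
 * cpuid is only there as a serializing instruction, to stop out-of-order
 * execution from drifting rdtsc across the measured region. Only the low
 * 32 bits of the TSC are kept, so intervals must be short enough not to
 * wrap.
 */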

void debug_break(void);
#pragma aux debug_break = "int 3";
#endif	/* __WATCOMC__ */

#ifdef __GNUC__
#define memcpy64(dest, src, count) asm volatile ( \
	"0:\n\t" \
	"movq (%1), %%mm0\n\t" \
	"movq %%mm0, (%0)\n\t" \
	"add $8, %0\n\t" \
	"add $8, %1\n\t" \
	"dec %2\n\t" \
	"jnz 0b\n\t" \
	"emms\n\t" \
	:: "r"(dest), "r"(src), "r"(count))
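
/* the copy loop goes through an MMX register, and the MMX registers alias
 * the x87 stack, so the trailing emms is required before any subsequent
 * FPU code (cround64 included) can run correctly */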

#define perf_start() asm volatile ( \
	"xor %%eax, %%eax\n" \
	"cpuid\n" \
	"rdtsc\n" \
	"mov %%eax, %0\n" \
	: "=m"(perf_start_count) \
	:: "%eax", "%ebx", "%ecx", "%edx")

#define perf_end() asm volatile ( \
	"xor %%eax, %%eax\n" \
	"cpuid\n" \
	"rdtsc\n" \
	"sub %1, %%eax\n" \
	"mov %%eax, %0\n" \
	: "=m"(perf_interval_count) \
	: "m"(perf_start_count) \
	: "%eax", "%ebx", "%ecx", "%edx")

#define debug_break() \
	asm volatile ("int $3")
#endif	/* __GNUC__ */

#ifdef _MSC_VER
#define perf_start() \
	do { \
		__asm { \
			xor eax, eax \
			cpuid \
			rdtsc \
			mov [perf_start_count], eax \
		} \
	} while(0)

#define perf_end() \
	do { \
		__asm { \
			xor eax, eax \
			cpuid \
			rdtsc \
			sub eax, [perf_start_count] \
			mov [perf_interval_count], eax \
		} \
	} while(0)

#define debug_break() \
	do { \
		__asm { int 3 } \
	} while(0)
#endif	/* _MSC_VER */