Author: tkreuzer
Date: Sat Dec 20 21:22:17 2008
New Revision: 38220
URL: http://svn.reactos.org/svn/reactos?rev=38220&view=rev
Log:
Use "C" interface for intrinsic functions to avoid problems with C++. Fix _rotr
and _rotl.
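
[Editor's note, not part of the commit: a minimal sketch of the extern "C" guard pattern this change introduces, using the fixed _rotl8 prototype as the example declaration. Wrapping the prototypes this way keeps C++ translation units from applying name mangling, so the intrinsics resolve to the same symbols in both C and C++ code.]

    /* Sketch of the guard pattern; the declaration shown is illustrative. */
    #ifdef __cplusplus
    extern "C" {
    #endif

    unsigned char _rotl8(unsigned char value, unsigned char shift);

    #ifdef __cplusplus
    } /* extern "C" */
    #endif
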
Modified:
trunk/reactos/include/crt/mingw32/intrin_x86.h
Modified: trunk/reactos/include/crt/mingw32/intrin_x86.h
URL: http://svn.reactos.org/svn/reactos/trunk/reactos/include/crt/mingw32/intrin…
==============================================================================
--- trunk/reactos/include/crt/mingw32/intrin_x86.h [iso-8859-1] (original)
+++ trunk/reactos/include/crt/mingw32/intrin_x86.h [iso-8859-1] Sat Dec 20 21:22:17 2008
@@ -64,6 +64,10 @@
Pay attention to the type of barrier. Make it match with what Visual C++
would use in the same case
*/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
/*** Stack frame juggling ***/
#define _ReturnAddress() (__builtin_return_address(0))
@@ -819,14 +823,14 @@
return retval;
}
-static __inline__ __attribute__((always_inline)) unsigned char _rotl8(const unsigned char value, const unsigned char shift)
+static __inline__ __attribute__((always_inline)) unsigned char _rotl8(unsigned char value, unsigned char shift)
{
unsigned char retval;
__asm__("rolb %b[shift], %b[retval]" : [retval] "=rm" (retval) :
"[retval]" (value), [shift] "Nc" (shift));
return retval;
}
-static __inline__ __attribute__((always_inline)) unsigned short _rotl16(const unsigned short value, const unsigned char shift)
+static __inline__ __attribute__((always_inline)) unsigned short _rotl16(unsigned short value, unsigned char shift)
{
unsigned short retval;
__asm__("rolw %b[shift], %w[retval]" : [retval] "=rm" (retval) :
"[retval]" (value), [shift] "Nc" (shift));
@@ -834,14 +838,14 @@
}
#ifndef __MSVCRT__
-static __inline__ __attribute__((always_inline)) unsigned int _rotl(const unsigned int value, const int shift)
+static __inline__ __attribute__((always_inline)) unsigned int _rotl(unsigned int value, int shift)
{
unsigned long retval;
__asm__("roll %b[shift], %k[retval]" : [retval] "=rm" (retval) :
"[retval]" (value), [shift] "Nc" (shift));
return retval;
}
-static __inline__ __attribute__((always_inline)) unsigned long _rotr(const unsigned int value, const unsigned char shift)
+static __inline__ __attribute__((always_inline)) unsigned int _rotr(unsigned int value, int shift)
{
unsigned long retval;
__asm__("rorl %b[shift], %k[retval]" : [retval] "=rm" (retval) :
"[retval]" (value), [shift] "Nc" (shift));
@@ -849,14 +853,14 @@
}
#endif
-static __inline__ __attribute__((always_inline)) unsigned char _rotr8(const unsigned char value, const unsigned char shift)
+static __inline__ __attribute__((always_inline)) unsigned char _rotr8(unsigned char value, unsigned char shift)
{
unsigned char retval;
__asm__("rorb %b[shift], %b[retval]" : [retval] "=rm" (retval) :
"[retval]" (value), [shift] "Nc" (shift));
return retval;
}
-static __inline__ __attribute__((always_inline)) unsigned short _rotr16(const unsigned short value, const unsigned char shift)
+static __inline__ __attribute__((always_inline)) unsigned short _rotr16(unsigned short value, unsigned char shift)
{
unsigned short retval;
__asm__("rorw %b[shift], %w[retval]" : [retval] "=rm" (retval) :
"[retval]" (value), [shift] "Nc" (shift));
@@ -1292,6 +1296,10 @@
__asm__ __volatile__("sidt %0" : : "m"(*(short*)Destination) :
"memory");
}
+#ifdef __cplusplus
+}
+#endif
+
#endif /* KJK_INTRIN_X86_H_ */
/* EOF */
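
[Editor's note, not part of the commit: as a rough illustration of the behaviour expected from the corrected _rotl/_rotr prototypes, here is a small, portable reference in plain C, assuming a 32-bit unsigned int and shift counts reduced modulo 32 as the rol/ror instructions do. The ref_* names are made up for this sketch.]

    #include <stdio.h>

    /* Portable equivalent of a 32-bit left rotate. */
    static unsigned int ref_rotl(unsigned int value, int shift)
    {
        shift &= 31;
        return (value << shift) | (value >> ((32 - shift) & 31));
    }

    /* Portable equivalent of a 32-bit right rotate. */
    static unsigned int ref_rotr(unsigned int value, int shift)
    {
        shift &= 31;
        return (value >> shift) | (value << ((32 - shift) & 31));
    }

    int main(void)
    {
        printf("%08X\n", ref_rotl(0x80000001u, 4)); /* prints 00000018 */
        printf("%08X\n", ref_rotr(0x80000001u, 4)); /* prints 18000000 */
        return 0;
    }
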