Removing #pragma clang system_header after verifying that clang already
treats this header as a system header.

http://llvm-reviews.chandlerc.com/D1766

CHANGE SINCE LAST DIFF
  http://llvm-reviews.chandlerc.com/D1766?vs=4502&id=4521#toc

Files:
  lib/Headers/Intrin.h
Index: lib/Headers/Intrin.h
===================================================================
--- lib/Headers/Intrin.h
+++ lib/Headers/Intrin.h
@@ -314,7 +314,7 @@
                                             void *_Exchange, void *_Comparand);
 long _InterlockedOr_np(long volatile *_Value, long _Mask);
 short _InterlockedOr16_np(short volatile *_Value, short _Mask);
-__int64 _InterlockedOr64_np(__int64 volatile *_Value, _int64 _Mask);
+__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
 char _InterlockedOr8_np(char volatile *_Value, char _Mask);
 long _InterlockedXor_np(long volatile *_Value, long _Mask);
 short _InterlockedXor16_np(short volatile *_Value, short _Mask);
@@ -343,6 +343,397 @@
 
 #endif /* __x86_64__ */
 
+/*----------------------------------------------------------------------------*\
+|* Bit Twiddling
+\*----------------------------------------------------------------------------*/
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_rotl8(unsigned char _Value, unsigned char _Shift) {
+  _Shift &= 0x7;
+  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_rotr8(unsigned char _Value, unsigned char _Shift) {
+  _Shift &= 0x7;
+  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
+}
+static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
+_rotl16(unsigned short _Value, unsigned char _Shift) {
+  _Shift &= 0xf;
+  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
+}
+static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
+_rotr16(unsigned short _Value, unsigned char _Shift) {
+  _Shift &= 0xf;
+  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
+}
+static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
+_rotl(unsigned int _Value, int _Shift) {
+  _Shift &= 0x1f;
+  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
+}
+static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
+_rotr(unsigned int _Value, int _Shift) {
+  _Shift &= 0x1f;
+  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
+}
+static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
+_lrotl(unsigned long _Value, int _Shift) {
+  _Shift &= 0x1f;
+  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
+}
+static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
+_lrotr(unsigned long _Value, int _Shift) {
+  _Shift &= 0x1f;
+  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
+}
+static
+__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
+_rotl64(unsigned __int64 _Value, int _Shift) {
+  _Shift &= 0x3f;
+  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
+}
+static
+__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
+_rotr64(unsigned __int64 _Value, int _Shift) {
+  _Shift &= 0x3f;
+  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
+}
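+/* Example: _rotl8(0x80, 1) yields 0x01 and _rotr8(0x01, 1) yields 0x80.
+ * The zero-shift check matters most for the 32- and 64-bit variants,
+ * where the complementary shift would otherwise be by the full bit
+ * width, which is undefined in C. */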
+/*----------------------------------------------------------------------------*\
+|* Bit Counting and Testing
+\*----------------------------------------------------------------------------*/
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
+  if (!_Mask)
+    return 0;
+  *_Index = __builtin_ctzl(_Mask);
+  return 1;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
+  if (!_Mask)
+    return 0;
+  *_Index = 31 - __builtin_clzl(_Mask);
+  return 1;
+}
+static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
+_lzcnt_u32(unsigned int a) {
+  if (!a)
+    return 32;
+  return __builtin_clz(a);
+}
+static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
+__popcnt16(unsigned short value) {
+  return __builtin_popcount((int)value);
+}
+static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
+__popcnt(unsigned int value) {
+  return __builtin_popcount(value);
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittest(long const *a, long b) {
+  return (*a >> b) & 1;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittestandcomplement(long *a, long b) {
+  unsigned char x = (*a >> b) & 1;
+  *a = *a ^ (1 << b);
+  return x;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittestandreset(long *a, long b) {
+  unsigned char x = (*a >> b) & 1;
+  *a = *a & ~(1 << b);
+  return x;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittestandset(long *a, long b) {
+  unsigned char x = (*a >> b) & 1;
+  *a = *a | (1 << b);
+  return x;
+}
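+/* Example: for _Mask == 0x10, _BitScanForward stores 4 in *_Index and
+ * returns 1; for _Mask == 0 it returns 0 and leaves *_Index unwritten,
+ * so callers must check the return value before reading the index, as
+ * the MSVC documentation specifies. */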
+#ifdef __x86_64__
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
+  if (!_Mask)
+    return 0;
+  *_Index = __builtin_ctzll(_Mask);
+  return 1;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
+  if (!_Mask)
+    return 0;
+  *_Index = 63 - __builtin_clzll(_Mask);
+  return 1;
+}
+static
+__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
+_lzcnt_u64(unsigned __int64 a) {
+  if (!a)
+    return 64;
+  return __builtin_clzll(a);
+}
+static __inline__
+unsigned __int64 __attribute__((__always_inline__, __nodebug__))
+__popcnt64(unsigned __int64 value) {
+  return __builtin_popcountll(value);
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittest64(__int64 const *a, __int64 b) {
+  return (*a >> b) & 1;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittestandcomplement64(__int64 *a, __int64 b) {
+  unsigned char x = (*a >> b) & 1;
+  *a = *a ^ (1ll << b);
+  return x;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittestandreset64(__int64 *a, __int64 b) {
+  unsigned char x = (*a >> b) & 1;
+  *a = *a & ~(1ll << b);
+  return x;
+}
+static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
+_bittestandset64(__int64 *a, __int64 b) {
+  unsigned char x = (*a >> b) & 1;
+  *a = *a | (1ll << b);
+  return x;
+}
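+/* Note: the 64-bit variants shift a 1ll literal; shifting a plain int 1
+ * by b >= 32 would be undefined, so the wider literal is required. */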
+#endif
+/*----------------------------------------------------------------------------*\
+|* Interlocked Exchange Add
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
+  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
+  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeAdd(long volatile *_Addend, long _Value) {
+  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
+  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
+}
+#endif
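+/* These return the value held *before* the addition, per MSVC semantics:
+ * __atomic_add_fetch returns the new value, so subtracting _Value
+ * recovers the original. E.g. with x == 5,
+ * _InterlockedExchangeAdd(&x, 3) returns 5 and leaves x == 8. */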
+/*----------------------------------------------------------------------------*\
+|* Interlocked Exchange Sub
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
+  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
+  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
+  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
+  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
+}
+#endif
+/*----------------------------------------------------------------------------*\
+|* Interlocked Increment
+\*----------------------------------------------------------------------------*/
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedIncrement16(short volatile *_Value) {
+  return __atomic_add_fetch(_Value, 1, 0);
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedIncrement(long volatile *_Value) {
+  return __atomic_add_fetch(_Value, 1, 0);
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedIncrement64(__int64 volatile *_Value) {
+  return __atomic_add_fetch(_Value, 1, 0);
+}
+#endif
+/*----------------------------------------------------------------------------*\
+|* Interlocked Decrement
+\*----------------------------------------------------------------------------*/
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedDecrement16(short volatile *_Value) {
+  return __atomic_sub_fetch(_Value, 1, 0);
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedDecrement(long volatile *_Value) {
+  return __atomic_sub_fetch(_Value, 1, 0);
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedDecrement64(__int64 volatile *_Value) {
+  return __atomic_sub_fetch(_Value, 1, 0);
+}
+#endif
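+/* Unlike the ExchangeAdd/ExchangeSub family, Increment and Decrement
+ * return the *new* value, so the __atomic_*_fetch result is returned
+ * unadjusted. */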
+/*----------------------------------------------------------------------------*\
+|* Interlocked And
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedAnd8(char volatile *_Value, char _Mask) {
+  return __atomic_and_fetch(_Value, _Mask, 0);
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedAnd16(short volatile *_Value, short _Mask) {
+  return __atomic_and_fetch(_Value, _Mask, 0);
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedAnd(long volatile *_Value, long _Mask) {
+  return __atomic_and_fetch(_Value, _Mask, 0);
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
+  return __atomic_and_fetch(_Value, _Mask, 0);
+}
+#endif
+/*----------------------------------------------------------------------------*\
+|* Interlocked Or
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedOr8(char volatile *_Value, char _Mask) {
+  return __atomic_or_fetch(_Value, _Mask, 0);
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedOr16(short volatile *_Value, short _Mask) {
+  return __atomic_or_fetch(_Value, _Mask, 0);
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedOr(long volatile *_Value, long _Mask) {
+  return __atomic_or_fetch(_Value, _Mask, 0);
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
+  return __atomic_or_fetch(_Value, _Mask, 0);
+}
+#endif
+/*----------------------------------------------------------------------------*\
+|* Interlocked Xor
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedXor8(char volatile *_Value, char _Mask) {
+  return __atomic_xor_fetch(_Value, _Mask, 0);
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedXor16(short volatile *_Value, short _Mask) {
+  return __atomic_xor_fetch(_Value, _Mask, 0);
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedXor(long volatile *_Value, long _Mask) {
+  return __atomic_xor_fetch(_Value, _Mask, 0);
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
+  return __atomic_xor_fetch(_Value, _Mask, 0);
+}
+#endif
+/*----------------------------------------------------------------------------*\
+|* Interlocked Exchange
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchange8(char volatile *_Target, char _Value) {
+  __atomic_exchange(_Target, &_Value, &_Value, 0);
+  return _Value;
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchange16(short volatile *_Target, short _Value) {
+  __atomic_exchange(_Target, &_Value, &_Value, 0);
+  return _Value;
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchange(long volatile *_Target, long _Value) {
+  __atomic_exchange(_Target, &_Value, &_Value, 0);
+  return _Value;
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
+  __atomic_exchange(_Target, &_Value, &_Value, 0);
+  return _Value;
+}
+#endif
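+/* _Value doubles as the input and the output slot: Clang reads the new
+ * value from the second argument before storing the old contents of
+ * *_Target through the third, so aliasing the two is benign and the
+ * function returns the prior value, per MSVC semantics. */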
+/*----------------------------------------------------------------------------*\
+|* Interlocked Compare Exchange
+\*----------------------------------------------------------------------------*/
+static __inline__ char __attribute__((__always_inline__, __nodebug__))
+_InterlockedCompareExchange8(char volatile *_Destination,
+                             char _Exchange, char _Comparand) {
+  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+  return _Comparand;
+}
+static __inline__ short __attribute__((__always_inline__, __nodebug__))
+_InterlockedCompareExchange16(short volatile *_Destination,
+                              short _Exchange, short _Comparand) {
+  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+  return _Comparand;
+}
+static __inline__ long __attribute__((__always_inline__, __nodebug__))
+_InterlockedCompareExchange(long volatile *_Destination,
+                            long _Exchange, long _Comparand) {
+  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+  return _Comparand;
+}
+#ifdef __x86_64__
+static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
+_InterlockedCompareExchange64(__int64 volatile *_Destination,
+                              __int64 _Exchange, __int64 _Comparand) {
+  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
+  return _Comparand;
+}
+#endif
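+/* On failure __atomic_compare_exchange writes the observed value back
+ * into _Comparand, and on success _Comparand already equals the original
+ * value, so returning _Comparand always yields the destination's prior
+ * contents, as the MSVC intrinsic specifies. */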
+/*----------------------------------------------------------------------------*\
+|* Misc
+\*----------------------------------------------------------------------------*/
+static __inline__ void * __attribute__((__always_inline__, __nodebug__))
+_AddressOfReturnAddress(void) {
+  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
+}
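+/* This assumes the return address sits one pointer above the frame
+ * pointer, which holds for standard x86/x86-64 frames; the result is
+ * unreliable when frame pointers are omitted. */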
+static __inline__ void * __attribute__((__always_inline__, __nodebug__))
+_ReturnAddress(void) {
+  return __builtin_return_address(0);
+}
+
 #ifdef __cplusplus
 }
 #endif
_______________________________________________
cfe-commits mailing list
[email protected]
http://lists.cs.uiuc.edu/mailman/listinfo/cfe-commits
