@@ -463,14 +463,6 @@ _BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
   *_Index = 31 - __builtin_clzl(_Mask);
   return 1;
 }
-static __inline__ unsigned short __DEFAULT_FN_ATTRS
-__popcnt16(unsigned short _Value) {
-  return __builtin_popcount((int)_Value);
-}
-static __inline__ unsigned int __DEFAULT_FN_ATTRS
-__popcnt(unsigned int _Value) {
-  return __builtin_popcount(_Value);
-}
 static __inline__ unsigned char __DEFAULT_FN_ATTRS
 _bittest(long const *_BitBase, long _BitPos) {
   return (*_BitBase >> _BitPos) & 1;
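(Annotation, not part of the commit.) This hunk deletes the __popcnt16 and __popcnt wrappers; the excerpt alone does not show where, if anywhere, they are redefined. The surrounding context shows the header's general pattern: each MSVC intrinsic is implemented on top of a GCC/Clang builtin, e.g. _BitScanReverse reports the index of the highest set bit as 31 - clz(mask). Below is a minimal standalone sketch of that mapping; it uses __builtin_clz on unsigned int instead of the header's __builtin_clzl (which relies on long being 32 bits on Windows targets), and the helper name is invented for illustration.

#include <assert.h>

/* Sketch of the _BitScanReverse mapping (name is mine, not the header's).
   Precondition: mask != 0, since the builtin's result is undefined for 0. */
static unsigned scan_reverse32(unsigned mask) {
  return 31 - __builtin_clz(mask); /* index of the highest set bit */
}

int main(void) {
  assert(scan_reverse32(1u) == 0);
  assert(scan_reverse32(0x80000000u) == 31);
  assert(scan_reverse32(0x00F00000u) == 23);
  return 0;
}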
@@ -513,11 +505,6 @@ _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
   *_Index = 63 - __builtin_clzll(_Mask);
   return 1;
 }
-static __inline__
-unsigned __int64 __DEFAULT_FN_ATTRS
-__popcnt64(unsigned __int64 _Value) {
-  return __builtin_popcountll(_Value);
-}
 static __inline__ unsigned char __DEFAULT_FN_ATTRS
 _bittest64(__int64 const *_BitBase, __int64 _BitPos) {
   return (*_BitBase >> _BitPos) & 1;
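(Annotation, not part of the commit.) This hunk likewise drops __popcnt64, whose whole body was a thin alias for __builtin_popcountll. A standalone sketch of that equivalence, with an invented helper name:

#include <assert.h>

/* Sketch of the removed __popcnt64 wrapper: count the set bits
   of a 64-bit value via the builtin. */
static unsigned long long popcount64(unsigned long long value) {
  return (unsigned long long)__builtin_popcountll(value);
}

int main(void) {
  assert(popcount64(0xFFull) == 8);
  assert(popcount64(0x8000000000000001ull) == 2);
  return 0;
}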
@@ -546,63 +533,63 @@ _interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
       __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
   return (_PrevVal >> _BitPos) & 1;
 }
-/*----------------------------------------------------------------------------*\
-|* Interlocked Exchange Add
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
-  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked Exchange Sub
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
-  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked Increment
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedIncrement64(__int64 volatile *_Value) {
-  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked Decrement
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedDecrement64(__int64 volatile *_Value) {
-  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked And
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
-  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked Or
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
-  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked Xor
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
-  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
-}
-/*----------------------------------------------------------------------------*\
-|* Interlocked Exchange
-\*----------------------------------------------------------------------------*/
-static __inline__ __int64 __DEFAULT_FN_ATTRS
-_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
-  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
-  return _Value;
-}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Exchange Add
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
+  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Exchange Sub
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
+  return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Increment
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedIncrement64(__int64 volatile *_Value) {
+  return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Decrement
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedDecrement64(__int64 volatile *_Value) {
+  return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked And
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
+  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Or
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
+  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Xor
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
+  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
+}
+/*----------------------------------------------------------------------------*\
+|* Interlocked Exchange
+\*----------------------------------------------------------------------------*/
+static __inline__ __int64 __DEFAULT_FN_ATTRS
+_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
+  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
+  return _Value;
+}
 #endif
 /*----------------------------------------------------------------------------*\
 |* Barriers
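(Annotation, not part of the commit.) The _Interlocked*64 block in the last hunk implements the MSVC 64-bit interlocked family directly on the GCC/Clang __atomic_* builtins with sequentially consistent ordering. Note the deliberate asymmetry: ExchangeAdd/And/Or/Xor use the fetch_* forms and return the previous value, while Increment/Decrement use the *_fetch forms and return the updated value, matching MSVC semantics. A minimal standalone sketch of both shapes, with hypothetical helper names:

#include <assert.h>

typedef long long i64;

/* Same shape as the header's _InterlockedExchangeAdd64: atomically
   add and return the *previous* value, with seq_cst ordering. */
static i64 fetch_add64(i64 volatile *addend, i64 value) {
  return __atomic_fetch_add(addend, value, __ATOMIC_SEQ_CST);
}

/* Same shape as _InterlockedIncrement64: add_fetch returns the
   *new* value, which is what the MSVC intrinsic promises. */
static i64 increment64(i64 volatile *value) {
  return __atomic_add_fetch(value, 1, __ATOMIC_SEQ_CST);
}

int main(void) {
  i64 v = 10;
  assert(fetch_add64(&v, 5) == 10); /* returns the old value */
  assert(v == 15);
  assert(increment64(&v) == 16);    /* returns the new value */
  return 0;
}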