BVB Source Codes

mars — ops_msvc_arm.hpp source code listing

Return | Download: ops_msvc_arm.hpp from the mars project (file type: .hpp)
  1. /*
  2.  * Distributed under the Boost Software License, Version 1.0.
  3.  * (See accompanying file LICENSE_1_0.txt or copy at
  4.  * http://www.boost.org/LICENSE_1_0.txt)
  5.  *
  6.  * Copyright (c) 2009 Helge Bahmann
  7.  * Copyright (c) 2012 Tim Blechmann
  8.  * Copyright (c) 2014 Andrey Semashev
  9.  */
  10. /*!
  11.  * \file   atomic/detail/ops_msvc_arm.hpp
  12.  *
  13.  * This header contains implementation of the \c operations template.
  14.  */
  15.  
  16. #ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
  17. #define BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
  18.  
  19. #include <intrin.h>
  20. #include <boost/memory_order.hpp>
  21. #include <boost/type_traits/make_signed.hpp>
  22. #include <boost/atomic/detail/config.hpp>
  23. #include <boost/atomic/detail/interlocked.hpp>
  24. #include <boost/atomic/detail/storage_type.hpp>
  25. #include <boost/atomic/detail/operations_fwd.hpp>
  26. #include <boost/atomic/capabilities.hpp>
  27. #include <boost/atomic/detail/ops_msvc_common.hpp>
  28.  
  29. #ifdef BOOST_HAS_PRAGMA_ONCE
  30. #pragma once
  31. #endif
  32.  
// Thin wrappers over the MSVC __iso_volatile_* intrinsics, which perform plain
// volatile loads/stores of the given width with no implied hardware memory barrier
// (unlike /volatile:ms semantics). The required barriers are issued separately by
// the fence_* helpers below. The casts normalize the storage pointer/value to the
// fixed-width __intN types the intrinsics expect.
#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))
  41.  
  42. namespace mars_boost {} namespace boost = mars_boost; namespace mars_boost {
  43. namespace atomics {
  44. namespace detail {
  45.  
// A note about memory_order_consume. Technically, this architecture allows avoiding
// an unnecessary memory barrier after a consume load, since it supports data dependency
// ordering. However, some compiler optimizations may break seemingly valid code that
// relies on data dependency tracking by injecting bogus branches to aid out-of-order
// execution. This may happen not only in Boost.Atomic code but also in the user's code,
// which we have no control over. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
  53.  
// Common building blocks shared by all MSVC/ARM atomic operation specializations:
// a hardware full fence and helpers that emit the fences required by a given
// memory_order around plain loads and stores.
//
// NOTE(review): the bitwise tests below (order & memory_order_release, the << 1u
// promotion) rely on Boost's flag-like encoding of the memory_order enum, where
// memory_order_acquire is memory_order_consume shifted left by one bit and
// acq_rel/seq_cst contain both the acquire and release flags — verify against
// boost/memory_order.hpp if that encoding ever changes.
struct msvc_arm_operations_base
{
    // Issues a full hardware memory barrier (DMB ISH).
    static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
    {
        __dmb(0xB); // _ARM_BARRIER_ISH, see armintr.h from MSVC 11 and later
    }

    // Fence emitted before a plain store: a full barrier when the order carries
    // the release flag (release, acq_rel, seq_cst). The compiler barriers keep
    // the compiler from moving surrounding accesses across the fence decision.
    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((order & memory_order_release) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    // Fence emitted after a plain store: only seq_cst requires a trailing full barrier.
    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    // Fence emitted after a plain load: a full barrier for any order with the
    // consume or acquire flag set (consume is promoted to acquire, see note above).
    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((order & (memory_order_consume | memory_order_acquire)) != 0)
            hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    // Collapses a CAS's success/failure orders into the single order used to pick
    // the interlocked intrinsic variant. The OR merges the flags of both orders;
    // the << 1u turns a set consume bit into the acquire bit (consume promotion).
    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
        return static_cast< memory_order >(((failure_order | success_order) & ~memory_order_consume) | (((failure_order | success_order) & memory_order_consume) << 1u));
    }
};
  97.  
  98. template< typename T, typename Derived >
  99. struct msvc_arm_operations :
  100.     public msvc_arm_operations_base
  101. {
  102.     typedef T storage_type;
  103.  
  104.     static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  105.     {
  106.         typedef typename make_signed< storage_type >::type signed_storage_type;
  107.         return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
  108.     }
  109.  
  110.     static BOOST_FORCEINLINE bool compare_exchange_weak(
  111.         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
  112.     {
  113.         return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
  114.     }
  115.  
  116.     static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  117.     {
  118.         return !!Derived::exchange(storage, (storage_type)1, order);
  119.     }
  120.  
  121.     static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  122.     {
  123.         Derived::store(storage, (storage_type)0, order);
  124.     }
  125.  
  126.     static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
  127.     {
  128.         return true;
  129.     }
  130. };
  131.  
// 8-bit atomic operations. Loads and stores are plain volatile accesses
// surrounded by the fences appropriate for the requested order; read-modify-write
// operations dispatch on the order to pick the matching _Interlocked* intrinsic
// variant (relaxed/_nf, acquire/_acq, release/_rel, or the full-barrier default)
// via the BOOST_ATOMIC_INTERLOCKED_* macros.
template< bool Signed >
struct operations< 1u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;

    // Plain volatile store bracketed by the order-dependent fences.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    // Plain volatile load followed by the order-dependent fence.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    // Atomically adds v; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }

    // Atomically replaces the value with v; returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    // Strong CAS: the interlocked intrinsic returns the value observed before the
    // operation; success is detected by comparing it to the caller's expected value,
    // which is updated with the observed value on return.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    // Atomically ANDs v into the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    // Atomically ORs v into the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    // Atomically XORs v into the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};
  297.  
// 16-bit atomic operations; structurally identical to the 8-bit specialization
// but using the 16-bit volatile load/store wrappers and the *16 interlocked
// macro variants selected per memory order.
template< bool Signed >
struct operations< 2u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;

    // Plain volatile store bracketed by the order-dependent fences.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    // Plain volatile load followed by the order-dependent fence.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    // Atomically adds v; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    // Atomically replaces the value with v; returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    // Strong CAS: the interlocked intrinsic returns the value observed before the
    // operation; success is detected by comparing it to the caller's expected value,
    // which is updated with the observed value on return.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    // Atomically ANDs v into the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    // Atomically ORs v into the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    // Atomically XORs v into the value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};
  463.  
  464. template< bool Signed >
  465. struct operations< 4u, Signed > :
  466.     public msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
  467. {
  468.     typedef msvc_arm_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
  469.     typedef typename base_type::storage_type storage_type;
  470.     typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;
  471.  
  472.     static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  473.     {
  474.         base_type::fence_before_store(order);
  475.         BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
  476.         base_type::fence_after_store(order);
  477.     }
  478.  
  479.     static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
  480.     {
  481.         storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
  482.         base_type::fence_after_load(order);
  483.         return v;
  484.     }
  485.  
  486.     static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  487.     {
  488.         switch (order)
  489.         {
  490.         case memory_order_relaxed:
  491.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
  492.             break;
  493.         case memory_order_consume:
  494.         case memory_order_acquire:
  495.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
  496.             break;
  497.         case memory_order_release:
  498.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
  499.             break;
  500.         case memory_order_acq_rel:
  501.         case memory_order_seq_cst:
  502.         default:
  503.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
  504.             break;
  505.         }
  506.         return v;
  507.     }
  508.  
  509.     static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  510.     {
  511.         switch (order)
  512.         {
  513.         case memory_order_relaxed:
  514.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
  515.             break;
  516.         case memory_order_consume:
  517.         case memory_order_acquire:
  518.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
  519.             break;
  520.         case memory_order_release:
  521.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
  522.             break;
  523.         case memory_order_acq_rel:
  524.         case memory_order_seq_cst:
  525.         default:
  526.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
  527.             break;
  528.         }
  529.         return v;
  530.     }
  531.  
  532.     static BOOST_FORCEINLINE bool compare_exchange_strong(
  533.         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
  534.     {
  535.         storage_type previous = expected, old_val;
  536.  
  537.         switch (cas_common_order(success_order, failure_order))
  538.         {
  539.         case memory_order_relaxed:
  540.             old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
  541.             break;
  542.         case memory_order_consume:
  543.         case memory_order_acquire:
  544.             old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
  545.             break;
  546.         case memory_order_release:
  547.             old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
  548.             break;
  549.         case memory_order_acq_rel:
  550.         case memory_order_seq_cst:
  551.         default:
  552.             old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
  553.             break;
  554.         }
  555.         expected = old_val;
  556.  
  557.         return (previous == old_val);
  558.     }
  559.  
  560.     static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  561.     {
  562.         switch (order)
  563.         {
  564.         case memory_order_relaxed:
  565.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
  566.             break;
  567.         case memory_order_consume:
  568.         case memory_order_acquire:
  569.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
  570.             break;
  571.         case memory_order_release:
  572.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
  573.             break;
  574.         case memory_order_acq_rel:
  575.         case memory_order_seq_cst:
  576.         default:
  577.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
  578.             break;
  579.         }
  580.         return v;
  581.     }
  582.  
  583.     static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  584.     {
  585.         switch (order)
  586.         {
  587.         case memory_order_relaxed:
  588.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
  589.             break;
  590.         case memory_order_consume:
  591.         case memory_order_acquire:
  592.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
  593.             break;
  594.         case memory_order_release:
  595.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
  596.             break;
  597.         case memory_order_acq_rel:
  598.         case memory_order_seq_cst:
  599.         default:
  600.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
  601.             break;
  602.         }
  603.         return v;
  604.     }
  605.  
  606.     static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  607.     {
  608.         switch (order)
  609.         {
  610.         case memory_order_relaxed:
  611.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
  612.             break;
  613.         case memory_order_consume:
  614.         case memory_order_acquire:
  615.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
  616.             break;
  617.         case memory_order_release:
  618.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
  619.             break;
  620.         case memory_order_acq_rel:
  621.         case memory_order_seq_cst:
  622.         default:
  623.             v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
  624.             break;
  625.         }
  626.         return v;
  627.     }
  628. };
  629.  
//! 64-bit (8-byte) atomic operations for MSVC on ARM.
//! Loads/stores use the __iso_volatile_* intrinsics (wrapped by the
//! BOOST_ATOMIC_DETAIL_ARM_LOAD64/STORE64 macros) bracketed by fences supplied
//! by the base class; read-modify-write operations dispatch on the requested
//! memory_order to the matching _Interlocked*64 intrinsic variant
//! (_RELAXED/_ACQUIRE/_RELEASE suffixed macros, with the unsuffixed fully
//! fenced form for acq_rel/seq_cst and as the default).
template< bool Signed >
struct operations< 8u, Signed > :
    public msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef msvc_arm_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    //! Atomic store: fence before/after per \c order, plain 64-bit volatile store in between.
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    //! Atomic load: plain 64-bit volatile load followed by a fence appropriate for \c order.
    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    //! Atomically adds \a v to the stored value and returns the previous value.
    //! Note: memory_order_consume is promoted to acquire (case fall-through below).
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            // Fully fenced variant; also the safe fallback for unknown order values.
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    //! Atomically replaces the stored value with \a v and returns the previous value.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    //! Strong compare-and-swap: on mismatch, \a expected receives the observed value.
    //! A single interlocked CAS intrinsic is used, selected by the combined
    //! success/failure ordering (base_type::cas_common_order), so there is one
    //! ordering for both outcomes.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        // The intrinsic returns the value observed in storage; report it back
        // to the caller regardless of success (standard CAS contract).
        expected = old_val;

        return (previous == old_val);
    }

    //! Atomically ANDs \a v into the stored value and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    //! Atomically ORs \a v into the stored value and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    //! Atomically XORs \a v into the stored value and returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};
  795.  
  796.  
  797. BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
  798. {
  799.     BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
  800.     if (order != memory_order_relaxed)
  801.         msvc_arm_operations_base::hardware_full_fence();
  802.     BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
  803. }
  804.  
  805. BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
  806. {
  807.     if (order != memory_order_relaxed)
  808.         BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
  809. }
  810.  
  811. } // namespace detail
  812. } // namespace atomics
  813. } // namespace mars_boost
  814.  
  815. #undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
  816. #undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
  817. #undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
  818. #undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
  819. #undef BOOST_ATOMIC_DETAIL_ARM_STORE8
  820. #undef BOOST_ATOMIC_DETAIL_ARM_STORE16
  821. #undef BOOST_ATOMIC_DETAIL_ARM_STORE32
  822. #undef BOOST_ATOMIC_DETAIL_ARM_STORE64
  823.  
  824. #endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_ARM_HPP_INCLUDED_
  825.  
Download ops_msvc_arm.hpp Source code - Download mars Source code
Related Source Codes/Software:
Hero - Elegant transition library for iOS & tvOS 2017-06-09
deep-photo-styletransfer - Code and data for paper "Deep Photo Style Transfer... 2017-06-09
mastodon - A GNU Social-compatible microblogging server ... 2017-06-09
plyr - A simple HTML5, YouTube and Vimeo player ... 2017-06-08
prepack - Prepack is a partial evaluator for JavaScript. Pre... 2017-06-08
Public-APIs - 2017-06-09
lottie-ios - An iOS library to natively render After Effects ve... 2017-06-09
Awesome-Hacking - A collection of various awesome lists for hackers,... 2017-06-09
algorithms - Minimal examples of data structures and algorithms... 2017-06-10
lectures - Oxford Deep NLP 2017 course 2017-06-10
CRYENGINE - CRYENGINE is a powerful real-time game development... 2017-06-11
postal - 2017-06-11
reactide - Reactide is the first dedicated IDE for React web ... 2017-06-11
rkt - rkt is a pod-native container engine for Linux. It... 2017-06-11
uWebSockets - Tiny WebSockets https://for... 2017-06-11
realworld - TodoMVC for the RealWorld - Exemplary fullstack Me... 2017-06-11
goreplay - GoReplay is an open-source tool for capturing and ... 2017-06-10
pyenv - Simple Python version management 2017-06-10
redux-saga - An alternative side effect model for Redux apps ... 2017-06-10
angular-starter - 2017-06-10

 Back to top