BVB Source Codes

mars — source code listing for ops_gcc_x86.hpp

Return / Download: the ops_gcc_x86.hpp source file from the mars source code (file type: .hpp)
  1. /*
  2.  * Distributed under the Boost Software License, Version 1.0.
  3.  * (See accompanying file LICENSE_1_0.txt or copy at
  4.  * http://www.boost.org/LICENSE_1_0.txt)
  5.  *
  6.  * Copyright (c) 2009 Helge Bahmann
  7.  * Copyright (c) 2012 Tim Blechmann
  8.  * Copyright (c) 2014 Andrey Semashev
  9.  */
  10. /*!
  11.  * \file   atomic/detail/ops_gcc_x86.hpp
  12.  *
  13.  * This header contains implementation of the \c operations template.
  14.  */
  15.  
  16. #ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_X86_HPP_INCLUDED_
  17. #define BOOST_ATOMIC_DETAIL_OPS_GCC_X86_HPP_INCLUDED_
  18.  
  19. #include <boost/memory_order.hpp>
  20. #include <boost/atomic/detail/config.hpp>
  21. #include <boost/atomic/detail/storage_type.hpp>
  22. #include <boost/atomic/detail/operations_fwd.hpp>
  23. #include <boost/atomic/capabilities.hpp>
  24. #if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B) || defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
  25. #include <boost/atomic/detail/ops_gcc_x86_dcas.hpp>
  26. #include <boost/atomic/detail/ops_cas_based.hpp>
  27. #endif
  28.  
  29. #ifdef BOOST_HAS_PRAGMA_ONCE
  30. #pragma once
  31. #endif
  32.  
// Scratch register used by the CAS-based fetch_and/fetch_or/fetch_xor loops
// below. The loops hardcode dl/dx/edx/rdx as the temporary, so the clobber
// list must name the register by its full width for the target ABI.
#if defined(__x86_64__)
#define BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "rdx"
#else
#define BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "edx"
#endif
  38.  
  39. namespace mars_boost {} namespace boost = mars_boost; namespace mars_boost {
  40. namespace atomics {
  41. namespace detail {
  42.  
//! Base class providing the compiler-only barriers shared by all x86 operations.
struct gcc_x86_operations_base
{
    // Emitted before a store. x86 stores already have release semantics in
    // hardware, so release ordering only requires that the compiler not
    // reorder preceding accesses past the store; the empty asm with a
    // "memory" clobber is exactly that compiler-only barrier.
    // NOTE(review): the bit-test relies on Boost's memory_order constants
    // being flags (acq_rel/seq_cst including the release bit) — confirm
    // against boost/memory_order.hpp.
    static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & memory_order_release) != 0)
            __asm__ __volatile__ ("" ::: "memory");
    }

    // Emitted after a load. x86 loads already have acquire semantics, so an
    // acquire order likewise needs only a compiler barrier.
    static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
    {
        if ((order & memory_order_acquire) != 0)
            __asm__ __volatile__ ("" ::: "memory");
    }
};
  57.  
  58. template< typename T, typename Derived >
  59. struct gcc_x86_operations :
  60.     public gcc_x86_operations_base
  61. {
  62.     typedef T storage_type;
  63.  
  64.     static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  65.     {
  66.         if (order != memory_order_seq_cst)
  67.         {
  68.             fence_before(order);
  69.             storage = v;
  70.             fence_after(order);
  71.         }
  72.         else
  73.         {
  74.             Derived::exchange(storage, v, order);
  75.         }
  76.     }
  77.  
  78.     static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
  79.     {
  80.         storage_type v = storage;
  81.         fence_after(order);
  82.         return v;
  83.     }
  84.  
  85.     static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
  86.     {
  87.         return Derived::fetch_add(storage, -v, order);
  88.     }
  89.  
  90.     static BOOST_FORCEINLINE bool compare_exchange_weak(
  91.         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
  92.     {
  93.         return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
  94.     }
  95.  
  96.     static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  97.     {
  98.         return !!Derived::exchange(storage, (storage_type)1, order);
  99.     }
  100.  
  101.     static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
  102.     {
  103.         store(storage, (storage_type)0, order);
  104.     }
  105.  
  106.     static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
  107.     {
  108.         return true;
  109.     }
  110. };
  111.  
//! 8-bit atomic operations, implemented with byte-sized lock-prefixed instructions.
template< bool Signed >
struct operations< 1u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 1u, Signed >::aligned aligned_storage_type;

    // "lock xaddb" atomically adds v to storage and leaves the previous
    // value in v. Lock-prefixed instructions are full barriers on x86, so
    // the requested memory_order is ignored. The "q" constraint restricts
    // v to a register with a byte sub-register (a/b/c/d on 32-bit x86).
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddb %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    // "xchg" with a memory operand is implicitly locked, hence no explicit
    // lock prefix.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgb %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    // "lock cmpxchgb": the comparand is pinned to al by "+a". On a match
    // ZF is set and desired is stored; otherwise the observed value lands
    // in al. "sete" materializes ZF into success, and the observed value
    // is always written back into expected, as the CAS contract requires.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgb %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "q" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

// CAS loop implementing the fetch-<op> operations below. With the current
// value in al (the "+a" result operand), copy the argument into the dl
// scratch register, combine it with al via <op>, and attempt
// "lock cmpxchgb"; on failure cmpxchg reloads al with the fresh value and
// the loop retries. The leading xor zeroes the scratch register —
// presumably to avoid partial-register stalls, since the byte op writes
// only its low 8 bits — and edx/rdx is clobbered accordingly. The loop
// head is 16-byte aligned. Note the macro refers to `storage` by name, so
// it must be expanded where such an identifier is in scope.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movb %[arg], %%dl\n\t"\
        op " %%al, %%dl\n\t"\
        "lock; cmpxchgb %%dl, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "q" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // The non-atomic read only seeds the loop; if it is stale, the
        // first cmpxchg fails and fetches the true current value.
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};
  198.  
//! 16-bit atomic operations, implemented with word-sized lock-prefixed instructions.
template< bool Signed >
struct operations< 2u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 2u, Signed >::aligned aligned_storage_type;

    // "lock xaddw" atomically adds v to storage and leaves the previous
    // value in v. Lock-prefixed instructions are full barriers on x86, so
    // the requested memory_order is ignored.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddw %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    // "xchg" with a memory operand is implicitly locked, hence no explicit
    // lock prefix.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgw %0, %1"
            : "+q" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    // "lock cmpxchgw": the comparand is pinned to ax by "+a". On a match
    // ZF is set and desired is stored; otherwise the observed value lands
    // in ax. "sete" materializes ZF into success, and the observed value
    // is always written back into expected.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgw %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "q" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

// CAS loop implementing the fetch-<op> operations below: with the current
// value in ax, combine the argument (staged in the dx scratch register)
// with it via <op> and retry "lock cmpxchgw" until no other thread
// intervened; cmpxchg refreshes ax on each failure. The leading xor
// zeroes the scratch register, since the word op writes only its low
// 16 bits; edx/rdx is clobbered accordingly.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movw %[arg], %%dx\n\t"\
        op " %%ax, %%dx\n\t"\
        "lock; cmpxchgw %%dx, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "q" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // The non-atomic read only seeds the loop; a stale value just
        // costs one extra iteration.
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};
  285.  
//! 32-bit atomic operations, implemented with dword-sized lock-prefixed instructions.
template< bool Signed >
struct operations< 4u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u, Signed >::aligned aligned_storage_type;

    // "lock xaddl" atomically adds v to storage and leaves the previous
    // value in v. Lock-prefixed instructions are full barriers on x86, so
    // the requested memory_order is ignored. Any general register works
    // for a 32-bit operand, hence "r" rather than the "q" used for bytes.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddl %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    // "xchg" with a memory operand is implicitly locked, hence no explicit
    // lock prefix.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgl %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    // "lock cmpxchgl": the comparand is pinned to eax by "+a". On a match
    // ZF is set and desired is stored; otherwise the observed value lands
    // in eax. "sete" materializes ZF into success, and the observed value
    // is always written back into expected.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgl %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "r" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

// CAS loop implementing the fetch-<op> operations below: with the current
// value in eax, combine the argument (staged in the edx scratch register)
// with it via <op> and retry "lock cmpxchgl" until no other thread
// intervened; cmpxchg refreshes eax on each failure. The leading xor
// zeroes the scratch register before use; edx/rdx is clobbered
// accordingly.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movl %[arg], %%edx\n\t"\
        op " %%eax, %%edx\n\t"\
        "lock; cmpxchgl %%edx, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // The non-atomic read only seeds the loop; a stale value just
        // costs one extra iteration.
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};
  372.  
  373. #if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B)
  374.  
// 64-bit operations built on the cmpxchg8b double-width CAS primitive
// (gcc_dcas_x86, from ops_gcc_x86_dcas.hpp) — used on 32-bit x86 where a
// native 64-bit locked RMW instruction is not available.
template< bool Signed >
struct operations< 8u, Signed > :
    public cas_based_operations< gcc_dcas_x86< Signed > >
{
};
  380.  
  381. #elif defined(__x86_64__)
  382.  
//! 64-bit atomic operations for x86-64, using native qword lock-prefixed instructions.
template< bool Signed >
struct operations< 8u, Signed > :
    public gcc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
{
    typedef gcc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 8u, Signed >::aligned aligned_storage_type;

    // "lock xaddq" atomically adds v to storage and leaves the previous
    // value in v. Lock-prefixed instructions are full barriers on x86, so
    // the requested memory_order is ignored.
    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xaddq %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        return v;
    }

    // "xchg" with a memory operand is implicitly locked, hence no explicit
    // lock prefix.
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "xchgq %0, %1"
            : "+r" (v), "+m" (storage)
            :
            : "memory"
        );
        return v;
    }

    // "lock cmpxchgq": the comparand is pinned to rax by "+a". On a match
    // ZF is set and desired is stored; otherwise the observed value lands
    // in rax. "sete" materializes ZF into success, and the observed value
    // is always written back into expected.
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        bool success;
        __asm__ __volatile__
        (
            "lock; cmpxchgq %3, %1\n\t"
            "sete %2"
            : "+a" (previous), "+m" (storage), "=q" (success)
            : "r" (desired)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        expected = previous;
        return success;
    }

// CAS loop implementing the fetch-<op> operations below: with the current
// value in rax, combine the argument (staged in the rdx scratch register)
// with it via <op> and retry "lock cmpxchgq" until no other thread
// intervened; cmpxchg refreshes rax on each failure. The leading xor
// zeroes the scratch register before use; rdx is clobbered accordingly.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, result)\
    __asm__ __volatile__\
    (\
        "xor %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER ", %%" BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER "\n\t"\
        ".align 16\n\t"\
        "1: movq %[arg], %%rdx\n\t"\
        op " %%rax, %%rdx\n\t"\
        "lock; cmpxchgq %%rdx, %[storage]\n\t"\
        "jne 1b"\
        : [res] "+a" (result), [storage] "+m" (storage)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER, "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        // The non-atomic read only seeds the loop; a stale value just
        // costs one extra iteration.
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, res);
        return res;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, res);
        return res;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
};
  469.  
  470. #endif
  471.  
  472. #if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
  473.  
// 128-bit operations on x86-64, built on the cmpxchg16b double-width CAS
// primitive (gcc_dcas_x86_64, from ops_gcc_x86_dcas.hpp).
template< bool Signed >
struct operations< 16u, Signed > :
    public cas_based_operations< gcc_dcas_x86_64< Signed > >
{
};
  479.  
  480. #endif // defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
  481.  
//! Inter-thread fence. Only seq_cst needs a StoreLoad barrier on x86:
//! "mfence" where it exists (x86-64, or 32-bit targets compiled with SSE2),
//! otherwise a dummy locked RMW on the stack, which is a full barrier on
//! pre-SSE2 CPUs. Acquire/release/acq_rel orders need only a compiler
//! barrier; relaxed emits nothing.
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
    if (order == memory_order_seq_cst)
    {
        __asm__ __volatile__
        (
#if defined(__x86_64__) || defined(__SSE2__)
            "mfence\n"
#else
            "lock; addl $0, (%%esp)\n"
#endif
            ::: "memory"
        );
    }
    else if ((order & (memory_order_acquire | memory_order_release)) != 0)
    {
        // NOTE(review): relies on Boost's memory_order constants being bit
        // flags so that acq_rel/seq_cst include these bits — confirm
        // against boost/memory_order.hpp.
        __asm__ __volatile__ ("" ::: "memory");
    }
}
  501.  
  502. BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
  503. {
  504.     if (order != memory_order_relaxed)
  505.         __asm__ __volatile__ ("" ::: "memory");
  506. }
  507.  
  508. } // namespace detail
  509. } // namespace atomics
  510. } // namespace mars_boost
  511.  
  512. #undef BOOST_ATOMIC_DETAIL_TEMP_CAS_REGISTER
  513.  
  514. #endif // BOOST_ATOMIC_DETAIL_OPS_GCC_X86_HPP_INCLUDED_
  515.  
Download ops_gcc_x86.hpp Source code - Download mars Source code
Related Source Codes/Software:
Hero - Elegant transition library for iOS & tvOS 2017-06-09
deep-photo-styletransfer - Code and data for paper "Deep Photo Style Transfer... 2017-06-09
mastodon - A GNU Social-compatible microblogging server ... 2017-06-09
plyr - A simple HTML5, YouTube and Vimeo player ... 2017-06-08
prepack - Prepack is a partial evaluator for JavaScript. Pre... 2017-06-08
Public-APIs - 2017-06-09
lottie-ios - An iOS library to natively render After Effects ve... 2017-06-09
Awesome-Hacking - A collection of various awesome lists for hackers,... 2017-06-09
algorithms - Minimal examples of data structures and algorithms... 2017-06-10
lectures - Oxford Deep NLP 2017 course 2017-06-10
CRYENGINE - CRYENGINE is a powerful real-time game development... 2017-06-11
postal - 2017-06-11
reactide - Reactide is the first dedicated IDE for React web ... 2017-06-11
rkt - rkt is a pod-native container engine for Linux. It... 2017-06-11
uWebSockets - Tiny WebSockets https://for... 2017-06-11
realworld - TodoMVC for the RealWorld - Exemplary fullstack Me... 2017-06-11
goreplay - GoReplay is an open-source tool for capturing and ... 2017-06-10
pyenv - Simple Python version management 2017-06-10
redux-saga - An alternative side effect model for Redux apps ... 2017-06-10
angular-starter - 2017-06-10

 Back to top