root/core/batch.hpp

/******************************************************************************
** $Header: svn+ssh://jmmcg@svn.code.sf.net/p/libjmmcg/code/trunk/libjmmcg/core/batch.hpp 2055 2017-05-13 19:35:47Z jmmcg $
**
** Copyright © 2004 by J.M.McGuiness, coder@hussar.me.uk
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public
** License as published by the Free Software Foundation; either
** version 2.1 of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** You should have received a copy of the GNU Lesser General Public
** License along with this library; if not, write to the Free Software
** Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <bits/ios_base.h>
#include <utility>	// For std::declval, used in the noexcept specifications below.

namespace jmmcg {

	/// An adaptor for collections that batches the atomic removal of up to I items from the collection, Colln.
	/**
		As items are removed from the collection, they may be merged into the last item that was removed. This implies that Colln::value_type is itself some kind of collection to which that extra work may be added.
	*/
	template<
		class Colln,	///< The collection to be adapted.
		unsigned long I	///< The maximum number of items per batch.
	>
	struct front_batch : public Colln {
		static constexpr unsigned long max_size=I;	///< The maximum number of items to batch.
		typedef Colln base_t;
		typedef typename base_t::thread_traits thread_traits;
		typedef Colln container_type;
		typedef typename container_type::value_ret_type value_ret_type;
		typedef typename container_type::exception_type exception_type;

		static_assert(max_size>1UL, "The batch size must be greater than one: sizes of zero and one have their own specialisations.");

		constexpr front_batch() noexcept(noexcept(base_t()))
		: base_t() {}
		explicit front_batch(typename base_t::have_work_type::atomic_t &a) noexcept(noexcept(base_t(a)))
		: base_t(a) {}
		front_batch(front_batch const &fb) noexcept(noexcept(base_t(std::declval<front_batch>())))
		: base_t(fb) {}
		front_batch &__fastcall FORCE_INLINE operator=(front_batch const &fb) noexcept(noexcept(base_t::operator=(std::declval<front_batch>()))) {
			base_t::operator=(fb);
			return *this;
		}

		/// The items are batched when popped from the queue.
		/**
			This is used to return a collection of items from the signalled_work_queue, in the order in which they were inserted. At least one item will be returned, and if there are sufficient items in the signalled_work_queue, then max_size items will be returned. This implies that the thread that extracts items from the queue does the work of batching them.

			\return A batch of either one or max_size items.
		*/
		value_ret_type __fastcall pop_front_nochk_nolk() noexcept(true) FORCE_INLINE;
	};
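
	/**
		A minimal usage sketch of front_batch, for illustration only. It assumes a hypothetical signalled_work_queue-like collection, here called work_queue_t, whose value_ret_type and locking conventions are defined elsewhere in the library, and it assumes that pop_front_nochk_nolk() is called with any required checks and locks already performed by the caller, as the "nochk_nolk" suffix suggests.

		\code
			// Hypothetical adapted queue: up to four items are removed per batch.
			typedef jmmcg::front_batch<work_queue_t, 4UL> batched_queue_t;

			batched_queue_t queue;
			// ...producer threads add work via the base-class interface...

			// The consumer removes at least one, and at most max_size (here four),
			// items in a single operation, in the order in which they were inserted.
			batched_queue_t::value_ret_type batch(queue.pop_front_nochk_nolk());
		\endcode
	*/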

	/// Batch-sizes of zero aren't allowed.
	template<
		class Colln	///< The collection to be adapted.
	>
	class front_batch<Colln, 0UL> : public Colln {
	};

	/// If the batch-size is one, collapse this to a simple wrapper of the container_type that performs no batching.
	template<
		class Colln	///< The collection to be adapted.
	>
	struct front_batch<Colln, 1UL> : public Colln {
		static constexpr unsigned long max_size=1UL;	///< The maximum number of items to batch.
		typedef Colln base_t;
		typedef typename base_t::thread_traits thread_traits;
		typedef Colln container_type;
		typedef typename container_type::value_ret_type value_ret_type;

		constexpr front_batch() noexcept(noexcept(base_t()))
		: base_t() {}
		explicit front_batch(typename base_t::have_work_type::atomic_t &a) noexcept(noexcept(base_t(a)))
		: base_t(a) {}
		front_batch(front_batch const &fb) noexcept(noexcept(base_t(std::declval<front_batch>())))
		: base_t(fb) {}
		front_batch &__fastcall FORCE_INLINE operator=(front_batch const &fb) noexcept(noexcept(base_t::operator=(std::declval<front_batch>()))) {
			base_t::operator=(fb);
			return *this;
		}

		value_ret_type __fastcall pop_front_nochk_nolk() noexcept(true);
	};

	/// An adaptor for collections that batches up to I items as they are added to or removed from the collection, Colln.
	/**
		As items are added to the collection, they may be merged into the last item that was added. This implies that Colln::value_type is itself some kind of collection to which the new work may be added.
	*/
	template<
		class Colln,	///< The collection to be adapted.
		unsigned long I	///< The maximum number of items per batch.
	>
	struct back_batch final : public front_batch<Colln, I> {
		typedef front_batch<Colln, I> base_t;
		using base_t::max_size;
		typedef typename base_t::thread_traits thread_traits;
		typedef typename base_t::container_type container_type;
		typedef typename container_type::value_type value_type;
		typedef typename container_type::exception_type exception_type;

		static_assert(max_size>1UL, "The batch size must be greater than one: sizes of zero and one have their own specialisations.");

		constexpr back_batch() noexcept(noexcept(base_t()))
		: base_t() {}
		explicit back_batch(typename base_t::have_work_type::atomic_t &a) noexcept(noexcept(base_t(a)))
		: base_t(a) {}
		/// The items are batched when pushed onto the queue.
		/**
			The items are batched as they are added to the queue. Therefore the thread that adds the items does the batching work, and the queue may contain a mix of batched and unbatched items, potentially reducing the number of items added to the queue and therefore the number of memory allocations performed.
		*/
// TODO Implement this:		void __fastcall push_back(const value_type &data_item) noexcept(false);

		/// The items are batched when pushed onto the queue.
		/**
			The items are batched as they are added to the queue. Therefore the thread that adds the items does the batching work, and the queue may contain a mix of batched and unbatched items, potentially reducing the number of items added to the queue and therefore the number of memory allocations performed.
		*/
// TODO Implement this:		void __fastcall push(const value_type &data_item) noexcept(false);
	};
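
	/**
		The push_back()/push() members above are still TODOs, so the batching-on-push behaviour is not yet implemented. The following is a sketch of what such an implementation could look like; it assumes (the library does not guarantee this) that the underlying collection provides empty(), back() and push_back(), and that value_type exposes size(), begin(), end() and insert().

		\code
			// Hypothetical implementation sketch only, not the library's definition.
			template<class Colln, unsigned long I>
			inline void back_batch<Colln, I>::push_back(const value_type &data_item) {
				if (!this->empty() && (this->back().size()+data_item.size())<=base_t::max_size) {
					// There is room in the most recently added element: merge the new work
					// into it, so no new element (hence no new allocation) is added to the queue.
					this->back().insert(this->back().end(), data_item.begin(), data_item.end());
				} else {
					// Otherwise add the work as a new, currently unbatched, element.
					base_t::push_back(data_item);
				}
			}
		\endcode
	*/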

	/// Batch-sizes of zero aren't allowed.
	template<
		class Colln	///< The collection to be adapted.
	>
	class back_batch<Colln, 0UL> final : public front_batch<Colln, 0UL> {
	};

	/// If the batch-size is one, collapse this to a simple forwarding wrapper of the container_type.
	template<
		class Colln	///< The collection to be adapted.
	>
	struct back_batch<Colln, 1UL> final : public front_batch<Colln, 1UL> {
		typedef front_batch<Colln, 1UL> base_t;
		using base_t::max_size;
		typedef typename base_t::thread_traits thread_traits;
		typedef typename base_t::container_type container_type;
		typedef typename container_type::value_type value_type;
		typedef typename container_type::exception_type exception_type;

		constexpr back_batch() noexcept(noexcept(base_t()))
		: base_t() {}
		explicit back_batch(typename base_t::have_work_type::atomic_t &a) noexcept(noexcept(base_t(a)))
		: base_t(a) {}
		back_batch(back_batch const &bb) noexcept(noexcept(base_t(std::declval<back_batch>())))
		: base_t(bb) {}
		back_batch &__fastcall FORCE_INLINE operator=(back_batch const &bb) noexcept(noexcept(base_t::operator=(std::declval<back_batch>()))) {
			base_t::operator=(bb);
			return *this;
		}

		void __fastcall push_back(const value_type &data_item) noexcept(false) FORCE_INLINE;
		void __fastcall push_back(value_type &&data_item) noexcept(false) FORCE_INLINE;
		void __fastcall push(const value_type &data_item) noexcept(false) FORCE_INLINE;
		void __fastcall push(value_type &&data_item) noexcept(false) FORCE_INLINE;
	};
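
	/**
		With a batch size of one the adaptor performs no batching: push_back()/push() simply forward the single item to the underlying collection, and pop_front_nochk_nolk() returns one item at a time. A minimal usage sketch, assuming the same hypothetical work_queue_t collection as in the front_batch example above:

		\code
			typedef jmmcg::back_batch<work_queue_t, 1UL> unbatched_queue_t;

			unbatched_queue_t queue;
			unbatched_queue_t::value_type work;	// Assumes the work item is default-constructible.
			queue.push_back(work);	// Forwarded straight to the underlying collection.
			queue.push(unbatched_queue_t::value_type());	// The rvalue overloads are also forwarded.
		\endcode
	*/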

}

#include "batch_impl.hpp"
