This source file includes following definitions.
- have_work_
- have_work_
- have_work_
- have_work
- add
- add
- remove
- remove
- try_remove
- clear
- have_work
- have_work
- add
- add
- pop_lock
- pop_lock
- push_lock
- push_lock
- colln
- colln
- pop_lock
- pop_lock
- push_lock
- push_lock
- colln
- colln
- pop_lock
- pop_lock
- push_lock
- push_lock
- colln
- colln
1 #ifndef libjmmcg_core_thread_safe_adaptors_hpp
2 #define libjmmcg_core_thread_safe_adaptors_hpp
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24 #include "exception.hpp"
25
26 #include <boost/mpl/assert.hpp>
27
28 #include <mutex>
29
30 namespace jmmcg { namespace ppd {
31
32
33
34
35
36
37
38 template<class Lk>
39 class no_signalling {
40 public:
41 typedef api_lock_traits<platform_api, sequential_mode>::anon_event_type atomic_t;
42 typedef Lk locker_type;
43 typedef typename locker_type::lock_traits lock_traits;
44
45
46
47
48 static constexpr generic_traits::memory_access_modes memory_access_mode=(
49 locker_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
50 && atomic_t::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
51 ? generic_traits::memory_access_modes::crew_memory_access
52 : generic_traits::memory_access_modes::erew_memory_access
53 );
54
55 constexpr no_signalling() noexcept(true) FORCE_INLINE
56 : lock_(), have_work_() {
57 }
58 explicit no_signalling(atomic_t &ev) noexcept(true) FORCE_INLINE
59 : lock_(), have_work_(&ev) {
60 assert(dynamic_cast<atomic_t *>(have_work_));
61 }
62 constexpr no_signalling(no_signalling const &s) noexcept(true) FORCE_INLINE
63 : lock_(), have_work_(s.have_work_) {
64 }
65
66 atomic_t & __fastcall have_work() noexcept(true) FORCE_INLINE {
67 assert(dynamic_cast<atomic_t *>(have_work_));
68 return *have_work_;
69 }
70 locker_type & __fastcall locker() const noexcept(true) FORCE_INLINE {
71 assert(dynamic_cast<locker_type const *>(&lock_));
72 return lock_;
73 }
74 locker_type & __fastcall locker() noexcept(true) {
75 assert(dynamic_cast<locker_type *>(&lock_));
76 return lock_;
77 }
78
79 void __fastcall add() noexcept(true) FORCE_INLINE {
80 if (have_work_) {
81 have_work_->set();
82 }
83 }
84 void __fastcall add(typename atomic_t::count_type const c) noexcept(true) FORCE_INLINE {
85 if (have_work_) {
86 for (typename atomic_t::count_type i=0; i<c; ++i) {
87 have_work_->set();
88 }
89 }
90 }
91 typename atomic_t::atomic_state_type __fastcall remove() noexcept(true) FORCE_INLINE {
92 if (have_work_) {
93 return have_work_->lock();
94 } else {
95 return atomic_t::atom_unset;
96 }
97 }
98 void __fastcall remove(typename atomic_t::count_type const c) noexcept(true) FORCE_INLINE {
99 if (have_work_) {
100 for (typename atomic_t::count_type i=0; i<c; ++i) {
101 have_work_->lock();
102 }
103 }
104 }
105 typename atomic_t::atomic_state_type __fastcall try_remove() noexcept(true) FORCE_INLINE {
106 assert(dynamic_cast<atomic_t *>(have_work_));
107 return have_work_->try_lock();
108 }
109 static constexpr void clear() noexcept(true) FORCE_INLINE {
110 }
111 static constexpr typename atomic_t::count_type __fastcall count() noexcept(true) FORCE_INLINE {
112 return 0;
113 }
114
115 private:
116 mutable locker_type lock_;
117 atomic_t *have_work_;
118 };
119
120
121
122
123
/// A signalling policy: wraps a non-owning pointer to an externally-owned
/// atomic/event object and forwards work-arrival notifications to it.
/// Unlike no_signalling, delivery is mandatory: add() throws if the
/// notification cannot be recorded.
template<class Lk>
class signalling {
public:
	typedef Lk atomic_t;
	typedef typename atomic_t::lock_traits lock_traits;
	typedef typename lock_traits::exception_type exception_type;
	typedef typename atomic_t::locker_type locker_type;

	/// Memory-access mode is inherited directly from the wrapped atomic type.
	static constexpr generic_traits::memory_access_modes memory_access_mode=atomic_t::memory_access_mode;

private:
	atomic_t *have_work_;	///< Non-owning; must outlive this object. nullptr when default-constructed.

public:
	constexpr __stdcall signalling() noexcept(true) FORCE_INLINE
	: have_work_() {
	}
	/// \param ev	The externally-owned event to signal; held by pointer, not copied.
	explicit signalling(atomic_t &ev) noexcept(true) FORCE_INLINE
	: have_work_(&ev) {
		assert(dynamic_cast<atomic_t *>(have_work_));
	}
	/// Copies share the same underlying event object.
	__stdcall signalling(signalling const &s) noexcept(true) FORCE_INLINE
	: have_work_(s.have_work_) {
		assert(dynamic_cast<atomic_t *>(have_work_));
	}

	// Non-movable and non-assignable: the event pointer may not be re-seated
	// after construction (copy-construction is the only way to share it).
	signalling(signalling &&)=delete;
	void operator=(signalling const &)=delete;
	void operator=(signalling &&)=delete;

	/// Access the wrapped event. Pre-condition: a non-null event was supplied.
	constexpr atomic_t & __fastcall have_work() const noexcept(true) FORCE_INLINE {
		assert(dynamic_cast<atomic_t *>(have_work_));
		return *have_work_;
	}
	atomic_t & __fastcall have_work() noexcept(true) FORCE_INLINE {
		assert(dynamic_cast<atomic_t *>(have_work_));
		return *have_work_;
	}
	/// Access the lock owned by the wrapped event.
	constexpr locker_type & __fastcall locker() const noexcept(true) FORCE_INLINE {
		assert(dynamic_cast<atomic_t const *>(have_work_));
		return have_work_->locker();
	}
	locker_type & __fastcall locker() noexcept(true) FORCE_INLINE {
		assert(dynamic_cast<atomic_t *>(have_work_));
		return have_work_->locker();
	}

	/// Notify that one item of work has arrived.
	/// \throws exception_type	if the underlying event does not report atom_set.
	void add() noexcept(false) FORCE_INLINE {
		assert(dynamic_cast<atomic_t *>(have_work_));
		typename lock_traits::atomic_state_type const ret=have_work_->set_nolk(atomic_t::states::new_work_arrived);
		if (ret!=lock_traits::atom_set) {
			throw exception_type(_T("Could not add more work to the atomic object."), jmmcg::info::function(__LINE__, __PRETTY_FUNCTION__, typeid(*this)), JMMCG_REVISION_HDR(_T("$Header: svn+ssh://jmmcg@svn.code.sf.net/p/libjmmcg/code/trunk/libjmmcg/core/thread_safe_adaptors.hpp 2287 2018-06-24 17:53:41Z jmmcg $")));
		}
	}
	/// Notify that c items of work have arrived; throws on the first failure.
	void __fastcall add(typename atomic_t::count_type const c) noexcept(false) FORCE_INLINE {
		for (typename atomic_t::count_type i=0; i<c; ++i) {
			add();
		}
	}
	// The remaining operations are declared here and defined in
	// thread_safe_adaptors_impl.hpp (included at the end of this header).
	typename atomic_t::lock_result_type __fastcall remove() noexcept(noexcept(have_work_->lock(0))) FORCE_INLINE;
	void __fastcall remove(typename atomic_t::count_type const c) noexcept(false) FORCE_INLINE;
	typename atomic_t::lock_result_type __fastcall try_remove() noexcept(noexcept(have_work_->try_lock())) FORCE_INLINE;
	void clear() noexcept(noexcept(have_work_->clear())) FORCE_INLINE;
	typename atomic_t::count_type __fastcall count() const noexcept(noexcept(have_work_->count())) FORCE_INLINE;
};
192
193
194
195
196
197
198
199
/// A thread-safe adaptor around an arbitrary container type.
///
/// \param C	The underlying container (privately inherited; exposed via colln()).
/// \param M	The atomic/lock type guarding access.
/// \param WL	The write-lock type; defaults to M::write_lock_type.
/// \param Sig	The signalling policy; defaults to no_signalling<M>.
/// \param MLk	A lock able to acquire two M objects in any order (used e.g. for
///		operations involving two safe_colln instances, such as swap/assign).
///
/// Member-function definitions live in thread_safe_adaptors_impl.hpp,
/// included at the end of this header.
template<
	typename C,
	typename M,
	typename WL=typename M::write_lock_type,
	class Sig=no_signalling<M>,
	class MLk=typename lock::any_order::all<M::lock_traits::api_type, typename M::lock_traits::model_type, M, M>
>
class safe_colln : private C
{
public:
	typedef C container_type;
	typedef Sig have_work_type;
	typedef M atomic_t;
	typedef WL write_lock_type;
	typedef typename atomic_t::read_lock_type read_lock_type;
	typedef MLk lock_all_type;
	typedef typename atomic_t::lock_traits lock_traits;
	typedef api_threading_traits<lock_traits::api_type, typename lock_traits::model_type> thread_traits;
	typedef typename container_type::reference reference;
	typedef typename container_type::const_reference const_reference;
	typedef typename container_type::size_type size_type;
	typedef typename container_type::value_type value_type;
	typedef typename lock_traits::exception_type exception_type;
	using exit_requested_type=typename have_work_type::atomic_t;

	/// CREW memory access only if every constituent lock type and the
	/// signalling policy support it; otherwise degrade to EREW.
	static constexpr generic_traits::memory_access_modes memory_access_mode=(
		write_lock_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& read_lock_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& lock_all_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access

		&& have_work_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		? generic_traits::memory_access_modes::crew_memory_access
		: generic_traits::memory_access_modes::erew_memory_access
	);

	BOOST_MPL_ASSERT((std::is_same<typename write_lock_type::atomic_t, atomic_t>));

	/// The signalling policy; mutable so const operations may still signal/lock.
	mutable have_work_type have_work;

	// Unlike funky_queue, pop and push share the single lock held by the
	// signalling policy: all four accessors return the same object.
	atomic_t & __fastcall pop_lock() noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	atomic_t & __fastcall pop_lock() const noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	atomic_t & __fastcall push_lock() noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	atomic_t & __fastcall push_lock() const noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	// Construction/destruction/assignment; defined in the impl header.
	__stdcall safe_colln() noexcept(noexcept(container_type()) && noexcept(have_work_type())) FORCE_INLINE;
	explicit safe_colln(typename have_work_type::atomic_t &) FORCE_INLINE;
	explicit safe_colln(std::initializer_list<value_type>) FORCE_INLINE;
	explicit __stdcall safe_colln(size_type const sz, value_type const &v=value_type()) FORCE_INLINE;
	template<class T1, class T2>
	__stdcall FORCE_INLINE
	safe_colln(size_type const sz, T1 const &, T2 const &);
	explicit __stdcall safe_colln(const container_type &) FORCE_INLINE;
	__stdcall safe_colln(const safe_colln &) noexcept(false) FORCE_INLINE;
	__stdcall ~safe_colln() FORCE_INLINE;
	safe_colln & __fastcall operator=(const safe_colln &) noexcept(false) FORCE_INLINE;

	bool __fastcall empty() const noexcept(true) FORCE_INLINE;
	// sync_size() may lock (noexcept(false)); size() is the unlocked variant.
	size_type __fastcall sync_size() const noexcept(false) FORCE_INLINE;
	size_type __fastcall size() const noexcept(true) FORCE_INLINE;

	// Note: returns by value, not by reference, to avoid exposing an element
	// reference outside the adaptor's locking.
	value_type __fastcall operator[](size_type s) const noexcept(false) FORCE_INLINE;

	void __fastcall push_back(value_type const &v) noexcept(false) FORCE_INLINE;
	void __fastcall push_back(value_type &&v) noexcept(false) FORCE_INLINE;

	void __fastcall push_front(const value_type &v) noexcept(false) FORCE_INLINE;

	void __fastcall push_front(value_type &&v) noexcept(false) FORCE_INLINE;

	size_type __fastcall erase(const value_type &v) noexcept(false) FORCE_INLINE;

	void __fastcall reserve(size_type sz) noexcept(false) FORCE_INLINE;

	void __fastcall clear() noexcept(false) FORCE_INLINE;

	void __fastcall swap(safe_colln &t) noexcept(false) FORCE_INLINE;

	// Resize without value-initialising new elements, without taking the lock.
	// NOTE(review): "_nolk" presumably means the caller must already hold the
	// appropriate lock — confirm against the impl header.
	void __fastcall resize_noinit_nolk(const size_type sz) noexcept(false) FORCE_INLINE;

	void __fastcall resize(const size_type sz) noexcept(false) FORCE_INLINE;

	bool __fastcall operator==(safe_colln const &) const noexcept(true) FORCE_INLINE;
	// Comparison across adaptors that share the container type but differ in
	// lock/signalling parameters.
	template<typename M1, typename WL1, class Sig1, class MLk1>
	bool __fastcall FORCE_INLINE
	operator==(safe_colln<C, M1, WL1, Sig1, MLk1> const &) const noexcept(true);

	/// Unlocked access to the underlying container; caller is responsible for
	/// any synchronisation required.
	container_type const &colln() const noexcept(true) FORCE_INLINE {
		return static_cast<container_type const &>(*this);
	}
	container_type &colln() noexcept(true) FORCE_INLINE {
		return static_cast<container_type &>(*this);
	}

};
322
323
324
325
326
327
328
329
330
331
/// A thread-safe queue adaptor around an arbitrary queue-like container type.
///
/// \param QT	The underlying container (protectedly inherited; exposed via colln()).
/// \param M	The atomic/lock type guarding access.
/// \param WL	The write-lock type; defaults to M::write_lock_type.
/// \param Sig	The signalling policy; defaults to no_signalling<M>.
/// \param ValRet	The type returned by the pop operations; defaults to QT::value_type.
/// \param MLk	A lock able to acquire two M objects in any order.
///
/// Member-function definitions live in thread_safe_adaptors_impl.hpp.
template<
	typename QT,
	typename M,
	typename WL=typename M::write_lock_type,
	class Sig=no_signalling<M>,
	class ValRet=typename QT::value_type,
	class MLk=typename lock::any_order::all<M::lock_traits::api_type, typename M::lock_traits::model_type, M, M>
>
class queue : protected QT {
public:
	typedef QT container_type;
	typedef Sig have_work_type;
	typedef M atomic_t;
	typedef WL write_lock_type;
	typedef typename atomic_t::read_lock_type read_lock_type;
	typedef MLk lock_all_type;
	typedef typename write_lock_type::lock_traits lock_traits;
	typedef api_threading_traits<lock_traits::api_type, typename lock_traits::model_type> thread_traits;
	typedef typename container_type::reference reference;
	typedef typename container_type::const_reference const_reference;
	typedef typename container_type::size_type size_type;
	typedef typename container_type::value_type value_type;
	typedef ValRet value_ret_type;
	typedef typename lock_traits::exception_type exception_type;

	/// CREW memory access only if every constituent lock type, the element
	/// type and the signalling policy support it; otherwise degrade to EREW.
	static constexpr generic_traits::memory_access_modes memory_access_mode=(
		write_lock_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& read_lock_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& lock_all_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& value_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& have_work_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		? generic_traits::memory_access_modes::crew_memory_access
		: generic_traits::memory_access_modes::erew_memory_access
	);

	BOOST_MPL_ASSERT((std::is_same<typename write_lock_type::atomic_t, atomic_t>));

	/// The signalling policy; mutable so const operations may still signal/lock.
	mutable have_work_type have_work;

	// Unlike funky_queue, pop and push share the single lock held by the
	// signalling policy: all four accessors return the same object.
	atomic_t & __fastcall pop_lock() noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	atomic_t & __fastcall pop_lock() const noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	atomic_t & __fastcall push_lock() noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	atomic_t & __fastcall push_lock() const noexcept(true) FORCE_INLINE {
		return have_work.locker();
	}

	// Construction/destruction/assignment; defined in the impl header.
	__stdcall queue() noexcept(noexcept(container_type()) && noexcept(have_work_type())) FORCE_INLINE;
	explicit queue(typename have_work_type::atomic_t &) FORCE_INLINE;
	__stdcall queue(queue const &) noexcept(false) FORCE_INLINE;
	__stdcall ~queue() noexcept(true) FORCE_INLINE;
	queue &__fastcall operator=(queue const &) noexcept(false) FORCE_INLINE;

	bool __fastcall empty() const noexcept(true) FORCE_INLINE;
	// sync_size() may lock (noexcept(false)); size() is the unlocked variant.
	size_type __fastcall sync_size() const noexcept(false) FORCE_INLINE;
	size_type __fastcall size() const noexcept(true) FORCE_INLINE;

	// Note: returns by value to avoid exposing an element reference outside
	// the adaptor's locking.
	value_type __fastcall front() const noexcept(false) FORCE_INLINE;

	void __fastcall push_back(value_type const &v) noexcept(false) FORCE_INLINE;
	void __fastcall push_back(value_type &&v) noexcept(false) FORCE_INLINE;

	value_ret_type __fastcall pop_front() noexcept(false) FORCE_INLINE;
	void __fastcall push_front(const value_type &v) noexcept(false) FORCE_INLINE;
	void __fastcall push_front(value_type &&v) noexcept(false) FORCE_INLINE;

	size_type __fastcall erase(const value_type &v) noexcept(false) FORCE_INLINE;

	void __fastcall clear() noexcept(false) FORCE_INLINE;

	/// Unlocked access to the underlying container; caller is responsible for
	/// any synchronisation required.
	container_type const &colln() const noexcept(true) FORCE_INLINE {
		return static_cast<container_type const &>(*this);
	}
	container_type &colln() noexcept(true) FORCE_INLINE {
		return static_cast<container_type &>(*this);
	}

	// "_nolk"/"_nochk"/"_nosig" variants: presumably skip locking, emptiness
	// checks and signalling respectively — confirm against the impl header.
	value_ret_type __fastcall pop_front_nolk() noexcept(false) FORCE_INLINE;

	value_type __fastcall pop_front_1_nochk_nolk() noexcept(noexcept(have_work.remove())) FORCE_INLINE;
	value_type __fastcall pop_front_1_nochk_nosig() noexcept(true) FORCE_INLINE;

protected:
	// virtual: derived queue types may customise the unlocked pop primitive.
	virtual value_ret_type __fastcall pop_front_nochk_nolk() noexcept(false) FORCE_INLINE;
};
430
431
432
433
434
435
436
437
438
439
440
441
442
443
/// A thread-safe queue adaptor that, unlike queue, maintains two separate
/// locks (push_lock_ and pop_lock_), so producers and consumers contend on
/// different locks.
///
/// \param QT	The underlying container (privately inherited; exposed via colln()).
/// \param M	The atomic/lock type guarding access.
/// \param WL	The write-lock type; defaults to M::write_lock_type.
/// \param Sig	The signalling policy; defaults to no_signalling<M>.
/// \param ValRet	The type returned by the pop operations; defaults to QT::value_type.
/// \param MLk	A lock able to acquire two M objects in any order.
///
/// Member-function definitions live in thread_safe_adaptors_impl.hpp.
template<
	typename QT,
	typename M,
	typename WL=typename M::write_lock_type,
	class Sig=no_signalling<M>,
	class ValRet=typename QT::value_type,
	class MLk=typename lock::any_order::all<M::lock_traits::api_type, typename M::lock_traits::model_type, M, M>
>
class funky_queue : private QT {
public:
	typedef QT container_type;
	typedef Sig have_work_type;
	typedef M atomic_t;
	typedef WL write_lock_type;
	typedef typename atomic_t::read_lock_type read_lock_type;
	typedef MLk lock_all_type;
	typedef typename write_lock_type::lock_traits lock_traits;
	typedef api_threading_traits<lock_traits::api_type, typename lock_traits::model_type> thread_traits;
	typedef typename container_type::reference reference;
	typedef typename container_type::const_reference const_reference;
	typedef typename container_type::size_type size_type;
	typedef typename container_type::value_type value_type;
	typedef ValRet value_ret_type;
	typedef typename lock_traits::exception_type exception_type;

	// NOTE(review): meaning of serialise_size (=2) is not evident from this
	// header — confirm against the impl header before relying on it.
	static constexpr size_type serialise_size=2;

	/// CREW memory access only if every constituent lock type, the element
	/// type and the signalling policy support it; otherwise degrade to EREW.
	static constexpr generic_traits::memory_access_modes memory_access_mode=(
		write_lock_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& read_lock_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& lock_all_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& value_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		&& have_work_type::memory_access_mode==generic_traits::memory_access_modes::crew_memory_access
		? generic_traits::memory_access_modes::crew_memory_access
		: generic_traits::memory_access_modes::erew_memory_access
	);

	BOOST_MPL_ASSERT((std::is_same<typename write_lock_type::atomic_t, atomic_t>));

	/// The signalling policy; mutable so const operations may still signal/lock.
	mutable have_work_type have_work;

	// Separate pop/push locks: these return distinct members, unlike queue
	// where all four accessors return the signalling policy's single lock.
	atomic_t & __fastcall pop_lock() noexcept(true) FORCE_INLINE {
		return pop_lock_;
	}

	atomic_t & __fastcall pop_lock() const noexcept(true) FORCE_INLINE {
		return pop_lock_;
	}

	atomic_t & __fastcall push_lock() noexcept(true) FORCE_INLINE {
		return push_lock_;
	}

	atomic_t & __fastcall push_lock() const noexcept(true) FORCE_INLINE {
		return push_lock_;
	}

	// Construction/destruction/assignment; defined in the impl header.
	__stdcall funky_queue() noexcept(noexcept(container_type()) && noexcept(have_work_type())) FORCE_INLINE;
	explicit funky_queue(typename have_work_type::atomic_t &) FORCE_INLINE;
	__stdcall funky_queue(funky_queue const &) noexcept(false) FORCE_INLINE;
	__stdcall ~funky_queue() FORCE_INLINE;
	funky_queue &__fastcall operator=(funky_queue const &) noexcept(false) FORCE_INLINE;

	bool __fastcall empty() const noexcept(true) FORCE_INLINE;
	// sync_size() may lock (noexcept(false)); size() is the unlocked variant.
	size_type __fastcall sync_size() const noexcept(false) FORCE_INLINE;
	size_type __fastcall size() const noexcept(true) FORCE_INLINE;

	void __fastcall clear() noexcept(false) FORCE_INLINE;

	/// Erase a matching element.
	/// \return	presumably whether an element was erased — confirm in the impl header.
	bool __fastcall erase(value_type const &) noexcept(false) FORCE_INLINE;

	// Note: front()/back() return by value to avoid exposing an element
	// reference outside the adaptor's locking.
	value_type __fastcall front() const noexcept(false) FORCE_INLINE;

	value_type __fastcall back() const noexcept(false) FORCE_INLINE;

	void __fastcall push(value_type const &v) noexcept(false) FORCE_INLINE;

	void __fastcall push(value_type &&v) noexcept(false) FORCE_INLINE;

	void __fastcall push_back(value_type const &v) noexcept(false) FORCE_INLINE;

	void __fastcall push_back(value_type &&v) noexcept(false) FORCE_INLINE;

	value_ret_type __fastcall pop() noexcept(false) FORCE_INLINE;

	value_ret_type __fastcall pop_front() noexcept(false) FORCE_INLINE;

	void __fastcall remove(const value_type &e) noexcept(false) FORCE_INLINE;

	/// Unlocked access to the underlying container; caller is responsible for
	/// any synchronisation required.
	container_type const &colln() const noexcept(true) FORCE_INLINE {
		return static_cast<container_type const &>(*this);
	}
	container_type &colln() noexcept(true) FORCE_INLINE {
		return static_cast<container_type &>(*this);
	}

	// "_nolk"/"_nochk"/"_nosig" variants: presumably skip locking, emptiness
	// checks and signalling respectively — confirm against the impl header.
	value_ret_type __fastcall pop_front_nolk() noexcept(false) FORCE_INLINE;

	value_type __fastcall pop_front_1_nochk_nolk() noexcept(noexcept(have_work.remove())) FORCE_INLINE;
	value_type __fastcall pop_front_1_nochk_nosig() noexcept(true) FORCE_INLINE;

protected:
	value_type &__fastcall back_nolk() noexcept(true) FORCE_INLINE;
	// virtual: derived queue types may customise the unlocked pop primitive.
	virtual value_ret_type __fastcall pop_front_nochk_nolk() noexcept(false) FORCE_INLINE;

private:
	mutable atomic_t push_lock_;	///< Guards the push end; mutable for const accessors.
	mutable atomic_t pop_lock_;	///< Guards the pop end; mutable for const accessors.

	value_ret_type __fastcall pop_nolk() noexcept(false) FORCE_INLINE;

	value_type const &__fastcall back_nolk() const noexcept(true) FORCE_INLINE;
	void __fastcall push_back_nolk(const value_type &e) noexcept(false) FORCE_INLINE;
	void __fastcall push_back_nolk(value_type &&e) noexcept(false) FORCE_INLINE;
	void __fastcall push_nolk(const value_type &e) noexcept(false) FORCE_INLINE;
	void __fastcall push_nolk(value_type &&e) noexcept(false) FORCE_INLINE;
	void __fastcall pop_nochk_nolk() noexcept(false) FORCE_INLINE;
};
609
610 } }
611
612 #include "thread_safe_adaptors_impl.hpp"
613
614 #endif