]>
Commit | Line | Data |
---|---|---|
4b45943a | 1 | /* elide.h: Generic lock elision support for powerpc. |
49a9f6ab | 2 | Copyright (C) 2015 Free Software Foundation, Inc. |
4b45943a AZ |
3 | This file is part of the GNU C Library. |
4 | ||
5 | The GNU C Library is free software; you can redistribute it and/or | |
6 | modify it under the terms of the GNU Lesser General Public | |
7 | License as published by the Free Software Foundation; either | |
8 | version 2.1 of the License, or (at your option) any later version. | |
9 | ||
10 | The GNU C Library is distributed in the hope that it will be useful, | |
11 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
12 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
13 | Lesser General Public License for more details. | |
14 | ||
15 | You should have received a copy of the GNU Lesser General Public | |
16 | License along with the GNU C Library; if not, see | |
17 | <http://www.gnu.org/licenses/>. */ | |
18 | ||
19 | #ifndef ELIDE_PPC_H | |
20 | # define ELIDE_PPC_H | |
21 | ||
22 | #ifdef ENABLE_LOCK_ELISION | |
23 | # include <htm.h> | |
24 | # include <elision-conf.h> | |
25 | ||
6ec52bf6 TMQMF |
26 | /* Get the new value of adapt_count according to the elision |
27 | configurations. Returns true if the system should retry again or false | |
28 | otherwise. */ | |
4b45943a | 29 | static inline bool |
6ec52bf6 | 30 | __get_new_count (uint8_t *adapt_count) |
4b45943a | 31 | { |
6ec52bf6 TMQMF |
32 | /* A persistent failure indicates that a retry will probably |
33 | result in another failure. Use normal locking now and | |
34 | for the next couple of calls. */ | |
35 | if (_TEXASRU_FAILURE_PERSISTENT (__builtin_get_texasru ())) | |
4b45943a | 36 | { |
6ec52bf6 TMQMF |
37 | if (__elision_aconf.skip_lock_internal_abort > 0) |
38 | *adapt_count = __elision_aconf.skip_lock_internal_abort; | |
4b45943a AZ |
39 | return false; |
40 | } | |
6ec52bf6 TMQMF |
41 | /* Same logic as above, but for a number of temporary failures in a |
42 | a row. */ | |
43 | else if (__elision_aconf.skip_lock_out_of_tbegin_retries > 0 | |
44 | && __elision_aconf.try_tbegin > 0) | |
45 | *adapt_count = __elision_aconf.skip_lock_out_of_tbegin_retries; | |
46 | return true; | |
4b45943a AZ |
47 | } |
48 | ||
6ec52bf6 TMQMF |
/* CONCURRENCY NOTES:

   The evaluation of the macro expression is_lock_free encompasses one or
   more loads from memory locations that are concurrently modified by other
   threads.  For lock elision to work, this evaluation and the rest of the
   critical section protected by the lock must be atomic because an
   execution with lock elision must be equivalent to an execution in which
   the lock would have been actually acquired and released.  Therefore, we
   evaluate is_lock_free inside of the transaction that represents the
   critical section for which we want to use lock elision, which ensures
   the atomicity that we require.  */

/* Returns 1 if the lock defined by is_lock_free was elided (a
   transaction representing the critical section was started), 0
   otherwise.  ADAPT_COUNT is a per-lock state variable; while it is
   positive, elision is skipped and the count is decremented.  */
# define ELIDE_LOCK(adapt_count, is_lock_free) \
  ({ \
    int ret = 0; \
    if ((adapt_count) > 0) \
      (adapt_count)--; \
    else \
      for (int i = __elision_aconf.try_tbegin; i > 0; i--) \
	{ \
	  if (__builtin_tbegin (0)) \
	    { \
	      /* Transaction started; elide only if the lock really \
		 is free, otherwise abort so we serialize on it.  */ \
	      if (is_lock_free) \
		{ \
		  ret = 1; \
		  break; \
		} \
	      __builtin_tabort (_ABORT_LOCK_BUSY); \
	    } \
	  else if (!__get_new_count (&(adapt_count))) \
	    break; \
	} \
    ret; \
  })
4b45943a AZ |
86 | |
/* Try to elide a trylock call.  Evaluates to 1 if the lock was elided
   via ELIDE_LOCK, 0 otherwise.  When elision is disabled
   (try_tbegin == 0) this is a no-op that yields 0.  If WRITE is
   nonzero the enclosing transaction is aborted with
   _ABORT_NESTED_TRYLOCK instead — NOTE(review): presumably this
   signals a write trylock attempted inside an already-elided
   read-side critical section; confirm against the rwlock callers.  */
# define ELIDE_TRYLOCK(adapt_count, is_lock_free, write) \
  ({ \
    int ret = 0; \
    if (__elision_aconf.try_tbegin > 0) \
      { \
	if (write) \
	  __builtin_tabort (_ABORT_NESTED_TRYLOCK); \
	ret = ELIDE_LOCK (adapt_count, is_lock_free); \
      } \
    ret; \
  })
4b45943a AZ |
98 | |
99 | ||
/* End an elided critical section.  Returns true when IS_LOCK_FREE is
   nonzero — i.e. the lock had been elided — after committing the
   enclosing transaction; returns false when the caller must release
   the real lock itself.  */
static inline bool
__elide_unlock (int is_lock_free)
{
  if (!is_lock_free)
    return false;

  __builtin_tend (0);
  return true;
}
110 | ||
/* Commit an elided critical section.  Evaluates to true when the
   transaction was ended, false when the caller must perform a normal
   unlock.  */
# define ELIDE_UNLOCK(is_lock_free) \
  __elide_unlock (is_lock_free)
113 | ||
# else

/* Lock elision disabled at build time: every operation evaluates to 0
   ("not elided"), so callers always take the normal locking path.  */
# define ELIDE_LOCK(adapt_count, is_lock_free) 0
# define ELIDE_TRYLOCK(adapt_count, is_lock_free, write) 0
# define ELIDE_UNLOCK(is_lock_free) 0

#endif /* ENABLE_LOCK_ELISION */
121 | ||
122 | #endif |