ARGOBOTS  dce6e727ffc4ca5b3ffc04cb9517c6689be51ec5
abti_mem.h
/* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil ; -*- */
/*
 * See COPYRIGHT in top-level directory.
 */

#ifndef ABTI_MEM_H_INCLUDED
#define ABTI_MEM_H_INCLUDED

/* Memory allocation */

/* Round desc_size up to the cacheline size. The last four bytes will be
 * used to determine whether the descriptor is allocated externally (i.e.,
 * malloc()) or taken from a memory pool. */
#define ABTI_MEM_POOL_DESC_ELEM_SIZE \
    ABTU_roundup_size(sizeof(ABTI_ythread), ABT_CONFIG_STATIC_CACHELINE_SIZE)
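
/* A worked example of the size arithmetic above (illustration only, with a
 * made-up sizeof(ABTI_ythread) of 200 bytes and a 64-byte cacheline): the
 * element size rounds up to 256, and the 4-byte pool/malloc tag used by
 * ABTI_mem_alloc_desc() further below then sits at offset 256 - 4 = 252.
 * The helper below mimics ABTU_roundup_size() for power-of-two multiples;
 * it is a sketch, not part of this header. */
#if 0
#include <assert.h>
#include <stddef.h>
static size_t example_roundup(size_t val, size_t multiple)
{
    /* Valid for power-of-two multiples such as the cacheline size. */
    return (val + multiple - 1) & ~(multiple - 1);
}
static void example_desc_elem_size(void)
{
    size_t desc_size = 200; /* hypothetical sizeof(ABTI_ythread) */
    size_t elem_size = example_roundup(desc_size, 64);
    assert(elem_size == 256);     /* one pool element */
    assert(elem_size - 4 == 252); /* offset of the 4-byte tag */
}
#endif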

enum {
    ABTI_MEM_LP_MALLOC = 0,
    ABTI_MEM_LP_MMAP_RP,
    ABTI_MEM_LP_MMAP_HP_RP,
    ABTI_MEM_LP_MMAP_HP_THP,
    ABTI_MEM_LP_THP
};

ABTU_ret_err int ABTI_mem_init(ABTI_global *p_global);
ABTU_ret_err int ABTI_mem_init_local(ABTI_global *p_global,
                                     ABTI_xstream *p_local_xstream);
void ABTI_mem_finalize(ABTI_global *p_global);
void ABTI_mem_finalize_local(ABTI_xstream *p_local_xstream);
int ABTI_mem_check_lp_alloc(ABTI_global *p_global, int lp_alloc);

#define ABTI_STACK_CANARY_VALUE ((uint64_t)0xbaadc0debaadc0de)

/* Inline functions */
#if ABT_CONFIG_STACK_CHECK_TYPE == ABTI_STACK_CHECK_TYPE_CANARY
static inline void ABTI_mem_write_stack_canary(void *p_stack)
{
    /* Write down stack canary. */
    uint64_t i;
    for (i = 0; i < ABTU_roundup_uint64(ABT_CONFIG_STACK_CHECK_CANARY_SIZE, 8);
         i += sizeof(uint64_t)) {
        ((uint64_t *)p_stack)[i] = ABTI_STACK_CANARY_VALUE;
    }
}

static inline void ABTI_mem_check_stack_canary(void *p_stack)
{
    uint64_t i;
    for (i = 0; i < ABTU_roundup_uint64(ABT_CONFIG_STACK_CHECK_CANARY_SIZE, 8);
         i += sizeof(uint64_t)) {
        ABTI_ASSERT(((uint64_t *)p_stack)[i] == ABTI_STACK_CANARY_VALUE);
    }
}
#endif
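
/* Usage sketch (illustration only): when ABT_CONFIG_STACK_CHECK_TYPE selects
 * the canary mode, the canary words are stamped at the lowest addresses of a
 * ULT stack, i.e. the end that a downward-growing stack overruns first, and
 * they are re-checked when the stack is retired.  A minimal sketch, assuming
 * stacksize is a regular ULT stack size (e.g. p_global->thread_stacksize): */
#if 0
#include <stdlib.h>
static void example_canary_roundtrip(size_t stacksize)
{
    void *p_stack = malloc(stacksize);    /* lowest address of the stack */
    ABTI_mem_write_stack_canary(p_stack); /* stamp the canary words */
    /* ... run the ULT; a stack overflow would clobber these words ... */
    ABTI_mem_check_stack_canary(p_stack); /* asserts if the canary changed */
    free(p_stack);
}
#endif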

/* p_stack can be NULL. */
static inline void ABTI_mem_register_stack(const ABTI_global *p_global,
                                           void *p_stacktop, size_t stacksize,
                                           ABT_bool mprotect_if_needed)
{
    void *p_stack = (void *)(((char *)p_stacktop) - stacksize);
    if (mprotect_if_needed) {
        if (p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT ||
            p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT_STRICT) {
            if (p_stack) {
                int abt_errno =
                    ABTU_mprotect(ABTU_roundup_ptr(p_stack,
                                                   p_global->sys_page_size),
                                  p_global->sys_page_size, ABT_TRUE);
                if (p_global->stack_guard_kind ==
                    ABTI_STACK_GUARD_MPROTECT_STRICT) {
                    ABTI_ASSERT(abt_errno == ABT_SUCCESS);
                }
            }
        } else {
#if ABT_CONFIG_STACK_CHECK_TYPE == ABTI_STACK_CHECK_TYPE_CANARY
            if (p_stack) {
                ABTI_mem_write_stack_canary(p_stack);
            }
#endif
        }
    } else {
#if ABT_CONFIG_STACK_CHECK_TYPE == ABTI_STACK_CHECK_TYPE_CANARY
        if (!(p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT ||
              p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT_STRICT) &&
            p_stack) {
            ABTI_mem_write_stack_canary(p_stack);
        }
#endif
    }
    ABTI_VALGRIND_REGISTER_STACK(p_stack, stacksize);
}

static inline void ABTI_mem_unregister_stack(const ABTI_global *p_global,
                                             void *p_stacktop, size_t stacksize,
                                             ABT_bool mprotect_if_needed)
{
    void *p_stack = (void *)(((char *)p_stacktop) - stacksize);
    if (mprotect_if_needed) {
        if (p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT ||
            p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT_STRICT) {
            if (p_stack) {
                int abt_errno =
                    ABTU_mprotect(ABTU_roundup_ptr(p_stack,
                                                   p_global->sys_page_size),
                                  p_global->sys_page_size, ABT_FALSE);
                /* This should not fail since otherwise we cannot free this
                 * memory. */
                ABTI_ASSERT(abt_errno == ABT_SUCCESS);
            }
        } else {
#if ABT_CONFIG_STACK_CHECK_TYPE == ABTI_STACK_CHECK_TYPE_CANARY
            if (p_stack) {
                ABTI_mem_check_stack_canary(p_stack);
            }
#endif
        }
    } else {
#if ABT_CONFIG_STACK_CHECK_TYPE == ABTI_STACK_CHECK_TYPE_CANARY
        if (!(p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT ||
              p_global->stack_guard_kind == ABTI_STACK_GUARD_MPROTECT_STRICT) &&
            p_stack) {
            ABTI_mem_check_stack_canary(p_stack);
        }
#endif
    }
    ABTI_VALGRIND_UNREGISTER_STACK(p_stack);
}
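
/* Usage sketch (illustration only): for a malloc()-backed stack the two
 * helpers above are used symmetrically: register right after allocation with
 * mprotect_if_needed == ABT_TRUE so a guard page can be armed, and unregister
 * with the same flag right before the memory is returned to the allocator.
 * A minimal sketch; p_global is assumed to be initialized: */
#if 0
static int example_stack_registration(ABTI_global *p_global, size_t stacksize)
{
    char *p_stack;
    int abt_errno = ABTU_malloc(stacksize, (void **)&p_stack);
    if (abt_errno != ABT_SUCCESS)
        return abt_errno;
    /* Stacks grow downward, so the top is the end of the allocation. */
    void *p_stacktop = (void *)(p_stack + stacksize);
    ABTI_mem_register_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
    /* ... the stack is used by a ULT ... */
    ABTI_mem_unregister_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
    ABTU_free(p_stack);
    return ABT_SUCCESS;
}
#endif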

ABTU_ret_err static inline int ABTI_mem_alloc_nythread(ABTI_local *p_local,
                                                       ABTI_thread **pp_thread)
{
    ABTI_STATIC_ASSERT(sizeof(ABTI_thread) <= ABTI_MEM_POOL_DESC_ELEM_SIZE);
    ABTI_thread *p_thread;
#ifdef ABT_CONFIG_USE_MEM_POOL
    ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
    if (!ABTI_IS_EXT_THREAD_ENABLED || p_local_xstream) {
        /* It's not called on an external thread. Use a memory pool. */
        int abt_errno = ABTI_mem_pool_alloc(&p_local_xstream->mem_pool_desc,
                                            (void **)&p_thread);
        ABTI_CHECK_ERROR(abt_errno);
        p_thread->type = ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC;
    } else
#endif
    {
        int abt_errno =
            ABTU_malloc(ABTI_MEM_POOL_DESC_ELEM_SIZE, (void **)&p_thread);
        ABTI_CHECK_ERROR(abt_errno);
        p_thread->type = ABTI_THREAD_TYPE_MEM_MALLOC_DESC;
    }
    *pp_thread = p_thread;
    return ABT_SUCCESS;
}
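
/* Usage sketch (illustration only): ABTI_mem_alloc_nythread() records in
 * p_thread->type whether the descriptor came from the per-xstream pool or
 * from ABTU_malloc(), and the matching release path is the type dispatch in
 * ABTI_mem_free_thread() further below.  A minimal sketch of the pairing: */
#if 0
static int example_nythread_lifetime(ABTI_global *p_global, ABTI_local *p_local)
{
    ABTI_thread *p_thread;
    int abt_errno = ABTI_mem_alloc_nythread(p_local, &p_thread);
    if (abt_errno != ABT_SUCCESS)
        return abt_errno;
    /* ... initialize and run the non-yieldable work unit ... */
    ABTI_mem_free_thread(p_global, p_local, p_thread); /* dispatches on type */
    return ABT_SUCCESS;
}
#endif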

static inline void ABTI_mem_free_nythread_mempool_impl(ABTI_global *p_global,
                                                       ABTI_local *p_local,
                                                       ABTI_thread *p_thread)
{
    /* Return a descriptor. */
#ifdef ABT_CONFIG_USE_MEM_POOL
    ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
#ifdef ABT_CONFIG_DISABLE_EXT_THREAD
    /* Came from a memory pool. */
    ABTI_mem_pool_free(&p_local_xstream->mem_pool_desc, p_thread);
#else
    if (p_local_xstream) {
        /* Came from a memory pool. */
        ABTI_mem_pool_free(&p_local_xstream->mem_pool_desc, p_thread);
    } else {
        /* Return the descriptor to the global pool. */
        ABTD_spinlock_acquire(&p_global->mem_pool_desc_lock);
        ABTI_mem_pool_free(&p_global->mem_pool_desc_ext, p_thread);
        ABTD_spinlock_release(&p_global->mem_pool_desc_lock);
    }
#endif
#else /* !ABT_CONFIG_USE_MEM_POOL */
    /* If a memory pool is disabled, this function should not be called. */
    ABTI_ASSERT(0);
#endif
}

ABTU_ret_err static inline int
ABTI_mem_alloc_ythread_desc_impl(ABTI_local *p_local, ABT_bool use_lazy_stack,
                                 ABTI_ythread **pp_ythread)
{
    ABTI_STATIC_ASSERT(sizeof(ABTI_ythread) <= ABTI_MEM_POOL_DESC_ELEM_SIZE);
    ABTI_ythread *p_ythread;
#ifdef ABT_CONFIG_USE_MEM_POOL
    ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
    if (!ABTI_IS_EXT_THREAD_ENABLED || p_local_xstream) {
        /* It's not called on an external thread. Use a memory pool. */
        int abt_errno = ABTI_mem_pool_alloc(&p_local_xstream->mem_pool_desc,
                                            (void **)&p_ythread);
        ABTI_CHECK_ERROR(abt_errno);
        p_ythread->thread.type =
            use_lazy_stack
                ? ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_MEMPOOL_LAZY_STACK
                : ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC;
    } else
#endif
    {
        int abt_errno =
            ABTU_malloc(ABTI_MEM_POOL_DESC_ELEM_SIZE, (void **)&p_ythread);
        ABTI_CHECK_ERROR(abt_errno);
        p_ythread->thread.type =
            use_lazy_stack ? ABTI_THREAD_TYPE_MEM_MALLOC_DESC_MEMPOOL_LAZY_STACK
                           : ABTI_THREAD_TYPE_MEM_MALLOC_DESC;
    }
    *pp_ythread = p_ythread;
    return ABT_SUCCESS;
}

static inline void ABTI_mem_free_ythread_desc_mempool_impl(
    ABTI_global *p_global, ABTI_local *p_local, ABTI_ythread *p_ythread)
{
    /* Return a descriptor. */
#ifdef ABT_CONFIG_USE_MEM_POOL
    ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
#ifdef ABT_CONFIG_DISABLE_EXT_THREAD
    /* Came from a memory pool. */
    ABTI_mem_pool_free(&p_local_xstream->mem_pool_desc, p_ythread);
#else
    if (p_local_xstream) {
        /* Came from a memory pool. */
        ABTI_mem_pool_free(&p_local_xstream->mem_pool_desc, p_ythread);
    } else {
        /* Return the descriptor to the global pool. */
        ABTD_spinlock_acquire(&p_global->mem_pool_desc_lock);
        ABTI_mem_pool_free(&p_global->mem_pool_desc_ext, p_ythread);
        ABTD_spinlock_release(&p_global->mem_pool_desc_lock);
    }
#endif
#else /* !ABT_CONFIG_USE_MEM_POOL */
    /* If a memory pool is disabled, this function should not be called. */
    ABTI_ASSERT(0);
#endif
}

#ifdef ABT_CONFIG_USE_MEM_POOL
ABTU_ret_err static inline int ABTI_mem_alloc_ythread_mempool_desc_stack_impl(
    ABTI_mem_pool_local_pool *p_mem_pool_stack, size_t stacksize,
    ABTI_ythread **pp_ythread, void **pp_stacktop)
{
    /* stacksize must be a multiple of ABT_CONFIG_STATIC_CACHELINE_SIZE. */
    ABTI_ASSERT((stacksize & (ABT_CONFIG_STATIC_CACHELINE_SIZE - 1)) == 0);
    void *p_ythread;
    int abt_errno = ABTI_mem_pool_alloc(p_mem_pool_stack, &p_ythread);
    ABTI_CHECK_ERROR(abt_errno);

    *pp_stacktop = (void *)p_ythread;
    *pp_ythread = (ABTI_ythread *)p_ythread;
    return ABT_SUCCESS;
}
#endif

ABTU_ret_err static inline int ABTI_mem_alloc_ythread_malloc_desc_stack_impl(
    size_t stacksize, ABTI_ythread **pp_ythread, void **pp_stacktop)
{
    /* stacksize must be a multiple of ABT_CONFIG_STATIC_CACHELINE_SIZE. */
    size_t alloc_stacksize =
        ABTU_roundup_size(stacksize, ABT_CONFIG_STATIC_CACHELINE_SIZE);
    char *p_stack;
    int abt_errno =
        ABTU_malloc(alloc_stacksize + sizeof(ABTI_ythread), (void **)&p_stack);
    ABTI_CHECK_ERROR(abt_errno);

    *pp_stacktop = (void *)(p_stack + alloc_stacksize);
    *pp_ythread = (ABTI_ythread *)(p_stack + alloc_stacksize);
    return ABT_SUCCESS;
}
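
/* Layout sketch (illustration only): both allocators above return a single
 * region in which the ULT stack sits below the ABTI_ythread descriptor and
 * p_stacktop points at the boundary (stacks grow downward):
 *
 *   p_stack                           p_stacktop == (char *)p_ythread
 *   |<------- alloc_stacksize ------->|<-- sizeof(ABTI_ythread) -->|
 *   [     ULT stack (grows down)      ][        descriptor         ]
 *
 * A minimal sketch of the pointer relations, using a hypothetical 16 KiB
 * stack (a multiple of the cacheline size): */
#if 0
static void example_desc_stack_layout(void)
{
    ABTI_ythread *p_ythread;
    void *p_stacktop;
    if (ABTI_mem_alloc_ythread_malloc_desc_stack_impl(16384, &p_ythread,
                                                      &p_stacktop) ==
        ABT_SUCCESS) {
        /* The descriptor starts exactly at the stack top. */
        ABTI_ASSERT((void *)p_ythread == p_stacktop);
        /* The whole region is freed through its base address. */
        void *p_stack = (void *)(((char *)p_stacktop) - 16384);
        ABTU_free(p_stack);
    }
}
#endif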

ABTU_ret_err static inline int
ABTI_mem_alloc_ythread_mempool_desc_stack(ABTI_global *p_global,
                                          ABTI_local *p_local, size_t stacksize,
                                          ABTI_ythread **pp_ythread)
{
    ABTI_UB_ASSERT(stacksize == p_global->thread_stacksize);
    ABTI_ythread *p_ythread;
#ifdef ABT_CONFIG_USE_MEM_POOL
#ifdef ABT_CONFIG_DISABLE_LAZY_STACK_ALLOC
    const ABT_bool use_lazy_stack = ABT_FALSE;
#else
    const ABT_bool use_lazy_stack = ABT_TRUE;
#endif
    if (use_lazy_stack) {
        /* Only allocate a descriptor here. */
        int abt_errno =
            ABTI_mem_alloc_ythread_desc_impl(p_local, ABT_TRUE, &p_ythread);
        ABTI_CHECK_ERROR(abt_errno);
        /* Initialize the context. */
        ABTD_ythread_context_init_lazy(&p_ythread->ctx, stacksize);
        *pp_ythread = p_ythread;
        return ABT_SUCCESS;
    } else {
        void *p_stacktop;
        /* Allocate a ULT stack and a descriptor together. */
        ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
        if (!ABTI_IS_EXT_THREAD_ENABLED || p_local_xstream) {
            int abt_errno = ABTI_mem_alloc_ythread_mempool_desc_stack_impl(
                &p_local_xstream->mem_pool_stack, stacksize, &p_ythread,
                &p_stacktop);
            ABTI_CHECK_ERROR(abt_errno);
            p_ythread->thread.type = ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_STACK;
            ABTI_mem_register_stack(p_global, p_stacktop, stacksize, ABT_FALSE);
        } else {
            /* If an external thread allocates a stack, we use ABTU_malloc. */
            int abt_errno =
                ABTI_mem_alloc_ythread_malloc_desc_stack_impl(stacksize,
                                                              &p_ythread,
                                                              &p_stacktop);
            ABTI_CHECK_ERROR(abt_errno);
            p_ythread->thread.type = ABTI_THREAD_TYPE_MEM_MALLOC_DESC_STACK;
            ABTI_mem_register_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
        }
        /* Initialize the context. */
        ABTD_ythread_context_init(&p_ythread->ctx, p_stacktop, stacksize);
        *pp_ythread = p_ythread;
        return ABT_SUCCESS;
    }
#else
    void *p_stacktop;
    int abt_errno =
        ABTI_mem_alloc_ythread_malloc_desc_stack_impl(stacksize, &p_ythread,
                                                      &p_stacktop);
    ABTI_CHECK_ERROR(abt_errno);
    p_ythread->thread.type = ABTI_THREAD_TYPE_MEM_MALLOC_DESC_STACK;
    ABTI_mem_register_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
    /* Initialize the context. */
    ABTD_ythread_context_init(&p_ythread->ctx, p_stacktop, stacksize);
    *pp_ythread = p_ythread;
    return ABT_SUCCESS;
#endif
}

ABTU_ret_err static inline int
ABTI_mem_alloc_ythread_default(ABTI_global *p_global, ABTI_local *p_local,
                               ABTI_ythread **pp_ythread)
{
    size_t stacksize = p_global->thread_stacksize;
    return ABTI_mem_alloc_ythread_mempool_desc_stack(p_global, p_local,
                                                     stacksize, pp_ythread);
}

ABTU_ret_err static inline int ABTI_mem_alloc_ythread_malloc_desc_stack(
    ABTI_global *p_global, size_t stacksize, ABTI_ythread **pp_ythread)
{
    ABTI_ythread *p_ythread;
    void *p_stacktop;
    int abt_errno =
        ABTI_mem_alloc_ythread_malloc_desc_stack_impl(stacksize, &p_ythread,
                                                      &p_stacktop);
    ABTI_CHECK_ERROR(abt_errno);

    /* Initialize the context. */
    p_ythread->thread.type = ABTI_THREAD_TYPE_MEM_MALLOC_DESC_STACK;
    ABTD_ythread_context_init(&p_ythread->ctx, p_stacktop, stacksize);
    ABTI_mem_register_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
    *pp_ythread = p_ythread;
    return ABT_SUCCESS;
}

ABTU_ret_err static inline int
ABTI_mem_alloc_ythread_mempool_desc(ABTI_global *p_global, ABTI_local *p_local,
                                    size_t stacksize, void *p_stacktop,
                                    ABTI_ythread **pp_ythread)
{
    ABTI_ythread *p_ythread;

    /* Use a descriptor pool for ABT_ythread. */
    ABTI_STATIC_ASSERT(sizeof(ABTI_ythread) <= ABTI_MEM_POOL_DESC_ELEM_SIZE);
    ABTI_STATIC_ASSERT(offsetof(ABTI_ythread, thread) == 0);
    int abt_errno =
        ABTI_mem_alloc_nythread(p_local, (ABTI_thread **)&p_ythread);
    ABTI_CHECK_ERROR(abt_errno);
    /* Initialize the context. */
    ABTD_ythread_context_init(&p_ythread->ctx, p_stacktop, stacksize);
    ABTI_mem_register_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
    *pp_ythread = p_ythread;
    return ABT_SUCCESS;
}

static inline void ABTI_mem_free_thread(ABTI_global *p_global,
                                        ABTI_local *p_local,
                                        ABTI_thread *p_thread)
{
    /* Return stack. */
#ifdef ABT_CONFIG_USE_MEM_POOL
    if (p_thread->type & ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_STACK) {
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread(p_thread);
        ABTI_mem_unregister_stack(p_global,
                                  ABTD_ythread_context_get_stacktop(
                                      &p_ythread->ctx),
                                  ABTD_ythread_context_get_stacksize(
                                      &p_ythread->ctx),
                                  ABT_FALSE);

        ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
        /* Came from a memory pool. */
#ifndef ABT_CONFIG_DISABLE_EXT_THREAD
        if (p_local_xstream == NULL) {
            /* Return a stack to the global pool. */
            ABTD_spinlock_acquire(&p_global->mem_pool_stack_lock);
            ABTI_mem_pool_free(&p_global->mem_pool_stack_ext, p_ythread);
            ABTD_spinlock_release(&p_global->mem_pool_stack_lock);
            return;
        }
#endif
        ABTI_mem_pool_free(&p_local_xstream->mem_pool_stack, p_ythread);
    } else
#endif
        if (p_thread->type &
            ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_MEMPOOL_LAZY_STACK) {
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread(p_thread);
        /* If it is a lazy stack ULT, it should not have a stack. */
        ABTI_UB_ASSERT(!ABTD_ythread_context_has_stack(&p_ythread->ctx));
        ABTI_mem_free_ythread_desc_mempool_impl(p_global, p_local, p_ythread);
    } else if (p_thread->type & ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC) {
        /* Non-yieldable thread or yieldable thread without stack. */
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread_or_null(p_thread);
        if (p_ythread) {
            ABTI_mem_unregister_stack(p_global,
                                      ABTD_ythread_context_get_stacktop(
                                          &p_ythread->ctx),
                                      ABTD_ythread_context_get_stacksize(
                                          &p_ythread->ctx),
                                      ABT_TRUE);
            ABTI_mem_free_ythread_desc_mempool_impl(p_global, p_local,
                                                    p_ythread);
        } else {
            ABTI_mem_free_nythread_mempool_impl(p_global, p_local, p_thread);
        }
    } else if (p_thread->type & ABTI_THREAD_TYPE_MEM_MALLOC_DESC_STACK) {
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread(p_thread);
        void *p_stacktop = ABTD_ythread_context_get_stacktop(&p_ythread->ctx);
        size_t stacksize = ABTD_ythread_context_get_stacksize(&p_ythread->ctx);
        ABTI_mem_unregister_stack(p_global, p_stacktop, stacksize, ABT_TRUE);
        void *p_stack = (void *)(((char *)p_stacktop) - stacksize);
        ABTU_free(p_stack);
    } else if (p_thread->type &
               ABTI_THREAD_TYPE_MEM_MALLOC_DESC_MEMPOOL_LAZY_STACK) {
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread(p_thread);
        /* If it is a lazy stack ULT, it should not have a stack. */
        ABTI_UB_ASSERT(!ABTD_ythread_context_has_stack(&p_ythread->ctx));
        ABTU_free(p_ythread);
    } else {
        ABTI_ASSERT(p_thread->type & ABTI_THREAD_TYPE_MEM_MALLOC_DESC);
        ABTI_STATIC_ASSERT(offsetof(ABTI_ythread, thread) == 0);
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread_or_null(p_thread);
        if (p_ythread)
            ABTI_mem_unregister_stack(p_global,
                                      ABTD_ythread_context_get_stacktop(
                                          &p_ythread->ctx),
                                      ABTD_ythread_context_get_stacksize(
                                          &p_ythread->ctx),
                                      ABT_TRUE);
        ABTU_free(p_thread);
    }
}

ABTU_ret_err static inline int
ABTI_mem_alloc_ythread_mempool_stack(ABTI_xstream *p_local_xstream,
                                     ABTI_ythread *p_ythread)
{
#ifdef ABT_CONFIG_USE_MEM_POOL
    ABTI_UB_ASSERT(p_ythread->thread.type &
                   (ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_MEMPOOL_LAZY_STACK |
                    ABTI_THREAD_TYPE_MEM_MALLOC_DESC_MEMPOOL_LAZY_STACK));
    void *p_stacktop;
    int abt_errno =
        ABTI_mem_pool_alloc(&p_local_xstream->mem_pool_stack, &p_stacktop);
    ABTI_CHECK_ERROR(abt_errno);
    ABTD_ythread_context_lazy_set_stack(&p_ythread->ctx, p_stacktop);
    return ABT_SUCCESS;
#else
    /* This function should not be called. */
    ABTI_ASSERT(0);
    return 0;
#endif
}

static inline void
ABTI_mem_free_ythread_mempool_stack(ABTI_xstream *p_local_xstream,
                                    ABTI_ythread *p_ythread)
{
#ifdef ABT_CONFIG_USE_MEM_POOL
    ABTI_UB_ASSERT(p_ythread->thread.type &
                   (ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_MEMPOOL_LAZY_STACK |
                    ABTI_THREAD_TYPE_MEM_MALLOC_DESC_MEMPOOL_LAZY_STACK));
    void *p_stacktop = ABTD_ythread_context_get_stacktop(&p_ythread->ctx);
    ABTD_ythread_context_lazy_unset_stack(&p_ythread->ctx);
    ABTI_mem_pool_free(&p_local_xstream->mem_pool_stack, p_stacktop);
#else
    /* This function should not be called. */
    ABTI_ASSERT(0);
#endif
}
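
/* Lifecycle sketch (illustration only): with lazy stack allocation,
 * ABTI_mem_alloc_ythread_mempool_desc_stack() creates only the descriptor; a
 * pool-backed stack is attached shortly before the ULT first runs and
 * detached once it is no longer needed, so idle ULTs do not pin a stack.
 * A rough sketch, assuming the caller runs on an execution stream and that
 * the descriptor has one of the *_MEMPOOL_LAZY_STACK types: */
#if 0
static int example_lazy_stack_lifecycle(ABTI_global *p_global,
                                        ABTI_local *p_local,
                                        ABTI_xstream *p_local_xstream)
{
    ABTI_ythread *p_ythread;
    int abt_errno =
        ABTI_mem_alloc_ythread_default(p_global, p_local, &p_ythread);
    if (abt_errno != ABT_SUCCESS)
        return abt_errno;
    /* Attach a pooled stack lazily. */
    abt_errno = ABTI_mem_alloc_ythread_mempool_stack(p_local_xstream, p_ythread);
    if (abt_errno != ABT_SUCCESS)
        return abt_errno;
    /* ... run the ULT to completion ... */
    ABTI_mem_free_ythread_mempool_stack(p_local_xstream, p_ythread);
    ABTI_mem_free_thread(p_global, p_local, &p_ythread->thread);
    return ABT_SUCCESS;
}
#endif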
497 
498 /* Generic scalable memory pools. It uses a memory pool for ABTI_thread.
499  * The last four bytes will be used to determine whether the descriptor is
500  * allocated externally (i.e., malloc()) or taken from a memory pool. */
501 #define ABTI_MEM_POOL_DESC_SIZE (ABTI_MEM_POOL_DESC_ELEM_SIZE - 4)
502 
503 ABTU_ret_err static inline int ABTI_mem_alloc_desc(ABTI_local *p_local,
504  void **pp_desc)
505 {
506 #ifndef ABT_CONFIG_USE_MEM_POOL
507  return ABTU_malloc(ABTI_MEM_POOL_DESC_SIZE, pp_desc);
508 #else
509  void *p_desc;
510  ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
511  if (ABTI_IS_EXT_THREAD_ENABLED && p_local_xstream == NULL) {
512  /* For external threads */
513  int abt_errno = ABTU_malloc(ABTI_MEM_POOL_DESC_SIZE, &p_desc);
514  ABTI_CHECK_ERROR(abt_errno);
515  *(uint32_t *)(((char *)p_desc) + ABTI_MEM_POOL_DESC_SIZE) = 1;
516  *pp_desc = p_desc;
517  return ABT_SUCCESS;
518  } else {
519  /* Find the page that has an empty block */
520  int abt_errno =
521  ABTI_mem_pool_alloc(&p_local_xstream->mem_pool_desc, &p_desc);
522  ABTI_CHECK_ERROR(abt_errno);
523  /* To distinguish it from a malloc'ed case, assign non-NULL value. */
524  *(uint32_t *)(((char *)p_desc) + ABTI_MEM_POOL_DESC_SIZE) = 0;
525  *pp_desc = p_desc;
526  return ABT_SUCCESS;
527  }
528 #endif
529 }
530 
531 static inline void ABTI_mem_free_desc(ABTI_global *p_global,
532  ABTI_local *p_local, void *p_desc)
533 {
534 #ifndef ABT_CONFIG_USE_MEM_POOL
535  ABTU_free(p_desc);
536 #else
537  ABTI_xstream *p_local_xstream = ABTI_local_get_xstream_or_null(p_local);
538 #ifndef ABT_CONFIG_DISABLE_EXT_THREAD
539  if (*(uint32_t *)(((char *)p_desc) + ABTI_MEM_POOL_DESC_SIZE)) {
540  /* This was allocated by an external thread. */
541  ABTU_free(p_desc);
542  return;
543  } else if (!p_local_xstream) {
544  /* Return a stack and a descriptor to their global pools. */
545  ABTD_spinlock_acquire(&p_global->mem_pool_desc_lock);
546  ABTI_mem_pool_free(&p_global->mem_pool_desc_ext, p_desc);
547  ABTD_spinlock_release(&p_global->mem_pool_desc_lock);
548  return;
549  }
550 #endif
551  ABTI_mem_pool_free(&p_local_xstream->mem_pool_desc, p_desc);
552 #endif
553 }
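
/* Usage sketch (illustration only): ABTI_mem_alloc_desc() hides the
 * pool-vs-malloc decision behind the 4-byte tag stored at offset
 * ABTI_MEM_POOL_DESC_SIZE, so callers simply pair it with
 * ABTI_mem_free_desc() and never inspect the tag themselves: */
#if 0
static int example_desc_roundtrip(ABTI_global *p_global, ABTI_local *p_local)
{
    void *p_desc;
    int abt_errno = ABTI_mem_alloc_desc(p_local, &p_desc);
    if (abt_errno != ABT_SUCCESS)
        return abt_errno;
    /* ... use up to ABTI_MEM_POOL_DESC_SIZE bytes of p_desc ... */
    ABTI_mem_free_desc(p_global, p_local, p_desc); /* routes by the tag */
    return ABT_SUCCESS;
}
#endif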

#endif /* ABTI_MEM_H_INCLUDED */

Referenced symbols (cross-references for the listing above):
- ABT_bool: int ABT_bool, Boolean type (abt.h:1043)
- ABTU_roundup_ptr: static void *ABTU_roundup_ptr(void *ptr, size_t multiple) (abtu.h:105)
- ABTU_roundup_size: static size_t ABTU_roundup_size(size_t val, size_t multiple) (abtu.h:95)
- ABTU_mprotect: ABTU_ret_err int ABTU_mprotect(void *addr, size_t size, ABT_bool protect) (mprotect.c:9)
- ABTU_malloc: static ABTU_ret_err int ABTU_malloc(size_t size, void **p_ptr) (abtu.h:235)
- ABTU_roundup_uint64: static uint64_t ABTU_roundup_uint64(uint64_t val, uint64_t multiple) (abtu.h:85)
- ABT_CONFIG_STATIC_CACHELINE_SIZE: #define (abt_config.h:81)
- ABT_SUCCESS: #define, error code: the routine returns successfully (abt.h:92)
- ABTU_ret_err: #define (abtu.h:155)
- ABT_TRUE: #define, true constant for ABT_bool (abt.h:784)
- ABT_FALSE: #define, false constant for ABT_bool (abt.h:786)
- ABTU_free: static void ABTU_free(void *ptr) (abtu.h:228)
- ABT_CONFIG_STACK_CHECK_CANARY_SIZE: #define (abt_config.h:75)