1 #ifndef JEMALLOC_INTERNAL_TSD_INLINES_H
2 #define JEMALLOC_INTERNAL_TSD_INLINES_H
3
#ifndef JEMALLOC_ENABLE_INLINE
/*
 * Non-inline build: emit prototypes only; the definitions below (guarded by
 * JEMALLOC_TSD_C_) are compiled into exactly one translation unit.
 */
malloc_tsd_protos(JEMALLOC_ATTR(unused), , tsd_t)

tsd_t *tsd_fetch_impl(bool init);
tsd_t *tsd_fetch(void);
tsdn_t *tsd_tsdn(tsd_t *tsd);
bool tsd_nominal(tsd_t *tsd);
/*
 * Declare the per-field accessor trio (pointer-get, get, set) for every
 * field enumerated by MALLOC_TSD.  The c (cleanup) argument is unused here.
 */
#define O(n, t, c)							\
t	*tsd_##n##p_get(tsd_t *tsd);					\
t	tsd_##n##_get(tsd_t *tsd);					\
void	tsd_##n##_set(tsd_t *tsd, t n);
MALLOC_TSD
#undef O
tsdn_t *tsdn_fetch(void);
bool tsdn_null(const tsdn_t *tsdn);
tsd_t *tsdn_tsd(tsdn_t *tsdn);
rtree_ctx_t *tsdn_rtree_ctx(tsdn_t *tsdn, rtree_ctx_t *fallback);
#endif
22
#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_TSD_C_))
/*
 * Instantiate the TSD boilerplate for tsd_t: the extern declarations plus the
 * tsd_get()/tsd_set()/tsd_booted_get() machinery, with tsd_initializer as the
 * initial value and tsd_cleanup as the destructor hook.
 */
malloc_tsd_externs(, tsd_t)
malloc_tsd_funcs(JEMALLOC_ALWAYS_INLINE, , tsd_t, tsd_initializer, tsd_cleanup)
26
JEMALLOC_ALWAYS_INLINE tsd_t *
tsd_fetch_impl(bool init)
{
	tsd_t *tsd = tsd_get(init);

	/*
	 * When not initializing, a NULL result is legitimate on platforms
	 * where tsd_get() itself allocates; propagate it to the caller.
	 */
	if (!init && tsd_get_allocates() && tsd == NULL)
		return (NULL);
	assert(tsd != NULL);

	/* Lazily transition out of the non-nominal states. */
	if (unlikely(tsd->state != tsd_state_nominal)) {
		if (tsd->state == tsd_state_uninitialized) {
			tsd->state = tsd_state_nominal;
			/* Trigger cleanup handler registration. */
			tsd_set(tsd);
		} else if (tsd->state == tsd_state_purgatory) {
			/*
			 * The thread was in cleanup; revive it as
			 * reincarnated and re-register the handler.
			 */
			tsd->state = tsd_state_reincarnated;
			tsd_set(tsd);
		} else
			assert(tsd->state == tsd_state_reincarnated);
	}

	return (tsd);
}
50
51 JEMALLOC_ALWAYS_INLINE tsd_t *
tsd_fetch(void)52 tsd_fetch(void)
53 {
54 return (tsd_fetch_impl(true));
55 }
56
57 JEMALLOC_ALWAYS_INLINE tsdn_t *
tsd_tsdn(tsd_t * tsd)58 tsd_tsdn(tsd_t *tsd)
59 {
60 return ((tsdn_t *)tsd);
61 }
62
63 JEMALLOC_INLINE bool
tsd_nominal(tsd_t * tsd)64 tsd_nominal(tsd_t *tsd)
65 {
66 return (tsd->state == tsd_state_nominal);
67 }
68
/*
 * For each MALLOC_TSD field (n = name, t = type; c is unused here), generate
 * three accessors: tsd_<n>p_get() returns a pointer to the field,
 * tsd_<n>_get() returns its value, and tsd_<n>_set() stores a value (only
 * legal while the tsd is in the nominal state).
 */
#define O(n, t, c)							\
JEMALLOC_ALWAYS_INLINE t *						\
tsd_##n##p_get(tsd_t *tsd)						\
{									\
	return (&tsd->n);						\
}									\
									\
JEMALLOC_ALWAYS_INLINE t						\
tsd_##n##_get(tsd_t *tsd)						\
{									\
	return (*tsd_##n##p_get(tsd));					\
}									\
									\
JEMALLOC_ALWAYS_INLINE void						\
tsd_##n##_set(tsd_t *tsd, t n)						\
{									\
	assert(tsd->state == tsd_state_nominal);			\
	tsd->n = n;							\
}
MALLOC_TSD
#undef O
90
91 JEMALLOC_ALWAYS_INLINE tsdn_t *
tsdn_fetch(void)92 tsdn_fetch(void)
93 {
94 if (!tsd_booted_get())
95 return (NULL);
96
97 return (tsd_tsdn(tsd_fetch_impl(false)));
98 }
99
100 JEMALLOC_ALWAYS_INLINE bool
tsdn_null(const tsdn_t * tsdn)101 tsdn_null(const tsdn_t *tsdn)
102 {
103 return (tsdn == NULL);
104 }
105
106 JEMALLOC_ALWAYS_INLINE tsd_t *
tsdn_tsd(tsdn_t * tsdn)107 tsdn_tsd(tsdn_t *tsdn)
108 {
109 assert(!tsdn_null(tsdn));
110
111 return (&tsdn->tsd);
112 }
113
114 JEMALLOC_ALWAYS_INLINE rtree_ctx_t *
tsdn_rtree_ctx(tsdn_t * tsdn,rtree_ctx_t * fallback)115 tsdn_rtree_ctx(tsdn_t *tsdn, rtree_ctx_t *fallback)
116 {
117 /*
118 * If tsd cannot be accessed, initialize the fallback rtree_ctx and
119 * return a pointer to it.
120 */
121 if (unlikely(tsdn_null(tsdn))) {
122 static const rtree_ctx_t rtree_ctx = RTREE_CTX_INITIALIZER;
123 memcpy(fallback, &rtree_ctx, sizeof(rtree_ctx_t));
124 return (fallback);
125 }
126 return (tsd_rtree_ctxp_get(tsdn_tsd(tsdn)));
127 }
128 #endif
129
130 #endif /* JEMALLOC_INTERNAL_TSD_INLINES_H */
131