-
Notifications
You must be signed in to change notification settings - Fork 20
/
Copy pathtls.h
171 lines (136 loc) · 4.72 KB
/
tls.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
// Copyright (c) 2016-2017 Nuxi, https://nuxi.nl/
//
// SPDX-License-Identifier: BSD-2-Clause
#ifndef COMMON_TLS_H
#define COMMON_TLS_H
#include <assert.h>
#include <cloudabi_types.h>
#include <stdalign.h>
#include <stddef.h>
#if defined(__aarch64__)
#define TLS_VARIANT 1
// Bytes reserved for the TCB at the address held in the thread
// register. NOTE(review): the AArch64 TLS ABI reserves two pointers
// (16 bytes) here — confirm against the psABI before changing.
#define TCB_SIZE 16
// Fetches the TCB from the CPU's registers.
// tpidr_el0 is the EL0-accessible thread ID register that serves as
// the thread pointer on AArch64.
static inline cloudabi_tcb_t *tcb_get(void) {
  cloudabi_tcb_t *tcb;
  asm volatile("mrs %0, tpidr_el0" : "=r"(tcb));
  return tcb;
}
// Changes the TCB in the CPU's registers by writing tpidr_el0
// directly; no system call is needed, as the register is writable
// from EL0.
static inline void tcb_set(cloudabi_tcb_t *tcb) {
  asm volatile("msr tpidr_el0, %0" : : "r"(tcb));
}
#elif defined(__arm__)
#define TLS_VARIANT 1
// Bytes reserved for the TCB at the thread pointer on 32-bit ARM
// (two 32-bit pointers).
#define TCB_SIZE 8
// Fetches the TCB from the CPU's registers.
// Reads TPIDRURW (CP15, c13, c0, opcode2 = 2), the user read/write
// software thread ID register used as the thread pointer.
static inline cloudabi_tcb_t *tcb_get(void) {
  cloudabi_tcb_t *tcb;
  asm volatile("mrc p15, 0, %0, cr13, cr0, 2" : "=r"(tcb));
  return tcb;
}
// Changes the TCB in the CPU's registers by writing TPIDRURW.
// NOTE(review): this assumes the hardware/kernel permits user-mode
// writes to TPIDRURW — confirm for the supported targets.
static inline void tcb_set(cloudabi_tcb_t *tcb) {
  asm volatile("mcr p15, 0, %0, cr13, cr0, 2" : : "r"(tcb));
}
#elif defined(__i386__)
#define TLS_VARIANT 2
// Fetches the TCB from the CPU's registers.
// On i386 the thread pointer is the base of %gs, and the word at
// offset 0 from that base holds a pointer to the current TCB; loading
// %gs:0 therefore yields the TCB's address.
static inline cloudabi_tcb_t *tcb_get(void) {
  cloudabi_tcb_t *tcb;
  asm volatile("mov %%gs:0, %0" : "=r"(tcb));
  return tcb;
}
// Changes the TCB in the CPU's registers.
// Stores the new TCB pointer at %gs:0. The %gs base itself is left
// untouched — only the pointer stored there is redirected, which is
// what tcb_get() reads back — so no system call is needed.
static inline void tcb_set(cloudabi_tcb_t *tcb) {
  asm volatile("mov %0, %%gs:0" : : "r"(tcb));
}
#elif defined(__x86_64__)
#define TLS_VARIANT 2
// Fetches the TCB from the CPU's registers.
// On x86-64 the thread pointer is the base of %fs, and the word at
// offset 0 from that base holds a pointer to the current TCB; loading
// %fs:0 therefore yields the TCB's address.
static inline cloudabi_tcb_t *tcb_get(void) {
  cloudabi_tcb_t *tcb;
  asm volatile("mov %%fs:0, %0" : "=r"(tcb));
  return tcb;
}
// Changes the TCB in the CPU's registers.
// Stores the new TCB pointer at %fs:0. The %fs base itself is left
// untouched — only the pointer stored there is redirected, which is
// what tcb_get() reads back — so no system call is needed.
static inline void tcb_set(cloudabi_tcb_t *tcb) {
  asm volatile("mov %0, %%fs:0" : : "r"(tcb));
}
#else
#error "Unsupported architecture"
#endif
#if TLS_VARIANT == 1
// TLS Variant I: TLS register points to the TCB. The TLS data is stored
// after the TCB. This approach has the disadvantage that the TCB size
// needs to be known.
// Compile-time guard: the ABI-reserved TCB_SIZE bytes in front of the
// TLS data must be large enough to hold our TCB structure.
static_assert(sizeof(cloudabi_tcb_t) <= TCB_SIZE,
              "TCB does not fit in reserved space before TLS");
// Computes the total size needed to store a TCB with TLS data.
//
// The buffer must hold TCB_SIZE bytes of TCB followed by the TLS
// segment, plus worst-case slack so that tcb_addr()/tls_addr() can
// align the pair inside an arbitrarily aligned allocation.
static inline size_t tls_size(void) {
  size_t slack;
  if (__pt_tls_align > alignof(cloudabi_tcb_t))
    slack = __pt_tls_align - 1;
  else
    slack = sizeof(cloudabi_tcb_t) - 1;  // conservative: >= alignof - 1
  return TCB_SIZE + __pt_tls_memsz_aligned + slack;
}
// Computes the address of the TCB in the combined TCB/TLS area.
//
// Whichever of the TCB and the TLS segment has the stricter alignment
// requirement determines where the pair is placed inside buf.
static inline cloudabi_tcb_t *tcb_addr(char *buf) {
  uintptr_t base = (uintptr_t)buf;
  uintptr_t addr;
  if (__pt_tls_align > alignof(cloudabi_tcb_t)) {
    // Align the start of the TLS data; the TCB sits TCB_SIZE bytes
    // before it.
    addr = __roundup(base + TCB_SIZE, __pt_tls_align) - TCB_SIZE;
  } else {
    addr = __roundup(base, alignof(cloudabi_tcb_t));
  }
  return (cloudabi_tcb_t *)addr;
}
// Computes the address of the TLS data in the combined TCB/TLS area.
// Variant I: the TLS segment starts TCB_SIZE bytes past the TCB.
static inline char *tls_addr(char *buf) {
  char *tcb = (char *)tcb_addr(buf);
  return tcb + TCB_SIZE;
}
// Fetches the TLS area of the currently running thread.
// Variant I: the TLS data lives immediately after the TCB.
static inline char *tls_get(void) {
  char *tcb = (char *)tcb_get();
  return tcb + TCB_SIZE;
}
#elif TLS_VARIANT == 2
// TLS Variant II: TLS register points to the TCB. The TLS data is
// stored before the TCB. This approach has the advantage that the TCB
// size does not need to be known.
// Computes the total size needed to store a TCB with TLS data.
//
// The buffer must hold the TLS segment followed by the TCB, plus
// worst-case slack so that tls_addr()/tcb_addr() can align the pair
// inside an arbitrarily aligned allocation.
static inline size_t tls_size(void) {
  size_t slack;
  if (__pt_tls_align > alignof(cloudabi_tcb_t))
    slack = __pt_tls_align - 1;
  else
    slack = sizeof(cloudabi_tcb_t) - 1;  // conservative: >= alignof - 1
  return __pt_tls_memsz_aligned + sizeof(cloudabi_tcb_t) + slack;
}
// Computes the address of the TLS data in the combined TCB/TLS area.
//
// Whichever of the TLS segment and the TCB has the stricter alignment
// requirement determines where the pair is placed inside buf.
static inline char *tls_addr(char *buf) {
  uintptr_t base = (uintptr_t)buf;
  uintptr_t addr;
  if (__pt_tls_align > alignof(cloudabi_tcb_t)) {
    addr = __roundup(base, __pt_tls_align);
  } else {
    // Align the TCB; the TLS data sits directly before it.
    addr = __roundup(base + __pt_tls_memsz_aligned, alignof(cloudabi_tcb_t)) -
           __pt_tls_memsz_aligned;
  }
  return (char *)addr;
}
// Computes the address of the TCB in the combined TCB/TLS area.
// Variant II: the TCB follows immediately after the TLS segment.
static inline cloudabi_tcb_t *tcb_addr(char *buf) {
  char *tls = tls_addr(buf);
  return (cloudabi_tcb_t *)(tls + __pt_tls_memsz_aligned);
}
// Fetches the TLS area of the currently running thread.
// Variant II: the TLS data lives immediately before the TCB.
static inline char *tls_get(void) {
  char *tcb = (char *)tcb_get();
  return tcb - __pt_tls_memsz_aligned;
}
#else
#error "Unknown TLS variant"
#endif
// Changes the CPU's registers to point to a new TLS area.
//
// The TCB of the currently installed TLS area is copied into the new
// area before switching, so that the runtime (kernel, emulator, etc.)
// keeps access to its own private per-thread data across the switch.
static inline void tls_replace(char *buf) {
  cloudabi_tcb_t *fresh = tcb_addr(buf);
  cloudabi_tcb_t *current = tcb_get();
  *fresh = *current;
  tcb_set(fresh);
}
#endif