Commit 08970fc4e0385790a7b093adfaa4165a189f9eb0
1 parent: 9ea2b82ed6
Exists in master and in 7 other branches
x86: msr: fix bogus return values from rdmsr_safe/wrmsr_safe
Impact: bogus error codes (+other?) on x86-64

The rdmsr_safe/wrmsr_safe routines have macros for the handling of the edx:eax arguments. Those macros take a variable number of assembly arguments. This is rather inherently incompatible with using %digit-style escapes in the inline assembly; replace those with %[name]-style escapes.

This fixes miscompilation on x86-64, which at the very least caused bogus error values to be returned. It is possible that this could also corrupt the return value; I am not sure.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
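The failure mode, concretely: in native_read_msr_safe() below, EAX_EDX_RET() expands to a single output operand on i386 ("=A" (val)) but to two on x86-64 ("=a" (low), "=d" (high)), so every %digit reference after it shifts by one. The fixup's "mov %3,%0" therefore loaded -EFAULT into *err on 32-bit but the MSR index on 64-bit. The user-space sketch below (not kernel code; OUTS() is a hypothetical stand-in for EAX_EDX_RET(), and the -DEXTRA_OPERAND build flips the operand count) reproduces the pitfall with gcc on x86:

	/* demo.c - hypothetical sketch of the %digit vs %[name] pitfall.
	 * OUTS() stands in for EAX_EDX_RET(): one asm output operand in
	 * one build, two in the other, so every positional index that
	 * follows it shifts by one.
	 */
	#include <stdio.h>

	#ifdef EXTRA_OPERAND
	# define OUTS(v, t) "=r" (v), "=r" (t)	/* two outputs, as on x86-64 */
	#else
	# define OUTS(v, t) "=r" (v)		/* one output, as on i386 */
	#endif

	int main(void)
	{
		int err_pos, err_named, val, tmp = 0, src = 42;

		/* Positional: %2 names `src` only while OUTS() emits one
		 * output.  With -DEXTRA_OPERAND, %2 silently becomes `tmp`
		 * and err_pos ends up with whatever happens to be in tmp's
		 * register rather than src. */
		asm("mov %2, %0"
		    : "=r" (err_pos), OUTS(val, tmp)
		    : "r" (src));

		/* Named: %[src] and %[err] bind correctly either way. */
		asm("mov %[src], %[err]"
		    : [err] "=r" (err_named), OUTS(val, tmp)
		    : [src] "r" (src));

		(void)val; (void)tmp;
		printf("positional: %d   named: %d\n", err_pos, err_named);
		return 0;
	}

Built normally, both values print 42; built with -DEXTRA_OPERAND, the positional copy goes wrong while the named one still prints 42. That silent shift is the miscompilation this commit removes.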
Showing 1 changed file with 8 additions and 8 deletions
include/asm-x86/msr.h
@@ -1,225 +1,225 @@
 #ifndef __ASM_X86_MSR_H_
 #define __ASM_X86_MSR_H_
 
 #include <asm/msr-index.h>
 
 #ifndef __ASSEMBLY__
 # include <linux/types.h>
 #endif
 
 #ifdef __KERNEL__
 #ifndef __ASSEMBLY__
 
 #include <asm/asm.h>
 #include <asm/errno.h>
 
 static inline unsigned long long native_read_tscp(unsigned int *aux)
 {
 	unsigned long low, high;
 	asm volatile(".byte 0x0f,0x01,0xf9"
 		     : "=a" (low), "=d" (high), "=c" (*aux));
 	return low | ((u64)high << 32);
 }
 
 /*
  * i386 calling convention returns 64-bit value in edx:eax, while
  * x86_64 returns at rax. Also, the "A" constraint does not really
  * mean rdx:rax in x86_64, so we need specialized behaviour for each
  * architecture
  */
 #ifdef CONFIG_X86_64
 #define DECLARE_ARGS(val, low, high)	unsigned low, high
 #define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
 #define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
 #define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
 #else
 #define DECLARE_ARGS(val, low, high)	unsigned long long val
 #define EAX_EDX_VAL(val, low, high)	(val)
 #define EAX_EDX_ARGS(val, low, high)	"A" (val)
 #define EAX_EDX_RET(val, low, high)	"=A" (val)
 #endif
 
 static inline unsigned long long native_read_msr(unsigned int msr)
 {
 	DECLARE_ARGS(val, low, high);
 
 	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
 	return EAX_EDX_VAL(val, low, high);
 }
 
 static inline unsigned long long native_read_msr_safe(unsigned int msr,
 						      int *err)
 {
 	DECLARE_ARGS(val, low, high);
 
-	asm volatile("2: rdmsr ; xor %0,%0\n"
+	asm volatile("2: rdmsr ; xor %[err],%[err]\n"
 		     "1:\n\t"
 		     ".section .fixup,\"ax\"\n\t"
-		     "3: mov %3,%0 ; jmp 1b\n\t"
+		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
 		     ".previous\n\t"
 		     _ASM_EXTABLE(2b, 3b)
-		     : "=r" (*err), EAX_EDX_RET(val, low, high)
-		     : "c" (msr), "i" (-EFAULT));
+		     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
+		     : "c" (msr), [fault] "i" (-EFAULT));
 	return EAX_EDX_VAL(val, low, high);
 }
 
 static inline void native_write_msr(unsigned int msr,
 				    unsigned low, unsigned high)
 {
 	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
 }
 
 static inline int native_write_msr_safe(unsigned int msr,
 					unsigned low, unsigned high)
 {
 	int err;
-	asm volatile("2: wrmsr ; xor %0,%0\n"
+	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
 		     "1:\n\t"
 		     ".section .fixup,\"ax\"\n\t"
-		     "3: mov %4,%0 ; jmp 1b\n\t"
+		     "3: mov %[fault],%[err] ; jmp 1b\n\t"
 		     ".previous\n\t"
 		     _ASM_EXTABLE(2b, 3b)
-		     : "=a" (err)
+		     : [err] "=a" (err)
 		     : "c" (msr), "0" (low), "d" (high),
-		       "i" (-EFAULT)
+		       [fault] "i" (-EFAULT)
 		     : "memory");
 	return err;
 }
 
 extern unsigned long long native_read_tsc(void);
 
 static __always_inline unsigned long long __native_read_tsc(void)
 {
 	DECLARE_ARGS(val, low, high);
 
 	rdtsc_barrier();
 	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));
 	rdtsc_barrier();
 
 	return EAX_EDX_VAL(val, low, high);
 }
 
 static inline unsigned long long native_read_pmc(int counter)
 {
 	DECLARE_ARGS(val, low, high);
 
 	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
 	return EAX_EDX_VAL(val, low, high);
 }
 
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
 #include <linux/errno.h>
 /*
  * Access to machine-specific registers (available on 586 and better only)
  * Note: the rd* operations modify the parameters directly (without using
  * pointer indirection), this allows gcc to optimize better
  */
 
 #define rdmsr(msr, val1, val2)					\
 do {								\
 	u64 __val = native_read_msr((msr));			\
 	(val1) = (u32)__val;					\
 	(val2) = (u32)(__val >> 32);				\
 } while (0)
 
 static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
 {
 	native_write_msr(msr, low, high);
 }
 
 #define rdmsrl(msr, val)					\
 	((val) = native_read_msr((msr)))
 
 #define wrmsrl(msr, val)					\
 	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
 
 /* wrmsr with exception handling */
 static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
 {
 	return native_write_msr_safe(msr, low, high);
 }
 
 /* rdmsr with exception handling */
 #define rdmsr_safe(msr, p1, p2)					\
 ({								\
 	int __err;						\
 	u64 __val = native_read_msr_safe((msr), &__err);	\
 	(*p1) = (u32)__val;					\
 	(*p2) = (u32)(__val >> 32);				\
 	__err;							\
 })
 
 static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
 {
 	int err;
 
 	*p = native_read_msr_safe(msr, &err);
 	return err;
 }
 
 #define rdtscl(low)						\
 	((low) = (u32)native_read_tsc())
 
 #define rdtscll(val)						\
 	((val) = native_read_tsc())
 
 #define rdpmc(counter, low, high)				\
 do {								\
 	u64 _l = native_read_pmc((counter));			\
 	(low) = (u32)_l;					\
 	(high) = (u32)(_l >> 32);				\
 } while (0)
 
 #define rdtscp(low, high, aux)					\
 do {								\
 	unsigned long long _val = native_read_tscp(&(aux));	\
 	(low) = (u32)_val;					\
 	(high) = (u32)(_val >> 32);				\
 } while (0)
 
 #define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
 
 #endif	/* !CONFIG_PARAVIRT */
 
 
 #define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val),	\
 					     (u32)((val) >> 32))
 
 #define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))
 
 #define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)
 
 #ifdef CONFIG_SMP
 int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
 int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
 int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
 int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
 #else  /* CONFIG_SMP */
 static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
 {
 	rdmsr(msr_no, *l, *h);
 	return 0;
 }
 static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
 {
 	wrmsr(msr_no, l, h);
 	return 0;
 }
 static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
 				    u32 *l, u32 *h)
 {
 	return rdmsr_safe(msr_no, l, h);
 }
 static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
 {
 	return wrmsr_safe(msr_no, l, h);
 }
 #endif  /* CONFIG_SMP */
 #endif /* __ASSEMBLY__ */
 #endif /* __KERNEL__ */
 
 
 #endif
 
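For context on the return-value contract these fixes restore, a hedged caller sketch (kernel-context code for this era of the tree; the MSR index 0x17, IA32_PLATFORM_ID, and the function name are purely illustrative):

	#include <linux/kernel.h>
	#include <linux/errno.h>
	#include <asm/msr.h>

	/* Probe a possibly-unimplemented MSR without taking an unhandled
	 * #GP: the *_safe variants return 0 on success and -EFAULT (via
	 * the fixup path patched above) if the access faulted. */
	static int probe_platform_id(void)
	{
		u32 lo, hi;

		if (rdmsr_safe(0x17, &lo, &hi)) {	/* non-zero => faulted */
			printk(KERN_WARNING "MSR 0x17 not readable\n");
			return -ENODEV;
		}
		printk(KERN_INFO "MSR 0x17 = %08x:%08x\n", hi, lo);
		return 0;
	}

Before this commit, on x86-64 the error path of rdmsr_safe() returned the MSR index rather than -EFAULT, so a caller like this would log a bogus error code.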