Commit 5306c31c5733cb4a79cc002e0c3ad256fd439614
Committed by
Ingo Molnar
1 parent
86c269fea3
Exists in
ti-lsk-linux-4.1.y
and in
10 other branches
perf/x86/uncore/hsw-ep: Handle systems with only two SBOXes
There was another report of a boot failure with a #GP fault in the uncore SBOX initialization. The earlier work around was not enough for this system. The boot was failing while trying to initialize the third SBOX. This patch detects parts with only two SBOXes and limits the number of SBOX units to two there. Stable material, as it affects boot problems on 3.18. Tested-by: Andreas Oehler <andreas@oehler-net.de> Signed-off-by: Andi Kleen <ak@linux.intel.com> Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Cc: <stable@vger.kernel.org> Cc: Arnaldo Carvalho de Melo <acme@kernel.org> Cc: Stephane Eranian <eranian@google.com> Cc: Yan, Zheng <zheng.z.yan@intel.com> Link: http://lkml.kernel.org/r/1420583675-9163-1-git-send-email-andi@firstfloor.org Signed-off-by: Ingo Molnar <mingo@kernel.org>
Showing 2 changed files with 18 additions and 1 deletion Inline Diff
arch/x86/kernel/cpu/perf_event_intel_uncore.h
1 | #include <linux/module.h> | 1 | #include <linux/module.h> |
2 | #include <linux/slab.h> | 2 | #include <linux/slab.h> |
3 | #include <linux/pci.h> | 3 | #include <linux/pci.h> |
4 | #include <linux/perf_event.h> | 4 | #include <linux/perf_event.h> |
5 | #include "perf_event.h" | 5 | #include "perf_event.h" |
6 | 6 | ||
7 | #define UNCORE_PMU_NAME_LEN 32 | 7 | #define UNCORE_PMU_NAME_LEN 32 |
8 | #define UNCORE_PMU_HRTIMER_INTERVAL (60LL * NSEC_PER_SEC) | 8 | #define UNCORE_PMU_HRTIMER_INTERVAL (60LL * NSEC_PER_SEC) |
9 | #define UNCORE_SNB_IMC_HRTIMER_INTERVAL (5ULL * NSEC_PER_SEC) | 9 | #define UNCORE_SNB_IMC_HRTIMER_INTERVAL (5ULL * NSEC_PER_SEC) |
10 | 10 | ||
11 | #define UNCORE_FIXED_EVENT 0xff | 11 | #define UNCORE_FIXED_EVENT 0xff |
12 | #define UNCORE_PMC_IDX_MAX_GENERIC 8 | 12 | #define UNCORE_PMC_IDX_MAX_GENERIC 8 |
13 | #define UNCORE_PMC_IDX_FIXED UNCORE_PMC_IDX_MAX_GENERIC | 13 | #define UNCORE_PMC_IDX_FIXED UNCORE_PMC_IDX_MAX_GENERIC |
14 | #define UNCORE_PMC_IDX_MAX (UNCORE_PMC_IDX_FIXED + 1) | 14 | #define UNCORE_PMC_IDX_MAX (UNCORE_PMC_IDX_FIXED + 1) |
15 | 15 | ||
16 | #define UNCORE_PCI_DEV_DATA(type, idx) ((type << 8) | idx) | 16 | #define UNCORE_PCI_DEV_DATA(type, idx) ((type << 8) | idx) |
17 | #define UNCORE_PCI_DEV_TYPE(data) ((data >> 8) & 0xff) | 17 | #define UNCORE_PCI_DEV_TYPE(data) ((data >> 8) & 0xff) |
18 | #define UNCORE_PCI_DEV_IDX(data) (data & 0xff) | 18 | #define UNCORE_PCI_DEV_IDX(data) (data & 0xff) |
19 | #define UNCORE_EXTRA_PCI_DEV 0xff | 19 | #define UNCORE_EXTRA_PCI_DEV 0xff |
20 | #define UNCORE_EXTRA_PCI_DEV_MAX 2 | 20 | #define UNCORE_EXTRA_PCI_DEV_MAX 3 |
21 | 21 | ||
22 | /* support up to 8 sockets */ | 22 | /* support up to 8 sockets */ |
23 | #define UNCORE_SOCKET_MAX 8 | 23 | #define UNCORE_SOCKET_MAX 8 |
24 | 24 | ||
25 | #define UNCORE_EVENT_CONSTRAINT(c, n) EVENT_CONSTRAINT(c, n, 0xff) | 25 | #define UNCORE_EVENT_CONSTRAINT(c, n) EVENT_CONSTRAINT(c, n, 0xff) |
26 | 26 | ||
27 | struct intel_uncore_ops; | 27 | struct intel_uncore_ops; |
28 | struct intel_uncore_pmu; | 28 | struct intel_uncore_pmu; |
29 | struct intel_uncore_box; | 29 | struct intel_uncore_box; |
30 | struct uncore_event_desc; | 30 | struct uncore_event_desc; |
31 | 31 | ||
32 | struct intel_uncore_type { | 32 | struct intel_uncore_type { |
33 | const char *name; | 33 | const char *name; |
34 | int num_counters; | 34 | int num_counters; |
35 | int num_boxes; | 35 | int num_boxes; |
36 | int perf_ctr_bits; | 36 | int perf_ctr_bits; |
37 | int fixed_ctr_bits; | 37 | int fixed_ctr_bits; |
38 | unsigned perf_ctr; | 38 | unsigned perf_ctr; |
39 | unsigned event_ctl; | 39 | unsigned event_ctl; |
40 | unsigned event_mask; | 40 | unsigned event_mask; |
41 | unsigned fixed_ctr; | 41 | unsigned fixed_ctr; |
42 | unsigned fixed_ctl; | 42 | unsigned fixed_ctl; |
43 | unsigned box_ctl; | 43 | unsigned box_ctl; |
44 | unsigned msr_offset; | 44 | unsigned msr_offset; |
45 | unsigned num_shared_regs:8; | 45 | unsigned num_shared_regs:8; |
46 | unsigned single_fixed:1; | 46 | unsigned single_fixed:1; |
47 | unsigned pair_ctr_ctl:1; | 47 | unsigned pair_ctr_ctl:1; |
48 | unsigned *msr_offsets; | 48 | unsigned *msr_offsets; |
49 | struct event_constraint unconstrainted; | 49 | struct event_constraint unconstrainted; |
50 | struct event_constraint *constraints; | 50 | struct event_constraint *constraints; |
51 | struct intel_uncore_pmu *pmus; | 51 | struct intel_uncore_pmu *pmus; |
52 | struct intel_uncore_ops *ops; | 52 | struct intel_uncore_ops *ops; |
53 | struct uncore_event_desc *event_descs; | 53 | struct uncore_event_desc *event_descs; |
54 | const struct attribute_group *attr_groups[4]; | 54 | const struct attribute_group *attr_groups[4]; |
55 | struct pmu *pmu; /* for custom pmu ops */ | 55 | struct pmu *pmu; /* for custom pmu ops */ |
56 | }; | 56 | }; |
57 | 57 | ||
58 | #define pmu_group attr_groups[0] | 58 | #define pmu_group attr_groups[0] |
59 | #define format_group attr_groups[1] | 59 | #define format_group attr_groups[1] |
60 | #define events_group attr_groups[2] | 60 | #define events_group attr_groups[2] |
61 | 61 | ||
62 | struct intel_uncore_ops { | 62 | struct intel_uncore_ops { |
63 | void (*init_box)(struct intel_uncore_box *); | 63 | void (*init_box)(struct intel_uncore_box *); |
64 | void (*disable_box)(struct intel_uncore_box *); | 64 | void (*disable_box)(struct intel_uncore_box *); |
65 | void (*enable_box)(struct intel_uncore_box *); | 65 | void (*enable_box)(struct intel_uncore_box *); |
66 | void (*disable_event)(struct intel_uncore_box *, struct perf_event *); | 66 | void (*disable_event)(struct intel_uncore_box *, struct perf_event *); |
67 | void (*enable_event)(struct intel_uncore_box *, struct perf_event *); | 67 | void (*enable_event)(struct intel_uncore_box *, struct perf_event *); |
68 | u64 (*read_counter)(struct intel_uncore_box *, struct perf_event *); | 68 | u64 (*read_counter)(struct intel_uncore_box *, struct perf_event *); |
69 | int (*hw_config)(struct intel_uncore_box *, struct perf_event *); | 69 | int (*hw_config)(struct intel_uncore_box *, struct perf_event *); |
70 | struct event_constraint *(*get_constraint)(struct intel_uncore_box *, | 70 | struct event_constraint *(*get_constraint)(struct intel_uncore_box *, |
71 | struct perf_event *); | 71 | struct perf_event *); |
72 | void (*put_constraint)(struct intel_uncore_box *, struct perf_event *); | 72 | void (*put_constraint)(struct intel_uncore_box *, struct perf_event *); |
73 | }; | 73 | }; |
74 | 74 | ||
75 | struct intel_uncore_pmu { | 75 | struct intel_uncore_pmu { |
76 | struct pmu pmu; | 76 | struct pmu pmu; |
77 | char name[UNCORE_PMU_NAME_LEN]; | 77 | char name[UNCORE_PMU_NAME_LEN]; |
78 | int pmu_idx; | 78 | int pmu_idx; |
79 | int func_id; | 79 | int func_id; |
80 | struct intel_uncore_type *type; | 80 | struct intel_uncore_type *type; |
81 | struct intel_uncore_box ** __percpu box; | 81 | struct intel_uncore_box ** __percpu box; |
82 | struct list_head box_list; | 82 | struct list_head box_list; |
83 | }; | 83 | }; |
84 | 84 | ||
85 | struct intel_uncore_extra_reg { | 85 | struct intel_uncore_extra_reg { |
86 | raw_spinlock_t lock; | 86 | raw_spinlock_t lock; |
87 | u64 config, config1, config2; | 87 | u64 config, config1, config2; |
88 | atomic_t ref; | 88 | atomic_t ref; |
89 | }; | 89 | }; |
90 | 90 | ||
91 | struct intel_uncore_box { | 91 | struct intel_uncore_box { |
92 | int phys_id; | 92 | int phys_id; |
93 | int n_active; /* number of active events */ | 93 | int n_active; /* number of active events */ |
94 | int n_events; | 94 | int n_events; |
95 | int cpu; /* cpu to collect events */ | 95 | int cpu; /* cpu to collect events */ |
96 | unsigned long flags; | 96 | unsigned long flags; |
97 | atomic_t refcnt; | 97 | atomic_t refcnt; |
98 | struct perf_event *events[UNCORE_PMC_IDX_MAX]; | 98 | struct perf_event *events[UNCORE_PMC_IDX_MAX]; |
99 | struct perf_event *event_list[UNCORE_PMC_IDX_MAX]; | 99 | struct perf_event *event_list[UNCORE_PMC_IDX_MAX]; |
100 | unsigned long active_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)]; | 100 | unsigned long active_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)]; |
101 | u64 tags[UNCORE_PMC_IDX_MAX]; | 101 | u64 tags[UNCORE_PMC_IDX_MAX]; |
102 | struct pci_dev *pci_dev; | 102 | struct pci_dev *pci_dev; |
103 | struct intel_uncore_pmu *pmu; | 103 | struct intel_uncore_pmu *pmu; |
104 | u64 hrtimer_duration; /* hrtimer timeout for this box */ | 104 | u64 hrtimer_duration; /* hrtimer timeout for this box */ |
105 | struct hrtimer hrtimer; | 105 | struct hrtimer hrtimer; |
106 | struct list_head list; | 106 | struct list_head list; |
107 | struct list_head active_list; | 107 | struct list_head active_list; |
108 | void *io_addr; | 108 | void *io_addr; |
109 | struct intel_uncore_extra_reg shared_regs[0]; | 109 | struct intel_uncore_extra_reg shared_regs[0]; |
110 | }; | 110 | }; |
111 | 111 | ||
112 | #define UNCORE_BOX_FLAG_INITIATED 0 | 112 | #define UNCORE_BOX_FLAG_INITIATED 0 |
113 | 113 | ||
114 | struct uncore_event_desc { | 114 | struct uncore_event_desc { |
115 | struct kobj_attribute attr; | 115 | struct kobj_attribute attr; |
116 | const char *config; | 116 | const char *config; |
117 | }; | 117 | }; |
118 | 118 | ||
119 | ssize_t uncore_event_show(struct kobject *kobj, | 119 | ssize_t uncore_event_show(struct kobject *kobj, |
120 | struct kobj_attribute *attr, char *buf); | 120 | struct kobj_attribute *attr, char *buf); |
121 | 121 | ||
122 | #define INTEL_UNCORE_EVENT_DESC(_name, _config) \ | 122 | #define INTEL_UNCORE_EVENT_DESC(_name, _config) \ |
123 | { \ | 123 | { \ |
124 | .attr = __ATTR(_name, 0444, uncore_event_show, NULL), \ | 124 | .attr = __ATTR(_name, 0444, uncore_event_show, NULL), \ |
125 | .config = _config, \ | 125 | .config = _config, \ |
126 | } | 126 | } |
127 | 127 | ||
128 | #define DEFINE_UNCORE_FORMAT_ATTR(_var, _name, _format) \ | 128 | #define DEFINE_UNCORE_FORMAT_ATTR(_var, _name, _format) \ |
129 | static ssize_t __uncore_##_var##_show(struct kobject *kobj, \ | 129 | static ssize_t __uncore_##_var##_show(struct kobject *kobj, \ |
130 | struct kobj_attribute *attr, \ | 130 | struct kobj_attribute *attr, \ |
131 | char *page) \ | 131 | char *page) \ |
132 | { \ | 132 | { \ |
133 | BUILD_BUG_ON(sizeof(_format) >= PAGE_SIZE); \ | 133 | BUILD_BUG_ON(sizeof(_format) >= PAGE_SIZE); \ |
134 | return sprintf(page, _format "\n"); \ | 134 | return sprintf(page, _format "\n"); \ |
135 | } \ | 135 | } \ |
136 | static struct kobj_attribute format_attr_##_var = \ | 136 | static struct kobj_attribute format_attr_##_var = \ |
137 | __ATTR(_name, 0444, __uncore_##_var##_show, NULL) | 137 | __ATTR(_name, 0444, __uncore_##_var##_show, NULL) |
138 | 138 | ||
139 | static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box) | 139 | static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box) |
140 | { | 140 | { |
141 | return box->pmu->type->box_ctl; | 141 | return box->pmu->type->box_ctl; |
142 | } | 142 | } |
143 | 143 | ||
144 | static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box) | 144 | static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box) |
145 | { | 145 | { |
146 | return box->pmu->type->fixed_ctl; | 146 | return box->pmu->type->fixed_ctl; |
147 | } | 147 | } |
148 | 148 | ||
149 | static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box) | 149 | static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box) |
150 | { | 150 | { |
151 | return box->pmu->type->fixed_ctr; | 151 | return box->pmu->type->fixed_ctr; |
152 | } | 152 | } |
153 | 153 | ||
154 | static inline | 154 | static inline |
155 | unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx) | 155 | unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx) |
156 | { | 156 | { |
157 | return idx * 4 + box->pmu->type->event_ctl; | 157 | return idx * 4 + box->pmu->type->event_ctl; |
158 | } | 158 | } |
159 | 159 | ||
160 | static inline | 160 | static inline |
161 | unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx) | 161 | unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx) |
162 | { | 162 | { |
163 | return idx * 8 + box->pmu->type->perf_ctr; | 163 | return idx * 8 + box->pmu->type->perf_ctr; |
164 | } | 164 | } |
165 | 165 | ||
166 | static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box) | 166 | static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box) |
167 | { | 167 | { |
168 | struct intel_uncore_pmu *pmu = box->pmu; | 168 | struct intel_uncore_pmu *pmu = box->pmu; |
169 | return pmu->type->msr_offsets ? | 169 | return pmu->type->msr_offsets ? |
170 | pmu->type->msr_offsets[pmu->pmu_idx] : | 170 | pmu->type->msr_offsets[pmu->pmu_idx] : |
171 | pmu->type->msr_offset * pmu->pmu_idx; | 171 | pmu->type->msr_offset * pmu->pmu_idx; |
172 | } | 172 | } |
173 | 173 | ||
174 | static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box) | 174 | static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box) |
175 | { | 175 | { |
176 | if (!box->pmu->type->box_ctl) | 176 | if (!box->pmu->type->box_ctl) |
177 | return 0; | 177 | return 0; |
178 | return box->pmu->type->box_ctl + uncore_msr_box_offset(box); | 178 | return box->pmu->type->box_ctl + uncore_msr_box_offset(box); |
179 | } | 179 | } |
180 | 180 | ||
181 | static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box) | 181 | static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box) |
182 | { | 182 | { |
183 | if (!box->pmu->type->fixed_ctl) | 183 | if (!box->pmu->type->fixed_ctl) |
184 | return 0; | 184 | return 0; |
185 | return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box); | 185 | return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box); |
186 | } | 186 | } |
187 | 187 | ||
188 | static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box) | 188 | static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box) |
189 | { | 189 | { |
190 | return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box); | 190 | return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box); |
191 | } | 191 | } |
192 | 192 | ||
193 | static inline | 193 | static inline |
194 | unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx) | 194 | unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx) |
195 | { | 195 | { |
196 | return box->pmu->type->event_ctl + | 196 | return box->pmu->type->event_ctl + |
197 | (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + | 197 | (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + |
198 | uncore_msr_box_offset(box); | 198 | uncore_msr_box_offset(box); |
199 | } | 199 | } |
200 | 200 | ||
201 | static inline | 201 | static inline |
202 | unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx) | 202 | unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx) |
203 | { | 203 | { |
204 | return box->pmu->type->perf_ctr + | 204 | return box->pmu->type->perf_ctr + |
205 | (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + | 205 | (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) + |
206 | uncore_msr_box_offset(box); | 206 | uncore_msr_box_offset(box); |
207 | } | 207 | } |
208 | 208 | ||
209 | static inline | 209 | static inline |
210 | unsigned uncore_fixed_ctl(struct intel_uncore_box *box) | 210 | unsigned uncore_fixed_ctl(struct intel_uncore_box *box) |
211 | { | 211 | { |
212 | if (box->pci_dev) | 212 | if (box->pci_dev) |
213 | return uncore_pci_fixed_ctl(box); | 213 | return uncore_pci_fixed_ctl(box); |
214 | else | 214 | else |
215 | return uncore_msr_fixed_ctl(box); | 215 | return uncore_msr_fixed_ctl(box); |
216 | } | 216 | } |
217 | 217 | ||
218 | static inline | 218 | static inline |
219 | unsigned uncore_fixed_ctr(struct intel_uncore_box *box) | 219 | unsigned uncore_fixed_ctr(struct intel_uncore_box *box) |
220 | { | 220 | { |
221 | if (box->pci_dev) | 221 | if (box->pci_dev) |
222 | return uncore_pci_fixed_ctr(box); | 222 | return uncore_pci_fixed_ctr(box); |
223 | else | 223 | else |
224 | return uncore_msr_fixed_ctr(box); | 224 | return uncore_msr_fixed_ctr(box); |
225 | } | 225 | } |
226 | 226 | ||
227 | static inline | 227 | static inline |
228 | unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx) | 228 | unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx) |
229 | { | 229 | { |
230 | if (box->pci_dev) | 230 | if (box->pci_dev) |
231 | return uncore_pci_event_ctl(box, idx); | 231 | return uncore_pci_event_ctl(box, idx); |
232 | else | 232 | else |
233 | return uncore_msr_event_ctl(box, idx); | 233 | return uncore_msr_event_ctl(box, idx); |
234 | } | 234 | } |
235 | 235 | ||
236 | static inline | 236 | static inline |
237 | unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx) | 237 | unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx) |
238 | { | 238 | { |
239 | if (box->pci_dev) | 239 | if (box->pci_dev) |
240 | return uncore_pci_perf_ctr(box, idx); | 240 | return uncore_pci_perf_ctr(box, idx); |
241 | else | 241 | else |
242 | return uncore_msr_perf_ctr(box, idx); | 242 | return uncore_msr_perf_ctr(box, idx); |
243 | } | 243 | } |
244 | 244 | ||
245 | static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box) | 245 | static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box) |
246 | { | 246 | { |
247 | return box->pmu->type->perf_ctr_bits; | 247 | return box->pmu->type->perf_ctr_bits; |
248 | } | 248 | } |
249 | 249 | ||
250 | static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box) | 250 | static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box) |
251 | { | 251 | { |
252 | return box->pmu->type->fixed_ctr_bits; | 252 | return box->pmu->type->fixed_ctr_bits; |
253 | } | 253 | } |
254 | 254 | ||
255 | static inline int uncore_num_counters(struct intel_uncore_box *box) | 255 | static inline int uncore_num_counters(struct intel_uncore_box *box) |
256 | { | 256 | { |
257 | return box->pmu->type->num_counters; | 257 | return box->pmu->type->num_counters; |
258 | } | 258 | } |
259 | 259 | ||
260 | static inline void uncore_disable_box(struct intel_uncore_box *box) | 260 | static inline void uncore_disable_box(struct intel_uncore_box *box) |
261 | { | 261 | { |
262 | if (box->pmu->type->ops->disable_box) | 262 | if (box->pmu->type->ops->disable_box) |
263 | box->pmu->type->ops->disable_box(box); | 263 | box->pmu->type->ops->disable_box(box); |
264 | } | 264 | } |
265 | 265 | ||
266 | static inline void uncore_enable_box(struct intel_uncore_box *box) | 266 | static inline void uncore_enable_box(struct intel_uncore_box *box) |
267 | { | 267 | { |
268 | if (box->pmu->type->ops->enable_box) | 268 | if (box->pmu->type->ops->enable_box) |
269 | box->pmu->type->ops->enable_box(box); | 269 | box->pmu->type->ops->enable_box(box); |
270 | } | 270 | } |
271 | 271 | ||
272 | static inline void uncore_disable_event(struct intel_uncore_box *box, | 272 | static inline void uncore_disable_event(struct intel_uncore_box *box, |
273 | struct perf_event *event) | 273 | struct perf_event *event) |
274 | { | 274 | { |
275 | box->pmu->type->ops->disable_event(box, event); | 275 | box->pmu->type->ops->disable_event(box, event); |
276 | } | 276 | } |
277 | 277 | ||
278 | static inline void uncore_enable_event(struct intel_uncore_box *box, | 278 | static inline void uncore_enable_event(struct intel_uncore_box *box, |
279 | struct perf_event *event) | 279 | struct perf_event *event) |
280 | { | 280 | { |
281 | box->pmu->type->ops->enable_event(box, event); | 281 | box->pmu->type->ops->enable_event(box, event); |
282 | } | 282 | } |
283 | 283 | ||
284 | static inline u64 uncore_read_counter(struct intel_uncore_box *box, | 284 | static inline u64 uncore_read_counter(struct intel_uncore_box *box, |
285 | struct perf_event *event) | 285 | struct perf_event *event) |
286 | { | 286 | { |
287 | return box->pmu->type->ops->read_counter(box, event); | 287 | return box->pmu->type->ops->read_counter(box, event); |
288 | } | 288 | } |
289 | 289 | ||
290 | static inline void uncore_box_init(struct intel_uncore_box *box) | 290 | static inline void uncore_box_init(struct intel_uncore_box *box) |
291 | { | 291 | { |
292 | if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) { | 292 | if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) { |
293 | if (box->pmu->type->ops->init_box) | 293 | if (box->pmu->type->ops->init_box) |
294 | box->pmu->type->ops->init_box(box); | 294 | box->pmu->type->ops->init_box(box); |
295 | } | 295 | } |
296 | } | 296 | } |
297 | 297 | ||
298 | static inline bool uncore_box_is_fake(struct intel_uncore_box *box) | 298 | static inline bool uncore_box_is_fake(struct intel_uncore_box *box) |
299 | { | 299 | { |
300 | return (box->phys_id < 0); | 300 | return (box->phys_id < 0); |
301 | } | 301 | } |
302 | 302 | ||
303 | struct intel_uncore_pmu *uncore_event_to_pmu(struct perf_event *event); | 303 | struct intel_uncore_pmu *uncore_event_to_pmu(struct perf_event *event); |
304 | struct intel_uncore_box *uncore_pmu_to_box(struct intel_uncore_pmu *pmu, int cpu); | 304 | struct intel_uncore_box *uncore_pmu_to_box(struct intel_uncore_pmu *pmu, int cpu); |
305 | struct intel_uncore_box *uncore_event_to_box(struct perf_event *event); | 305 | struct intel_uncore_box *uncore_event_to_box(struct perf_event *event); |
306 | u64 uncore_msr_read_counter(struct intel_uncore_box *box, struct perf_event *event); | 306 | u64 uncore_msr_read_counter(struct intel_uncore_box *box, struct perf_event *event); |
307 | void uncore_pmu_start_hrtimer(struct intel_uncore_box *box); | 307 | void uncore_pmu_start_hrtimer(struct intel_uncore_box *box); |
308 | void uncore_pmu_cancel_hrtimer(struct intel_uncore_box *box); | 308 | void uncore_pmu_cancel_hrtimer(struct intel_uncore_box *box); |
309 | void uncore_pmu_event_read(struct perf_event *event); | 309 | void uncore_pmu_event_read(struct perf_event *event); |
310 | void uncore_perf_event_update(struct intel_uncore_box *box, struct perf_event *event); | 310 | void uncore_perf_event_update(struct intel_uncore_box *box, struct perf_event *event); |
311 | struct event_constraint * | 311 | struct event_constraint * |
312 | uncore_get_constraint(struct intel_uncore_box *box, struct perf_event *event); | 312 | uncore_get_constraint(struct intel_uncore_box *box, struct perf_event *event); |
313 | void uncore_put_constraint(struct intel_uncore_box *box, struct perf_event *event); | 313 | void uncore_put_constraint(struct intel_uncore_box *box, struct perf_event *event); |
314 | u64 uncore_shared_reg_config(struct intel_uncore_box *box, int idx); | 314 | u64 uncore_shared_reg_config(struct intel_uncore_box *box, int idx); |
315 | 315 | ||
316 | extern struct intel_uncore_type **uncore_msr_uncores; | 316 | extern struct intel_uncore_type **uncore_msr_uncores; |
317 | extern struct intel_uncore_type **uncore_pci_uncores; | 317 | extern struct intel_uncore_type **uncore_pci_uncores; |
318 | extern struct pci_driver *uncore_pci_driver; | 318 | extern struct pci_driver *uncore_pci_driver; |
319 | extern int uncore_pcibus_to_physid[256]; | 319 | extern int uncore_pcibus_to_physid[256]; |
320 | extern struct pci_dev *uncore_extra_pci_dev[UNCORE_SOCKET_MAX][UNCORE_EXTRA_PCI_DEV_MAX]; | 320 | extern struct pci_dev *uncore_extra_pci_dev[UNCORE_SOCKET_MAX][UNCORE_EXTRA_PCI_DEV_MAX]; |
321 | extern struct event_constraint uncore_constraint_empty; | 321 | extern struct event_constraint uncore_constraint_empty; |
322 | 322 | ||
323 | /* perf_event_intel_uncore_snb.c */ | 323 | /* perf_event_intel_uncore_snb.c */ |
324 | int snb_uncore_pci_init(void); | 324 | int snb_uncore_pci_init(void); |
325 | int ivb_uncore_pci_init(void); | 325 | int ivb_uncore_pci_init(void); |
326 | int hsw_uncore_pci_init(void); | 326 | int hsw_uncore_pci_init(void); |
327 | void snb_uncore_cpu_init(void); | 327 | void snb_uncore_cpu_init(void); |
328 | void nhm_uncore_cpu_init(void); | 328 | void nhm_uncore_cpu_init(void); |
329 | 329 | ||
330 | /* perf_event_intel_uncore_snbep.c */ | 330 | /* perf_event_intel_uncore_snbep.c */ |
331 | int snbep_uncore_pci_init(void); | 331 | int snbep_uncore_pci_init(void); |
332 | void snbep_uncore_cpu_init(void); | 332 | void snbep_uncore_cpu_init(void); |
333 | int ivbep_uncore_pci_init(void); | 333 | int ivbep_uncore_pci_init(void); |
334 | void ivbep_uncore_cpu_init(void); | 334 | void ivbep_uncore_cpu_init(void); |
335 | int hswep_uncore_pci_init(void); | 335 | int hswep_uncore_pci_init(void); |
336 | void hswep_uncore_cpu_init(void); | 336 | void hswep_uncore_cpu_init(void); |
337 | 337 | ||
338 | /* perf_event_intel_uncore_nhmex.c */ | 338 | /* perf_event_intel_uncore_nhmex.c */ |
339 | void nhmex_uncore_cpu_init(void); | 339 | void nhmex_uncore_cpu_init(void); |
340 | 340 |
arch/x86/kernel/cpu/perf_event_intel_uncore_snbep.c
1 | /* SandyBridge-EP/IvyTown uncore support */ | 1 | /* SandyBridge-EP/IvyTown uncore support */ |
2 | #include "perf_event_intel_uncore.h" | 2 | #include "perf_event_intel_uncore.h" |
3 | 3 | ||
4 | 4 | ||
5 | /* SNB-EP Box level control */ | 5 | /* SNB-EP Box level control */ |
6 | #define SNBEP_PMON_BOX_CTL_RST_CTRL (1 << 0) | 6 | #define SNBEP_PMON_BOX_CTL_RST_CTRL (1 << 0) |
7 | #define SNBEP_PMON_BOX_CTL_RST_CTRS (1 << 1) | 7 | #define SNBEP_PMON_BOX_CTL_RST_CTRS (1 << 1) |
8 | #define SNBEP_PMON_BOX_CTL_FRZ (1 << 8) | 8 | #define SNBEP_PMON_BOX_CTL_FRZ (1 << 8) |
9 | #define SNBEP_PMON_BOX_CTL_FRZ_EN (1 << 16) | 9 | #define SNBEP_PMON_BOX_CTL_FRZ_EN (1 << 16) |
10 | #define SNBEP_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \ | 10 | #define SNBEP_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \ |
11 | SNBEP_PMON_BOX_CTL_RST_CTRS | \ | 11 | SNBEP_PMON_BOX_CTL_RST_CTRS | \ |
12 | SNBEP_PMON_BOX_CTL_FRZ_EN) | 12 | SNBEP_PMON_BOX_CTL_FRZ_EN) |
13 | /* SNB-EP event control */ | 13 | /* SNB-EP event control */ |
14 | #define SNBEP_PMON_CTL_EV_SEL_MASK 0x000000ff | 14 | #define SNBEP_PMON_CTL_EV_SEL_MASK 0x000000ff |
15 | #define SNBEP_PMON_CTL_UMASK_MASK 0x0000ff00 | 15 | #define SNBEP_PMON_CTL_UMASK_MASK 0x0000ff00 |
16 | #define SNBEP_PMON_CTL_RST (1 << 17) | 16 | #define SNBEP_PMON_CTL_RST (1 << 17) |
17 | #define SNBEP_PMON_CTL_EDGE_DET (1 << 18) | 17 | #define SNBEP_PMON_CTL_EDGE_DET (1 << 18) |
18 | #define SNBEP_PMON_CTL_EV_SEL_EXT (1 << 21) | 18 | #define SNBEP_PMON_CTL_EV_SEL_EXT (1 << 21) |
19 | #define SNBEP_PMON_CTL_EN (1 << 22) | 19 | #define SNBEP_PMON_CTL_EN (1 << 22) |
20 | #define SNBEP_PMON_CTL_INVERT (1 << 23) | 20 | #define SNBEP_PMON_CTL_INVERT (1 << 23) |
21 | #define SNBEP_PMON_CTL_TRESH_MASK 0xff000000 | 21 | #define SNBEP_PMON_CTL_TRESH_MASK 0xff000000 |
22 | #define SNBEP_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \ | 22 | #define SNBEP_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \ |
23 | SNBEP_PMON_CTL_UMASK_MASK | \ | 23 | SNBEP_PMON_CTL_UMASK_MASK | \ |
24 | SNBEP_PMON_CTL_EDGE_DET | \ | 24 | SNBEP_PMON_CTL_EDGE_DET | \ |
25 | SNBEP_PMON_CTL_INVERT | \ | 25 | SNBEP_PMON_CTL_INVERT | \ |
26 | SNBEP_PMON_CTL_TRESH_MASK) | 26 | SNBEP_PMON_CTL_TRESH_MASK) |
27 | 27 | ||
28 | /* SNB-EP Ubox event control */ | 28 | /* SNB-EP Ubox event control */ |
29 | #define SNBEP_U_MSR_PMON_CTL_TRESH_MASK 0x1f000000 | 29 | #define SNBEP_U_MSR_PMON_CTL_TRESH_MASK 0x1f000000 |
30 | #define SNBEP_U_MSR_PMON_RAW_EVENT_MASK \ | 30 | #define SNBEP_U_MSR_PMON_RAW_EVENT_MASK \ |
31 | (SNBEP_PMON_CTL_EV_SEL_MASK | \ | 31 | (SNBEP_PMON_CTL_EV_SEL_MASK | \ |
32 | SNBEP_PMON_CTL_UMASK_MASK | \ | 32 | SNBEP_PMON_CTL_UMASK_MASK | \ |
33 | SNBEP_PMON_CTL_EDGE_DET | \ | 33 | SNBEP_PMON_CTL_EDGE_DET | \ |
34 | SNBEP_PMON_CTL_INVERT | \ | 34 | SNBEP_PMON_CTL_INVERT | \ |
35 | SNBEP_U_MSR_PMON_CTL_TRESH_MASK) | 35 | SNBEP_U_MSR_PMON_CTL_TRESH_MASK) |
36 | 36 | ||
37 | #define SNBEP_CBO_PMON_CTL_TID_EN (1 << 19) | 37 | #define SNBEP_CBO_PMON_CTL_TID_EN (1 << 19) |
38 | #define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK (SNBEP_PMON_RAW_EVENT_MASK | \ | 38 | #define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK (SNBEP_PMON_RAW_EVENT_MASK | \ |
39 | SNBEP_CBO_PMON_CTL_TID_EN) | 39 | SNBEP_CBO_PMON_CTL_TID_EN) |
40 | 40 | ||
41 | /* SNB-EP PCU event control */ | 41 | /* SNB-EP PCU event control */ |
42 | #define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK 0x0000c000 | 42 | #define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK 0x0000c000 |
43 | #define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK 0x1f000000 | 43 | #define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK 0x1f000000 |
44 | #define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT (1 << 30) | 44 | #define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT (1 << 30) |
45 | #define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET (1 << 31) | 45 | #define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET (1 << 31) |
46 | #define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK \ | 46 | #define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK \ |
47 | (SNBEP_PMON_CTL_EV_SEL_MASK | \ | 47 | (SNBEP_PMON_CTL_EV_SEL_MASK | \ |
48 | SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \ | 48 | SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \ |
49 | SNBEP_PMON_CTL_EDGE_DET | \ | 49 | SNBEP_PMON_CTL_EDGE_DET | \ |
50 | SNBEP_PMON_CTL_EV_SEL_EXT | \ | 50 | SNBEP_PMON_CTL_EV_SEL_EXT | \ |
51 | SNBEP_PMON_CTL_INVERT | \ | 51 | SNBEP_PMON_CTL_INVERT | \ |
52 | SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \ | 52 | SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \ |
53 | SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \ | 53 | SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \ |
54 | SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET) | 54 | SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET) |
55 | 55 | ||
56 | #define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK \ | 56 | #define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK \ |
57 | (SNBEP_PMON_RAW_EVENT_MASK | \ | 57 | (SNBEP_PMON_RAW_EVENT_MASK | \ |
58 | SNBEP_PMON_CTL_EV_SEL_EXT) | 58 | SNBEP_PMON_CTL_EV_SEL_EXT) |
59 | 59 | ||
60 | /* SNB-EP pci control register */ | 60 | /* SNB-EP pci control register */ |
/* SNB-EP uncore PMON box control/counter offsets in PCI config space */
#define SNBEP_PCI_PMON_BOX_CTL			0xf4
#define SNBEP_PCI_PMON_CTL0			0xd8
/* SNB-EP pci counter register */
#define SNBEP_PCI_PMON_CTR0			0xa0

/* SNB-EP home agent register */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0	0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1	0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH	0x48
/* SNB-EP memory controller register */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL		0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR		0xd0
/* SNB-EP QPI register */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0		0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1		0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0		0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1		0x23c

/* SNB-EP Ubox register (MSR addresses) */
#define SNBEP_U_MSR_PMON_CTR0			0xc16
#define SNBEP_U_MSR_PMON_CTL0			0xc10

#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL		0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR		0xc09

/* SNB-EP Cbo register (MSR addresses for box 0) */
#define SNBEP_C0_MSR_PMON_CTR0			0xd16
#define SNBEP_C0_MSR_PMON_CTL0			0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL		0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER		0xd14
/* MSR stride between successive Cbo PMON register blocks */
#define SNBEP_CBO_MSR_OFFSET			0x20

/* Cbo filter register bit fields */
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID	0x1f
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID	0x3fc00
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE	0x7c0000
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC	0xff800000

/*
 * Initializer for an extra-reg table entry that routes event (e), with
 * config mask (m) and allocation index (i), to the Cbo box filter MSR.
 */
#define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) {	\
	.event = (e),				\
	.msr = SNBEP_C0_MSR_PMON_BOX_FILTER,	\
	.config_mask = (m),			\
	.idx = (i)				\
}

/* SNB-EP PCU register (MSR addresses) */
#define SNBEP_PCU_MSR_PMON_CTR0			0xc36
#define SNBEP_PCU_MSR_PMON_CTL0			0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL		0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER		0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK	0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR		0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR		0x3fd
/* IVBEP event control */
#define IVBEP_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS)
#define IVBEP_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_TRESH_MASK)
/* IVBEP Ubox */
#define IVBEP_U_MSR_PMON_GLOBAL_CTL		0xc00
/*
 * NOTE(review): (1 << 31) shifts into the sign bit of an int; if this
 * constant were ever widened to a u64 context it would sign-extend.
 * Confirm all users consume it as a 32-bit value before relying on it.
 */
#define IVBEP_U_PMON_GLOBAL_FRZ_ALL		(1 << 31)
#define IVBEP_U_PMON_GLOBAL_UNFRZ_ALL		(1 << 29)

#define IVBEP_U_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)
/* IVBEP Cbo */
#define IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK		(IVBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* Cbo filter bit fields (laid out in the 64-bit filter MSR) */
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_TID		(0x1fULL << 0)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 5)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE	(0x3fULL << 17)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NID		(0xffffULL << 32)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC		(0x1ffULL << 52)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_C6		(0x1ULL << 61)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NC		(0x1ULL << 62)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)

/* IVBEP home agent */
#define IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST		(1 << 16)
#define IVBEP_HA_PCI_PMON_RAW_EVENT_MASK	\
				(IVBEP_PMON_RAW_EVENT_MASK | \
				 IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST)
/* IVBEP PCU */
#define IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
/* IVBEP QPI */
#define IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(IVBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)

/* Extract the (i)-th field of width (n) bits from (x). */
#define __BITS_VALUE(x, i, n)  ((typeof(x))(((x) >> ((i) * (n))) & \
				((1ULL << (n)) - 1)))
/* Haswell-EP Ubox (MSR addresses) */
#define HSWEP_U_MSR_PMON_CTR0			0x705
#define HSWEP_U_MSR_PMON_CTL0			0x709
#define HSWEP_U_MSR_PMON_FILTER			0x707

#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTL		0x703
#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTR		0x704

/* Ubox filter bit fields */
#define HSWEP_U_MSR_PMON_BOX_FILTER_TID		(0x1 << 0)
#define HSWEP_U_MSR_PMON_BOX_FILTER_CID		(0x1fULL << 1)
#define HSWEP_U_MSR_PMON_BOX_FILTER_MASK \
					(HSWEP_U_MSR_PMON_BOX_FILTER_TID | \
					 HSWEP_U_MSR_PMON_BOX_FILTER_CID)

/* Haswell-EP CBo (MSR addresses for box 0) */
#define HSWEP_C0_MSR_PMON_CTR0			0xe08
#define HSWEP_C0_MSR_PMON_CTL0			0xe01
#define HSWEP_C0_MSR_PMON_BOX_CTL		0xe00
#define HSWEP_C0_MSR_PMON_BOX_FILTER0		0xe05
/* MSR stride between successive CBo PMON register blocks */
#define HSWEP_CBO_MSR_OFFSET			0x10


/* CBo filter bit fields (laid out in the 64-bit filter MSR) */
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_TID	(0x3fULL << 0)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 6)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE	(0x7fULL << 17)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NID	(0xffffULL << 32)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC	(0x1ffULL << 52)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_C6	(0x1ULL << 61)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NC	(0x1ULL << 62)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)


/* Haswell-EP Sbox (MSR addresses for box 0) */
#define HSWEP_S0_MSR_PMON_CTR0			0x726
#define HSWEP_S0_MSR_PMON_CTL0			0x721
#define HSWEP_S0_MSR_PMON_BOX_CTL		0x720
/* MSR stride between successive Sbox PMON register blocks */
#define HSWEP_SBOX_MSR_OFFSET			0xa
#define HSWEP_S_MSR_PMON_RAW_EVENT_MASK		(SNBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* Haswell-EP PCU (MSR addresses) */
#define HSWEP_PCU_MSR_PMON_CTR0			0x717
#define HSWEP_PCU_MSR_PMON_CTL0			0x711
#define HSWEP_PCU_MSR_PMON_BOX_CTL		0x710
#define HSWEP_PCU_MSR_PMON_BOX_FILTER		0x715
/*
 * sysfs "format" attribute definitions. Each invocation creates an
 * attribute whose value string maps a named event field onto a bit
 * range of a perf_event_attr config word (e.g. "config:0-7").
 * Multiple numbered variants of the same field (filter_tid vs
 * filter_tid2, ...) exist because different box types / CPU
 * generations place the field at different bit positions.
 */
DEFINE_UNCORE_FORMAT_ATTR(event, event, "config:0-7");
DEFINE_UNCORE_FORMAT_ATTR(event_ext, event, "config:0-7,21");
DEFINE_UNCORE_FORMAT_ATTR(umask, umask, "config:8-15");
DEFINE_UNCORE_FORMAT_ATTR(edge, edge, "config:18");
DEFINE_UNCORE_FORMAT_ATTR(tid_en, tid_en, "config:19");
DEFINE_UNCORE_FORMAT_ATTR(inv, inv, "config:23");
DEFINE_UNCORE_FORMAT_ATTR(thresh8, thresh, "config:24-31");
DEFINE_UNCORE_FORMAT_ATTR(thresh5, thresh, "config:24-28");
DEFINE_UNCORE_FORMAT_ATTR(occ_sel, occ_sel, "config:14-15");
DEFINE_UNCORE_FORMAT_ATTR(occ_invert, occ_invert, "config:30");
DEFINE_UNCORE_FORMAT_ATTR(occ_edge, occ_edge, "config:14-51");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid, filter_tid, "config1:0-4");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid2, filter_tid, "config1:0");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid3, filter_tid, "config1:0-5");
DEFINE_UNCORE_FORMAT_ATTR(filter_cid, filter_cid, "config1:5");
DEFINE_UNCORE_FORMAT_ATTR(filter_link, filter_link, "config1:5-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_link2, filter_link, "config1:6-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_nid, filter_nid, "config1:10-17");
DEFINE_UNCORE_FORMAT_ATTR(filter_nid2, filter_nid, "config1:32-47");
DEFINE_UNCORE_FORMAT_ATTR(filter_state, filter_state, "config1:18-22");
DEFINE_UNCORE_FORMAT_ATTR(filter_state2, filter_state, "config1:17-22");
DEFINE_UNCORE_FORMAT_ATTR(filter_state3, filter_state, "config1:17-23");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc, filter_opc, "config1:23-31");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc2, filter_opc, "config1:52-60");
DEFINE_UNCORE_FORMAT_ATTR(filter_nc, filter_nc, "config1:62");
DEFINE_UNCORE_FORMAT_ATTR(filter_c6, filter_c6, "config1:61");
DEFINE_UNCORE_FORMAT_ATTR(filter_isoc, filter_isoc, "config1:63");
DEFINE_UNCORE_FORMAT_ATTR(filter_band0, filter_band0, "config1:0-7");
DEFINE_UNCORE_FORMAT_ATTR(filter_band1, filter_band1, "config1:8-15");
DEFINE_UNCORE_FORMAT_ATTR(filter_band2, filter_band2, "config1:16-23");
DEFINE_UNCORE_FORMAT_ATTR(filter_band3, filter_band3, "config1:24-31");
DEFINE_UNCORE_FORMAT_ATTR(match_rds, match_rds, "config1:48-51");
DEFINE_UNCORE_FORMAT_ATTR(match_rnid30, match_rnid30, "config1:32-35");
DEFINE_UNCORE_FORMAT_ATTR(match_rnid4, match_rnid4, "config1:31");
DEFINE_UNCORE_FORMAT_ATTR(match_dnid, match_dnid, "config1:13-17");
DEFINE_UNCORE_FORMAT_ATTR(match_mc, match_mc, "config1:9-12");
DEFINE_UNCORE_FORMAT_ATTR(match_opc, match_opc, "config1:5-8");
DEFINE_UNCORE_FORMAT_ATTR(match_vnw, match_vnw, "config1:3-4");
DEFINE_UNCORE_FORMAT_ATTR(match0, match0, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(match1, match1, "config1:32-63");
DEFINE_UNCORE_FORMAT_ATTR(mask_rds, mask_rds, "config2:48-51");
DEFINE_UNCORE_FORMAT_ATTR(mask_rnid30, mask_rnid30, "config2:32-35");
DEFINE_UNCORE_FORMAT_ATTR(mask_rnid4, mask_rnid4, "config2:31");
DEFINE_UNCORE_FORMAT_ATTR(mask_dnid, mask_dnid, "config2:13-17");
DEFINE_UNCORE_FORMAT_ATTR(mask_mc, mask_mc, "config2:9-12");
DEFINE_UNCORE_FORMAT_ATTR(mask_opc, mask_opc, "config2:5-8");
DEFINE_UNCORE_FORMAT_ATTR(mask_vnw, mask_vnw, "config2:3-4");
DEFINE_UNCORE_FORMAT_ATTR(mask0, mask0, "config2:0-31");
DEFINE_UNCORE_FORMAT_ATTR(mask1, mask1, "config2:32-63");
263 | static void snbep_uncore_pci_disable_box(struct intel_uncore_box *box) | 263 | static void snbep_uncore_pci_disable_box(struct intel_uncore_box *box) |
264 | { | 264 | { |
265 | struct pci_dev *pdev = box->pci_dev; | 265 | struct pci_dev *pdev = box->pci_dev; |
266 | int box_ctl = uncore_pci_box_ctl(box); | 266 | int box_ctl = uncore_pci_box_ctl(box); |
267 | u32 config = 0; | 267 | u32 config = 0; |
268 | 268 | ||
269 | if (!pci_read_config_dword(pdev, box_ctl, &config)) { | 269 | if (!pci_read_config_dword(pdev, box_ctl, &config)) { |
270 | config |= SNBEP_PMON_BOX_CTL_FRZ; | 270 | config |= SNBEP_PMON_BOX_CTL_FRZ; |
271 | pci_write_config_dword(pdev, box_ctl, config); | 271 | pci_write_config_dword(pdev, box_ctl, config); |
272 | } | 272 | } |
273 | } | 273 | } |
274 | 274 | ||
275 | static void snbep_uncore_pci_enable_box(struct intel_uncore_box *box) | 275 | static void snbep_uncore_pci_enable_box(struct intel_uncore_box *box) |
276 | { | 276 | { |
277 | struct pci_dev *pdev = box->pci_dev; | 277 | struct pci_dev *pdev = box->pci_dev; |
278 | int box_ctl = uncore_pci_box_ctl(box); | 278 | int box_ctl = uncore_pci_box_ctl(box); |
279 | u32 config = 0; | 279 | u32 config = 0; |
280 | 280 | ||
281 | if (!pci_read_config_dword(pdev, box_ctl, &config)) { | 281 | if (!pci_read_config_dword(pdev, box_ctl, &config)) { |
282 | config &= ~SNBEP_PMON_BOX_CTL_FRZ; | 282 | config &= ~SNBEP_PMON_BOX_CTL_FRZ; |
283 | pci_write_config_dword(pdev, box_ctl, config); | 283 | pci_write_config_dword(pdev, box_ctl, config); |
284 | } | 284 | } |
285 | } | 285 | } |
286 | 286 | ||
287 | static void snbep_uncore_pci_enable_event(struct intel_uncore_box *box, struct perf_event *event) | 287 | static void snbep_uncore_pci_enable_event(struct intel_uncore_box *box, struct perf_event *event) |
288 | { | 288 | { |
289 | struct pci_dev *pdev = box->pci_dev; | 289 | struct pci_dev *pdev = box->pci_dev; |
290 | struct hw_perf_event *hwc = &event->hw; | 290 | struct hw_perf_event *hwc = &event->hw; |
291 | 291 | ||
292 | pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); | 292 | pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); |
293 | } | 293 | } |
294 | 294 | ||
295 | static void snbep_uncore_pci_disable_event(struct intel_uncore_box *box, struct perf_event *event) | 295 | static void snbep_uncore_pci_disable_event(struct intel_uncore_box *box, struct perf_event *event) |
296 | { | 296 | { |
297 | struct pci_dev *pdev = box->pci_dev; | 297 | struct pci_dev *pdev = box->pci_dev; |
298 | struct hw_perf_event *hwc = &event->hw; | 298 | struct hw_perf_event *hwc = &event->hw; |
299 | 299 | ||
300 | pci_write_config_dword(pdev, hwc->config_base, hwc->config); | 300 | pci_write_config_dword(pdev, hwc->config_base, hwc->config); |
301 | } | 301 | } |
302 | 302 | ||
303 | static u64 snbep_uncore_pci_read_counter(struct intel_uncore_box *box, struct perf_event *event) | 303 | static u64 snbep_uncore_pci_read_counter(struct intel_uncore_box *box, struct perf_event *event) |
304 | { | 304 | { |
305 | struct pci_dev *pdev = box->pci_dev; | 305 | struct pci_dev *pdev = box->pci_dev; |
306 | struct hw_perf_event *hwc = &event->hw; | 306 | struct hw_perf_event *hwc = &event->hw; |
307 | u64 count = 0; | 307 | u64 count = 0; |
308 | 308 | ||
309 | pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count); | 309 | pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count); |
310 | pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1); | 310 | pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1); |
311 | 311 | ||
312 | return count; | 312 | return count; |
313 | } | 313 | } |
314 | 314 | ||
315 | static void snbep_uncore_pci_init_box(struct intel_uncore_box *box) | 315 | static void snbep_uncore_pci_init_box(struct intel_uncore_box *box) |
316 | { | 316 | { |
317 | struct pci_dev *pdev = box->pci_dev; | 317 | struct pci_dev *pdev = box->pci_dev; |
318 | 318 | ||
319 | pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, SNBEP_PMON_BOX_CTL_INT); | 319 | pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, SNBEP_PMON_BOX_CTL_INT); |
320 | } | 320 | } |
321 | 321 | ||
322 | static void snbep_uncore_msr_disable_box(struct intel_uncore_box *box) | 322 | static void snbep_uncore_msr_disable_box(struct intel_uncore_box *box) |
323 | { | 323 | { |
324 | u64 config; | 324 | u64 config; |
325 | unsigned msr; | 325 | unsigned msr; |
326 | 326 | ||
327 | msr = uncore_msr_box_ctl(box); | 327 | msr = uncore_msr_box_ctl(box); |
328 | if (msr) { | 328 | if (msr) { |
329 | rdmsrl(msr, config); | 329 | rdmsrl(msr, config); |
330 | config |= SNBEP_PMON_BOX_CTL_FRZ; | 330 | config |= SNBEP_PMON_BOX_CTL_FRZ; |
331 | wrmsrl(msr, config); | 331 | wrmsrl(msr, config); |
332 | } | 332 | } |
333 | } | 333 | } |
334 | 334 | ||
335 | static void snbep_uncore_msr_enable_box(struct intel_uncore_box *box) | 335 | static void snbep_uncore_msr_enable_box(struct intel_uncore_box *box) |
336 | { | 336 | { |
337 | u64 config; | 337 | u64 config; |
338 | unsigned msr; | 338 | unsigned msr; |
339 | 339 | ||
340 | msr = uncore_msr_box_ctl(box); | 340 | msr = uncore_msr_box_ctl(box); |
341 | if (msr) { | 341 | if (msr) { |
342 | rdmsrl(msr, config); | 342 | rdmsrl(msr, config); |
343 | config &= ~SNBEP_PMON_BOX_CTL_FRZ; | 343 | config &= ~SNBEP_PMON_BOX_CTL_FRZ; |
344 | wrmsrl(msr, config); | 344 | wrmsrl(msr, config); |
345 | } | 345 | } |
346 | } | 346 | } |
347 | 347 | ||
348 | static void snbep_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event) | 348 | static void snbep_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event) |
349 | { | 349 | { |
350 | struct hw_perf_event *hwc = &event->hw; | 350 | struct hw_perf_event *hwc = &event->hw; |
351 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 351 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
352 | 352 | ||
353 | if (reg1->idx != EXTRA_REG_NONE) | 353 | if (reg1->idx != EXTRA_REG_NONE) |
354 | wrmsrl(reg1->reg, uncore_shared_reg_config(box, 0)); | 354 | wrmsrl(reg1->reg, uncore_shared_reg_config(box, 0)); |
355 | 355 | ||
356 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); | 356 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); |
357 | } | 357 | } |
358 | 358 | ||
359 | static void snbep_uncore_msr_disable_event(struct intel_uncore_box *box, | 359 | static void snbep_uncore_msr_disable_event(struct intel_uncore_box *box, |
360 | struct perf_event *event) | 360 | struct perf_event *event) |
361 | { | 361 | { |
362 | struct hw_perf_event *hwc = &event->hw; | 362 | struct hw_perf_event *hwc = &event->hw; |
363 | 363 | ||
364 | wrmsrl(hwc->config_base, hwc->config); | 364 | wrmsrl(hwc->config_base, hwc->config); |
365 | } | 365 | } |
366 | 366 | ||
367 | static void snbep_uncore_msr_init_box(struct intel_uncore_box *box) | 367 | static void snbep_uncore_msr_init_box(struct intel_uncore_box *box) |
368 | { | 368 | { |
369 | unsigned msr = uncore_msr_box_ctl(box); | 369 | unsigned msr = uncore_msr_box_ctl(box); |
370 | 370 | ||
371 | if (msr) | 371 | if (msr) |
372 | wrmsrl(msr, SNBEP_PMON_BOX_CTL_INT); | 372 | wrmsrl(msr, SNBEP_PMON_BOX_CTL_INT); |
373 | } | 373 | } |
374 | 374 | ||
/* Generic SNB-EP format attributes: event/umask/edge/inv + 8-bit threshold. */
static struct attribute *snbep_uncore_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};

/* Ubox variant: same fields but a 5-bit threshold (thresh5). */
static struct attribute *snbep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	NULL,
};
392 | 392 | ||
/* Cbo variant: adds tid_en plus the tid/nid/state/opc filter fields. */
static struct attribute *snbep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid.attr,
	&format_attr_filter_nid.attr,
	&format_attr_filter_state.attr,
	&format_attr_filter_opc.attr,
	NULL,
};
406 | 406 | ||
/* PCU variant: extended event field, occupancy controls and band filters. */
static struct attribute *snbep_uncore_pcu_formats_attr[] = {
	&format_attr_event_ext.attr,
	&format_attr_occ_sel.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	&format_attr_occ_invert.attr,
	&format_attr_occ_edge.attr,
	&format_attr_filter_band0.attr,
	&format_attr_filter_band1.attr,
	&format_attr_filter_band2.attr,
	&format_attr_filter_band3.attr,
	NULL,
};
421 | 421 | ||
/* QPI variant: extended event field plus packet match/mask fields. */
static struct attribute *snbep_uncore_qpi_formats_attr[] = {
	&format_attr_event_ext.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_match_rds.attr,
	&format_attr_match_rnid30.attr,
	&format_attr_match_rnid4.attr,
	&format_attr_match_dnid.attr,
	&format_attr_match_mc.attr,
	&format_attr_match_opc.attr,
	&format_attr_match_vnw.attr,
	&format_attr_match0.attr,
	&format_attr_match1.attr,
	&format_attr_mask_rds.attr,
	&format_attr_mask_rnid30.attr,
	&format_attr_mask_rnid4.attr,
	&format_attr_mask_dnid.attr,
	&format_attr_mask_mc.attr,
	&format_attr_mask_opc.attr,
	&format_attr_mask_vnw.attr,
	&format_attr_mask0.attr,
	&format_attr_mask1.attr,
	NULL,
};
448 | 448 | ||
/*
 * Named IMC events. The .scale entries (6.103515625e-5 = 64/2^20)
 * convert 64-byte CAS transactions into the MiB unit advertised by
 * the matching .unit entries.
 */
static struct uncore_event_desc snbep_uncore_imc_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,      "event=0xff,umask=0x00"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read,  "event=0x04,umask=0x03"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
	{ /* end: all zeroes */ },
};
459 | 459 | ||
/* Named QPI events; terminated by an all-zero sentinel entry. */
static struct uncore_event_desc snbep_uncore_qpi_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,       "event=0x14"),
	INTEL_UNCORE_EVENT_DESC(txl_flits_active, "event=0x00,umask=0x06"),
	INTEL_UNCORE_EVENT_DESC(drs_data,         "event=0x102,umask=0x08"),
	INTEL_UNCORE_EVENT_DESC(ncb_data,         "event=0x103,umask=0x04"),
	{ /* end: all zeroes */ },
};
467 | 467 | ||
/* "format" sysfs group for the generic SNB-EP PMON boxes. */
static struct attribute_group snbep_uncore_format_group = {
	.name = "format",
	.attrs = snbep_uncore_formats_attr,
};
472 | 472 | ||
/* "format" sysfs group for the U-box PMU. */
static struct attribute_group snbep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = snbep_uncore_ubox_formats_attr,
};
477 | 477 | ||
/* "format" sysfs group for the C-box PMUs. */
static struct attribute_group snbep_uncore_cbox_format_group = {
	.name = "format",
	.attrs = snbep_uncore_cbox_formats_attr,
};
482 | 482 | ||
/* "format" sysfs group for the PCU PMU. */
static struct attribute_group snbep_uncore_pcu_format_group = {
	.name = "format",
	.attrs = snbep_uncore_pcu_formats_attr,
};
487 | 487 | ||
/* "format" sysfs group for the QPI PMUs. */
static struct attribute_group snbep_uncore_qpi_format_group = {
	.name = "format",
	.attrs = snbep_uncore_qpi_formats_attr,
};
492 | 492 | ||
/*
 * Common MSR-based PMON callbacks.  init_box is deliberately left out
 * so SNBEP_UNCORE_MSR_OPS_COMMON_INIT() can add the default one while
 * other users may supply their own.
 */
#define __SNBEP_UNCORE_MSR_OPS_COMMON_INIT()			\
	.disable_box	= snbep_uncore_msr_disable_box,		\
	.enable_box	= snbep_uncore_msr_enable_box,		\
	.disable_event	= snbep_uncore_msr_disable_event,	\
	.enable_event	= snbep_uncore_msr_enable_event,	\
	.read_counter	= uncore_msr_read_counter
499 | 499 | ||
/* Common MSR callbacks plus the default init_box. */
#define SNBEP_UNCORE_MSR_OPS_COMMON_INIT()			\
	__SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),			\
	.init_box	= snbep_uncore_msr_init_box		\
503 | 503 | ||
/* Default ops for MSR-based SNB-EP uncore boxes (U-box). */
static struct intel_uncore_ops snbep_uncore_msr_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
};
507 | 507 | ||
/*
 * Common PCI-config-space PMON callbacks.  enable_event is left out on
 * purpose: each user supplies its own (see snbep_uncore_pci_ops).
 */
#define SNBEP_UNCORE_PCI_OPS_COMMON_INIT()			\
	.init_box	= snbep_uncore_pci_init_box,		\
	.disable_box	= snbep_uncore_pci_disable_box,		\
	.enable_box	= snbep_uncore_pci_enable_box,		\
	.disable_event	= snbep_uncore_pci_disable_event,	\
	.read_counter	= snbep_uncore_pci_read_counter
514 | 514 | ||
515 | static struct intel_uncore_ops snbep_uncore_pci_ops = { | 515 | static struct intel_uncore_ops snbep_uncore_pci_ops = { |
516 | SNBEP_UNCORE_PCI_OPS_COMMON_INIT(), | 516 | SNBEP_UNCORE_PCI_OPS_COMMON_INIT(), |
517 | .enable_event = snbep_uncore_pci_enable_event, \ | 517 | .enable_event = snbep_uncore_pci_enable_event, \ |
518 | }; | 518 | }; |
519 | 519 | ||
/*
 * C-box counter constraints: maps an event code to the bitmask of
 * generic counters that may host it (e.g. 0x01 only on counter 0,
 * 0x1b-0x1e only on counters 2-3).
 */
static struct event_constraint snbep_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x02, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x04, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x05, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x07, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x1b, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1c, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1d, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1e, 0xc),
	EVENT_CONSTRAINT_OVERLAP(0x1f, 0xe, 0xff),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
	EVENT_CONSTRAINT_END
};
549 | 549 | ||
/* R2PCIe box counter constraints: event code -> allowed-counter mask. */
static struct event_constraint snbep_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	EVENT_CONSTRAINT_END
};
563 | 563 | ||
/* R3QPI box counter constraints: event code -> allowed-counter mask. */
static struct event_constraint snbep_uncore_r3qpi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2a, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x30, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	EVENT_CONSTRAINT_END
};
595 | 595 | ||
/*
 * SNB-EP U-box PMU: one box, two 44-bit generic counters plus a
 * 48-bit fixed counter (UCLK).
 */
static struct intel_uncore_type snbep_uncore_ubox = {
	.name		= "ubox",
	.num_counters   = 2,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.fixed_ctr_bits	= 48,
	.perf_ctr	= SNBEP_U_MSR_PMON_CTR0,
	.event_ctl	= SNBEP_U_MSR_PMON_CTL0,
	.event_mask	= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.ops		= &snbep_uncore_msr_ops,
	.format_group	= &snbep_uncore_ubox_format_group,
};
610 | 610 | ||
/*
 * C-box events that need the shared filter register.  The third field
 * is a bitmask of filter fields the event consumes; it is OR-ed into
 * reg1->idx by snbep_cbox_hw_config() and decoded by
 * snbep_cbox_filter_mask().
 */
static struct extra_reg snbep_uncore_cbox_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x2),
	EVENT_EXTRA_END
};
639 | 639 | ||
640 | static void snbep_cbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event) | 640 | static void snbep_cbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event) |
641 | { | 641 | { |
642 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | 642 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; |
643 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; | 643 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; |
644 | int i; | 644 | int i; |
645 | 645 | ||
646 | if (uncore_box_is_fake(box)) | 646 | if (uncore_box_is_fake(box)) |
647 | return; | 647 | return; |
648 | 648 | ||
649 | for (i = 0; i < 5; i++) { | 649 | for (i = 0; i < 5; i++) { |
650 | if (reg1->alloc & (0x1 << i)) | 650 | if (reg1->alloc & (0x1 << i)) |
651 | atomic_sub(1 << (i * 6), &er->ref); | 651 | atomic_sub(1 << (i * 6), &er->ref); |
652 | } | 652 | } |
653 | reg1->alloc = 0; | 653 | reg1->alloc = 0; |
654 | } | 654 | } |
655 | 655 | ||
/*
 * Try to claim the shared C-box filter-register fields this event
 * needs (field bits are in reg1->idx).  Returns NULL on success, or
 * &uncore_constraint_empty if some field is already programmed with a
 * conflicting value.  cbox_filter_mask translates a field bit into
 * the corresponding filter-MSR bit mask.
 */
static struct event_constraint *
__snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event,
			    u64 (*cbox_filter_mask)(int fields))
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct intel_uncore_extra_reg *er = &box->shared_regs[0];
	int i, alloc = 0;
	unsigned long flags;
	u64 mask;

	/* Event uses no filter fields: nothing to arbitrate. */
	if (reg1->idx == EXTRA_REG_NONE)
		return NULL;

	raw_spin_lock_irqsave(&er->lock, flags);
	for (i = 0; i < 5; i++) {
		if (!(reg1->idx & (0x1 << i)))
			continue;
		/* Real boxes keep fields across calls; skip ones we own. */
		if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
			continue;

		mask = cbox_filter_mask(0x1 << i);
		/*
		 * A field can be taken if it is free (its 6-bit refcount
		 * is zero) or already holds the exact value we want.
		 */
		if (!__BITS_VALUE(atomic_read(&er->ref), i, 6) ||
		    !((reg1->config ^ er->config) & mask)) {
			atomic_add(1 << (i * 6), &er->ref);
			er->config &= ~mask;
			er->config |= reg1->config & mask;
			alloc |= (0x1 << i);
		} else {
			break;
		}
	}
	raw_spin_unlock_irqrestore(&er->lock, flags);
	if (i < 5)
		goto fail;

	if (!uncore_box_is_fake(box))
		reg1->alloc |= alloc;

	return NULL;
fail:
	/* Roll back only the references taken in this call. */
	for (; i >= 0; i--) {
		if (alloc & (0x1 << i))
			atomic_sub(1 << (i * 6), &er->ref);
	}
	return &uncore_constraint_empty;
}
702 | 702 | ||
703 | static u64 snbep_cbox_filter_mask(int fields) | 703 | static u64 snbep_cbox_filter_mask(int fields) |
704 | { | 704 | { |
705 | u64 mask = 0; | 705 | u64 mask = 0; |
706 | 706 | ||
707 | if (fields & 0x1) | 707 | if (fields & 0x1) |
708 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_TID; | 708 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_TID; |
709 | if (fields & 0x2) | 709 | if (fields & 0x2) |
710 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_NID; | 710 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_NID; |
711 | if (fields & 0x4) | 711 | if (fields & 0x4) |
712 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE; | 712 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE; |
713 | if (fields & 0x8) | 713 | if (fields & 0x8) |
714 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC; | 714 | mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC; |
715 | 715 | ||
716 | return mask; | 716 | return mask; |
717 | } | 717 | } |
718 | 718 | ||
/* C-box constraint handler bound to the SNB-EP filter-field layout. */
static struct event_constraint *
snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, snbep_cbox_filter_mask);
}
724 | 724 | ||
725 | static int snbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 725 | static int snbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
726 | { | 726 | { |
727 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | 727 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; |
728 | struct extra_reg *er; | 728 | struct extra_reg *er; |
729 | int idx = 0; | 729 | int idx = 0; |
730 | 730 | ||
731 | for (er = snbep_uncore_cbox_extra_regs; er->msr; er++) { | 731 | for (er = snbep_uncore_cbox_extra_regs; er->msr; er++) { |
732 | if (er->event != (event->hw.config & er->config_mask)) | 732 | if (er->event != (event->hw.config & er->config_mask)) |
733 | continue; | 733 | continue; |
734 | idx |= er->idx; | 734 | idx |= er->idx; |
735 | } | 735 | } |
736 | 736 | ||
737 | if (idx) { | 737 | if (idx) { |
738 | reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER + | 738 | reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER + |
739 | SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; | 739 | SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; |
740 | reg1->config = event->attr.config1 & snbep_cbox_filter_mask(idx); | 740 | reg1->config = event->attr.config1 & snbep_cbox_filter_mask(idx); |
741 | reg1->idx = idx; | 741 | reg1->idx = idx; |
742 | } | 742 | } |
743 | return 0; | 743 | return 0; |
744 | } | 744 | } |
745 | 745 | ||
/* C-box ops: common MSR callbacks plus filter-register arbitration. */
static struct intel_uncore_ops snbep_uncore_cbox_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= snbep_cbox_hw_config,
	.get_constraint		= snbep_cbox_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
752 | 752 | ||
/*
 * SNB-EP C-box PMU.  num_boxes is the maximum; it is trimmed to the
 * actual core count in snbep_uncore_cpu_init().
 */
static struct intel_uncore_type snbep_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 8,
	.perf_ctr_bits		= 44,
	.event_ctl		= SNBEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= SNBEP_C0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= SNBEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,	/* the per-box filter register */
	.constraints		= snbep_uncore_cbox_constraints,
	.ops			= &snbep_uncore_cbox_ops,
	.format_group		= &snbep_uncore_cbox_format_group,
};
768 | 768 | ||
769 | static u64 snbep_pcu_alter_er(struct perf_event *event, int new_idx, bool modify) | 769 | static u64 snbep_pcu_alter_er(struct perf_event *event, int new_idx, bool modify) |
770 | { | 770 | { |
771 | struct hw_perf_event *hwc = &event->hw; | 771 | struct hw_perf_event *hwc = &event->hw; |
772 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 772 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
773 | u64 config = reg1->config; | 773 | u64 config = reg1->config; |
774 | 774 | ||
775 | if (new_idx > reg1->idx) | 775 | if (new_idx > reg1->idx) |
776 | config <<= 8 * (new_idx - reg1->idx); | 776 | config <<= 8 * (new_idx - reg1->idx); |
777 | else | 777 | else |
778 | config >>= 8 * (reg1->idx - new_idx); | 778 | config >>= 8 * (reg1->idx - new_idx); |
779 | 779 | ||
780 | if (modify) { | 780 | if (modify) { |
781 | hwc->config += new_idx - reg1->idx; | 781 | hwc->config += new_idx - reg1->idx; |
782 | reg1->config = config; | 782 | reg1->config = config; |
783 | reg1->idx = new_idx; | 783 | reg1->idx = new_idx; |
784 | } | 784 | } |
785 | return config; | 785 | return config; |
786 | } | 786 | } |
787 | 787 | ||
/*
 * Claim a byte slot in the shared PCU filter register.  If the slot
 * matching reg1->idx is taken with a different value, rotate through
 * the other three slots (adjusting the event select to match) before
 * giving up with the empty constraint.
 */
static struct event_constraint *
snbep_pcu_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct intel_uncore_extra_reg *er = &box->shared_regs[0];
	unsigned long flags;
	int idx = reg1->idx;
	u64 mask, config1 = reg1->config;
	bool ok = false;

	/* No filter needed, or a real box already holds its slot. */
	if (reg1->idx == EXTRA_REG_NONE ||
	    (!uncore_box_is_fake(box) && reg1->alloc))
		return NULL;
again:
	mask = 0xffULL << (idx * 8);
	raw_spin_lock_irqsave(&er->lock, flags);
	/* Slot is free (8-bit refcount zero) or holds the same value. */
	if (!__BITS_VALUE(atomic_read(&er->ref), idx, 8) ||
	    !((config1 ^ er->config) & mask)) {
		atomic_add(1 << (idx * 8), &er->ref);
		er->config &= ~mask;
		er->config |= config1 & mask;
		ok = true;
	}
	raw_spin_unlock_irqrestore(&er->lock, flags);

	if (!ok) {
		/* Try the next of the four byte slots, wrapping around. */
		idx = (idx + 1) % 4;
		if (idx != reg1->idx) {
			config1 = snbep_pcu_alter_er(event, idx, false);
			goto again;
		}
		return &uncore_constraint_empty;
	}

	if (!uncore_box_is_fake(box)) {
		if (idx != reg1->idx)
			snbep_pcu_alter_er(event, idx, true);
		reg1->alloc = 1;
	}
	return NULL;
}
829 | 829 | ||
830 | static void snbep_pcu_put_constraint(struct intel_uncore_box *box, struct perf_event *event) | 830 | static void snbep_pcu_put_constraint(struct intel_uncore_box *box, struct perf_event *event) |
831 | { | 831 | { |
832 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | 832 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; |
833 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; | 833 | struct intel_uncore_extra_reg *er = &box->shared_regs[0]; |
834 | 834 | ||
835 | if (uncore_box_is_fake(box) || !reg1->alloc) | 835 | if (uncore_box_is_fake(box) || !reg1->alloc) |
836 | return; | 836 | return; |
837 | 837 | ||
838 | atomic_sub(1 << (reg1->idx * 8), &er->ref); | 838 | atomic_sub(1 << (reg1->idx * 8), &er->ref); |
839 | reg1->alloc = 0; | 839 | reg1->alloc = 0; |
840 | } | 840 | } |
841 | 841 | ||
842 | static int snbep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 842 | static int snbep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
843 | { | 843 | { |
844 | struct hw_perf_event *hwc = &event->hw; | 844 | struct hw_perf_event *hwc = &event->hw; |
845 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 845 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
846 | int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK; | 846 | int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK; |
847 | 847 | ||
848 | if (ev_sel >= 0xb && ev_sel <= 0xe) { | 848 | if (ev_sel >= 0xb && ev_sel <= 0xe) { |
849 | reg1->reg = SNBEP_PCU_MSR_PMON_BOX_FILTER; | 849 | reg1->reg = SNBEP_PCU_MSR_PMON_BOX_FILTER; |
850 | reg1->idx = ev_sel - 0xb; | 850 | reg1->idx = ev_sel - 0xb; |
851 | reg1->config = event->attr.config1 & (0xff << (reg1->idx * 8)); | 851 | reg1->config = event->attr.config1 & (0xff << (reg1->idx * 8)); |
852 | } | 852 | } |
853 | return 0; | 853 | return 0; |
854 | } | 854 | } |
855 | 855 | ||
/* PCU ops: common MSR callbacks plus filter byte-slot arbitration. */
static struct intel_uncore_ops snbep_uncore_pcu_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= snbep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};
862 | 862 | ||
/*
 * SNB-EP PCU PMU: one box, four 48-bit counters; shared reg 0
 * arbitrates the four byte slots of the filter MSR.
 */
static struct intel_uncore_type snbep_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= SNBEP_PCU_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_pcu_ops,
	.format_group		= &snbep_uncore_pcu_format_group,
};
876 | 876 | ||
/* NULL-terminated list of MSR-based uncore PMU types on SNB-EP. */
static struct intel_uncore_type *snbep_msr_uncores[] = {
	&snbep_uncore_ubox,
	&snbep_uncore_cbox,
	&snbep_uncore_pcu,
	NULL,
};
883 | 883 | ||
884 | void snbep_uncore_cpu_init(void) | 884 | void snbep_uncore_cpu_init(void) |
885 | { | 885 | { |
886 | if (snbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) | 886 | if (snbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) |
887 | snbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; | 887 | snbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; |
888 | uncore_msr_uncores = snbep_msr_uncores; | 888 | uncore_msr_uncores = snbep_msr_uncores; |
889 | } | 889 | } |
890 | 890 | ||
/*
 * Slot indices into uncore_extra_pci_dev[phys_id][].
 * snbep_qpi_enable_event() indexes it with
 * pmu_idx + SNBEP_PCI_QPI_PORT0_FILTER, so the two QPI filter slots
 * must stay adjacent and in port order.
 * HSWEP_PCI_PCU_3: Haswell-EP PCU device, used to detect parts with
 * only two SBOXes — NOTE(review): its use site is outside this chunk.
 */
enum {
	SNBEP_PCI_QPI_PORT0_FILTER,
	SNBEP_PCI_QPI_PORT1_FILTER,
	HSWEP_PCI_PCU_3,
};
895 | 896 | ||
896 | static int snbep_qpi_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 897 | static int snbep_qpi_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
897 | { | 898 | { |
898 | struct hw_perf_event *hwc = &event->hw; | 899 | struct hw_perf_event *hwc = &event->hw; |
899 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 900 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
900 | struct hw_perf_event_extra *reg2 = &hwc->branch_reg; | 901 | struct hw_perf_event_extra *reg2 = &hwc->branch_reg; |
901 | 902 | ||
902 | if ((hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK) == 0x38) { | 903 | if ((hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK) == 0x38) { |
903 | reg1->idx = 0; | 904 | reg1->idx = 0; |
904 | reg1->reg = SNBEP_Q_Py_PCI_PMON_PKT_MATCH0; | 905 | reg1->reg = SNBEP_Q_Py_PCI_PMON_PKT_MATCH0; |
905 | reg1->config = event->attr.config1; | 906 | reg1->config = event->attr.config1; |
906 | reg2->reg = SNBEP_Q_Py_PCI_PMON_PKT_MASK0; | 907 | reg2->reg = SNBEP_Q_Py_PCI_PMON_PKT_MASK0; |
907 | reg2->config = event->attr.config2; | 908 | reg2->config = event->attr.config2; |
908 | } | 909 | } |
909 | return 0; | 910 | return 0; |
910 | } | 911 | } |
911 | 912 | ||
/*
 * Enable a QPI PMON event.  If the event carries match/mask extra
 * registers (set up by snbep_qpi_hw_config()), program them into the
 * separate per-port QPI "filter" PCI device before enabling the
 * counter itself.
 */
static void snbep_qpi_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		/* pmu_idx selects the PORT0/PORT1 filter device on this socket */
		int idx = box->pmu->pmu_idx + SNBEP_PCI_QPI_PORT0_FILTER;
		struct pci_dev *filter_pdev = uncore_extra_pci_dev[box->phys_id][idx];
		/* filter device may be absent; silently skip filter setup then */
		if (filter_pdev) {
			/* 64-bit match/mask values: write low dword, then high */
			pci_write_config_dword(filter_pdev, reg1->reg,
						(u32)reg1->config);
			pci_write_config_dword(filter_pdev, reg1->reg + 4,
						(u32)(reg1->config >> 32));
			pci_write_config_dword(filter_pdev, reg2->reg,
						(u32)reg2->config);
			pci_write_config_dword(filter_pdev, reg2->reg + 4,
						(u32)(reg2->config >> 32));
		}
	}

	/* finally enable the counter itself */
	pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
936 | 937 | ||
/*
 * QPI boxes reuse the common SNB-EP PCI ops but need custom
 * enable_event/hw_config to handle the match/mask filter devices,
 * plus shared-register constraint management.
 */
static struct intel_uncore_ops snbep_uncore_qpi_ops = {
	SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
	.enable_event		= snbep_qpi_enable_event,
	.hw_config		= snbep_qpi_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};

/* Common field initializers shared by the SNB-EP PCI PMON box types. */
#define SNBEP_UNCORE_PCI_COMMON_INIT()				\
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,			\
	.event_ctl	= SNBEP_PCI_PMON_CTL0,			\
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,		\
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,		\
	.ops		= &snbep_uncore_pci_ops,		\
	.format_group	= &snbep_uncore_format_group
952 | 953 | ||
/* Home Agent PMON box. */
static struct intel_uncore_type snbep_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};

/* Integrated memory controller: one box per channel, plus a fixed counter. */
static struct intel_uncore_type snbep_uncore_imc = {
	.name		= "imc",
	.num_counters   = 4,
	.num_boxes	= 4,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= snbep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};

/*
 * QPI link layer PMON: one box per port.  Uses its own event mask, ops
 * (match/mask filter handling) and format group, so it cannot use
 * SNBEP_UNCORE_PCI_COMMON_INIT().
 */
static struct intel_uncore_type snbep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_qpi_ops,
	.event_descs		= snbep_uncore_qpi_events,
	.format_group		= &snbep_uncore_qpi_format_group,
};


/* Ring-to-PCIe interface PMON box. */
static struct intel_uncore_type snbep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};

/* Ring-to-QPI interface PMON: one box per link. */
static struct intel_uncore_type snbep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 3,
	.num_boxes	= 2,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r3qpi_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
1006 | 1007 | ||
/* Index of each SNB-EP PCI box type within snbep_pci_uncores[]. */
enum {
	SNBEP_PCI_UNCORE_HA,
	SNBEP_PCI_UNCORE_IMC,
	SNBEP_PCI_UNCORE_QPI,
	SNBEP_PCI_UNCORE_R2PCIE,
	SNBEP_PCI_UNCORE_R3QPI,
};

/* NULL-terminated table of all PCI-based uncore box types on SNB-EP. */
static struct intel_uncore_type *snbep_pci_uncores[] = {
	[SNBEP_PCI_UNCORE_HA]		= &snbep_uncore_ha,
	[SNBEP_PCI_UNCORE_IMC]		= &snbep_uncore_imc,
	[SNBEP_PCI_UNCORE_QPI]		= &snbep_uncore_qpi,
	[SNBEP_PCI_UNCORE_R2PCIE]	= &snbep_uncore_r2pcie,
	[SNBEP_PCI_UNCORE_R3QPI]	= &snbep_uncore_r3qpi,
	NULL,
};
1023 | 1024 | ||
/*
 * PCI device-id table binding each SNB-EP uncore PCI function to its
 * box type and box index (packed via UNCORE_PCI_DEV_DATA).  The two
 * 0x3c86/0x3c96 entries are the QPI filter devices, registered as
 * "extra" devices rather than PMON boxes.
 */
static const struct pci_device_id snbep_uncore_pci_ids[] = {
	{ /* Home Agent */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_HA),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_HA, 0),
	},
	{ /* MC Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC0),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC1),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC2),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC3),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 3),
	},
	{ /* QPI Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI0),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI1),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 1),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R2PCIE),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI0),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI1),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* end: all zeroes */ }
};

static struct pci_driver snbep_uncore_pci_driver = {
	.name		= "snbep_uncore",
	.id_table	= snbep_uncore_pci_ids,
};
1082 | 1083 | ||
/*
 * Build the PCI bus number -> socket (physical package) id mapping by
 * scanning every UBOX device (matched by @devid, one per socket) and
 * decoding its Node ID configuration registers.  Returns 0 on success
 * or a negative errno converted from the failing PCI config access.
 */
static int snbep_pci2phy_map_init(int devid)
{
	struct pci_dev *ubox_dev = NULL;
	int i, bus, nodeid;
	int err = 0;
	u32 config = 0;

	while (1) {
		/* find the UBOX device */
		ubox_dev = pci_get_device(PCI_VENDOR_ID_INTEL, devid, ubox_dev);
		if (!ubox_dev)
			break;
		bus = ubox_dev->bus->number;
		/* get the Node ID of the local register */
		err = pci_read_config_dword(ubox_dev, 0x40, &config);
		if (err)
			break;
		nodeid = config;
		/* get the Node ID mapping */
		err = pci_read_config_dword(ubox_dev, 0x54, &config);
		if (err)
			break;
		/*
		 * every three bits in the Node ID mapping register maps
		 * to a particular node.
		 */
		for (i = 0; i < 8; i++) {
			if (nodeid == ((config >> (3 * i)) & 0x7)) {
				uncore_pcibus_to_physid[bus] = i;
				break;
			}
		}
	}

	if (!err) {
		/*
		 * For PCI bus with no UBOX device, find the next bus
		 * that has UBOX device and use its mapping.
		 */
		i = -1;
		for (bus = 255; bus >= 0; bus--) {
			if (uncore_pcibus_to_physid[bus] >= 0)
				i = uncore_pcibus_to_physid[bus];
			else
				uncore_pcibus_to_physid[bus] = i;
		}
	}

	/* drop the reference still held after a loop break */
	if (ubox_dev)
		pci_dev_put(ubox_dev);

	return err ? pcibios_err_to_errno(err) : 0;
}
1139 | 1140 | ||
1140 | int snbep_uncore_pci_init(void) | 1141 | int snbep_uncore_pci_init(void) |
1141 | { | 1142 | { |
1142 | int ret = snbep_pci2phy_map_init(0x3ce0); | 1143 | int ret = snbep_pci2phy_map_init(0x3ce0); |
1143 | if (ret) | 1144 | if (ret) |
1144 | return ret; | 1145 | return ret; |
1145 | uncore_pci_uncores = snbep_pci_uncores; | 1146 | uncore_pci_uncores = snbep_pci_uncores; |
1146 | uncore_pci_driver = &snbep_uncore_pci_driver; | 1147 | uncore_pci_driver = &snbep_uncore_pci_driver; |
1147 | return 0; | 1148 | return 0; |
1148 | } | 1149 | } |
1149 | /* end of Sandy Bridge-EP uncore support */ | 1150 | /* end of Sandy Bridge-EP uncore support */ |
1150 | 1151 | ||
1151 | /* IvyTown uncore support */ | 1152 | /* IvyTown uncore support */ |
1152 | static void ivbep_uncore_msr_init_box(struct intel_uncore_box *box) | 1153 | static void ivbep_uncore_msr_init_box(struct intel_uncore_box *box) |
1153 | { | 1154 | { |
1154 | unsigned msr = uncore_msr_box_ctl(box); | 1155 | unsigned msr = uncore_msr_box_ctl(box); |
1155 | if (msr) | 1156 | if (msr) |
1156 | wrmsrl(msr, IVBEP_PMON_BOX_CTL_INT); | 1157 | wrmsrl(msr, IVBEP_PMON_BOX_CTL_INT); |
1157 | } | 1158 | } |
1158 | 1159 | ||
1159 | static void ivbep_uncore_pci_init_box(struct intel_uncore_box *box) | 1160 | static void ivbep_uncore_pci_init_box(struct intel_uncore_box *box) |
1160 | { | 1161 | { |
1161 | struct pci_dev *pdev = box->pci_dev; | 1162 | struct pci_dev *pdev = box->pci_dev; |
1162 | 1163 | ||
1163 | pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT); | 1164 | pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT); |
1164 | } | 1165 | } |
1165 | 1166 | ||
/*
 * Common ops for IvyTown MSR-based boxes: IVT-specific init, the rest
 * is shared with SNB-EP.
 */
#define IVBEP_UNCORE_MSR_OPS_COMMON_INIT()			\
	.init_box	= ivbep_uncore_msr_init_box,		\
	.disable_box	= snbep_uncore_msr_disable_box,		\
	.enable_box	= snbep_uncore_msr_enable_box,		\
	.disable_event	= snbep_uncore_msr_disable_event,	\
	.enable_event	= snbep_uncore_msr_enable_event,	\
	.read_counter	= uncore_msr_read_counter

static struct intel_uncore_ops ivbep_uncore_msr_ops = {
	IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
};

/* PCI box ops: IVT-specific init, SNB-EP routines otherwise. */
static struct intel_uncore_ops ivbep_uncore_pci_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_uncore_pci_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
};

/* Common field initializers shared by the IvyTown PCI PMON box types. */
#define IVBEP_UNCORE_PCI_COMMON_INIT()				\
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,			\
	.event_ctl	= SNBEP_PCI_PMON_CTL0,			\
	.event_mask	= IVBEP_PMON_RAW_EVENT_MASK,		\
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,		\
	.ops		= &ivbep_uncore_pci_ops,		\
	.format_group	= &ivbep_uncore_format_group
1194 | 1195 | ||
/* sysfs "format" attributes common to most IVT uncore boxes. */
static struct attribute *ivbep_uncore_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};

/* U-box variant: narrower (5-bit) threshold field. */
static struct attribute *ivbep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	NULL,
};

/* C-box variant: adds tid enable plus the box filter sub-fields. */
static struct attribute *ivbep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid.attr,
	&format_attr_filter_link.attr,
	&format_attr_filter_state2.attr,
	&format_attr_filter_nid2.attr,
	&format_attr_filter_opc2.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_c6.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};

/* PCU variant: occupancy selection/edge/invert and the band filters. */
static struct attribute *ivbep_uncore_pcu_formats_attr[] = {
	&format_attr_event_ext.attr,
	&format_attr_occ_sel.attr,
	&format_attr_edge.attr,
	&format_attr_thresh5.attr,
	&format_attr_occ_invert.attr,
	&format_attr_occ_edge.attr,
	&format_attr_filter_band0.attr,
	&format_attr_filter_band1.attr,
	&format_attr_filter_band2.attr,
	&format_attr_filter_band3.attr,
	NULL,
};

/* QPI variant: full set of packet match/mask sub-fields. */
static struct attribute *ivbep_uncore_qpi_formats_attr[] = {
	&format_attr_event_ext.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_thresh8.attr,
	&format_attr_match_rds.attr,
	&format_attr_match_rnid30.attr,
	&format_attr_match_rnid4.attr,
	&format_attr_match_dnid.attr,
	&format_attr_match_mc.attr,
	&format_attr_match_opc.attr,
	&format_attr_match_vnw.attr,
	&format_attr_match0.attr,
	&format_attr_match1.attr,
	&format_attr_mask_rds.attr,
	&format_attr_mask_rnid30.attr,
	&format_attr_mask_rnid4.attr,
	&format_attr_mask_dnid.attr,
	&format_attr_mask_mc.attr,
	&format_attr_mask_opc.attr,
	&format_attr_mask_vnw.attr,
	&format_attr_mask0.attr,
	&format_attr_mask1.attr,
	NULL,
};
1269 | 1270 | ||
/* sysfs attribute groups wrapping the per-box-type format tables above. */
static struct attribute_group ivbep_uncore_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_formats_attr,
};

static struct attribute_group ivbep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_ubox_formats_attr,
};

static struct attribute_group ivbep_uncore_cbox_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_cbox_formats_attr,
};

static struct attribute_group ivbep_uncore_pcu_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_pcu_formats_attr,
};

static struct attribute_group ivbep_uncore_qpi_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_qpi_formats_attr,
};
1294 | 1295 | ||
/* IvyTown U-box: two generic counters plus a fixed UCLK counter. */
static struct intel_uncore_type ivbep_uncore_ubox = {
	.name		= "ubox",
	.num_counters   = 2,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.fixed_ctr_bits	= 48,
	.perf_ctr	= SNBEP_U_MSR_PMON_CTR0,
	.event_ctl	= SNBEP_U_MSR_PMON_CTL0,
	.event_mask	= IVBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.ops		= &ivbep_uncore_msr_ops,
	.format_group	= &ivbep_uncore_ubox_format_group,
};
1309 | 1310 | ||
/*
 * Extra-register descriptions for IVT C-box events: each entry maps an
 * event encoding (matched under the given mask) to the shared filter
 * register fields (last column) that the event is allowed to program.
 */
static struct extra_reg ivbep_uncore_cbox_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1031, 0x10ff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5134, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
	EVENT_EXTRA_END
};
1350 | 1351 | ||
1351 | static u64 ivbep_cbox_filter_mask(int fields) | 1352 | static u64 ivbep_cbox_filter_mask(int fields) |
1352 | { | 1353 | { |
1353 | u64 mask = 0; | 1354 | u64 mask = 0; |
1354 | 1355 | ||
1355 | if (fields & 0x1) | 1356 | if (fields & 0x1) |
1356 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_TID; | 1357 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_TID; |
1357 | if (fields & 0x2) | 1358 | if (fields & 0x2) |
1358 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK; | 1359 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK; |
1359 | if (fields & 0x4) | 1360 | if (fields & 0x4) |
1360 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE; | 1361 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE; |
1361 | if (fields & 0x8) | 1362 | if (fields & 0x8) |
1362 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NID; | 1363 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NID; |
1363 | if (fields & 0x10) { | 1364 | if (fields & 0x10) { |
1364 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC; | 1365 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC; |
1365 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NC; | 1366 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NC; |
1366 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_C6; | 1367 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_C6; |
1367 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC; | 1368 | mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC; |
1368 | } | 1369 | } |
1369 | 1370 | ||
1370 | return mask; | 1371 | return mask; |
1371 | } | 1372 | } |
1372 | 1373 | ||
/*
 * Event-constraint lookup for IVB-EP Cboxes: defer to the shared SNB-EP
 * helper, passing the IVB-EP specific filter-field -> mask translator.
 */
static struct event_constraint *
ivbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, ivbep_cbox_filter_mask);
}
1378 | 1379 | ||
1379 | static int ivbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 1380 | static int ivbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
1380 | { | 1381 | { |
1381 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | 1382 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; |
1382 | struct extra_reg *er; | 1383 | struct extra_reg *er; |
1383 | int idx = 0; | 1384 | int idx = 0; |
1384 | 1385 | ||
1385 | for (er = ivbep_uncore_cbox_extra_regs; er->msr; er++) { | 1386 | for (er = ivbep_uncore_cbox_extra_regs; er->msr; er++) { |
1386 | if (er->event != (event->hw.config & er->config_mask)) | 1387 | if (er->event != (event->hw.config & er->config_mask)) |
1387 | continue; | 1388 | continue; |
1388 | idx |= er->idx; | 1389 | idx |= er->idx; |
1389 | } | 1390 | } |
1390 | 1391 | ||
1391 | if (idx) { | 1392 | if (idx) { |
1392 | reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER + | 1393 | reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER + |
1393 | SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; | 1394 | SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; |
1394 | reg1->config = event->attr.config1 & ivbep_cbox_filter_mask(idx); | 1395 | reg1->config = event->attr.config1 & ivbep_cbox_filter_mask(idx); |
1395 | reg1->idx = idx; | 1396 | reg1->idx = idx; |
1396 | } | 1397 | } |
1397 | return 0; | 1398 | return 0; |
1398 | } | 1399 | } |
1399 | 1400 | ||
1400 | static void ivbep_cbox_enable_event(struct intel_uncore_box *box, struct perf_event *event) | 1401 | static void ivbep_cbox_enable_event(struct intel_uncore_box *box, struct perf_event *event) |
1401 | { | 1402 | { |
1402 | struct hw_perf_event *hwc = &event->hw; | 1403 | struct hw_perf_event *hwc = &event->hw; |
1403 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 1404 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
1404 | 1405 | ||
1405 | if (reg1->idx != EXTRA_REG_NONE) { | 1406 | if (reg1->idx != EXTRA_REG_NONE) { |
1406 | u64 filter = uncore_shared_reg_config(box, 0); | 1407 | u64 filter = uncore_shared_reg_config(box, 0); |
1407 | wrmsrl(reg1->reg, filter & 0xffffffff); | 1408 | wrmsrl(reg1->reg, filter & 0xffffffff); |
1408 | wrmsrl(reg1->reg + 6, filter >> 32); | 1409 | wrmsrl(reg1->reg + 6, filter >> 32); |
1409 | } | 1410 | } |
1410 | 1411 | ||
1411 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); | 1412 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); |
1412 | } | 1413 | } |
1413 | 1414 | ||
/*
 * IVB-EP Cbox PMU callbacks: SNB-EP common box/event control, with
 * IVB-EP specific init, event enable, hw_config and constraint lookup.
 */
static struct intel_uncore_ops ivbep_uncore_cbox_ops = {
	.init_box		= ivbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= ivbep_cbox_enable_event,
	.read_counter		= uncore_msr_read_counter,
	.hw_config		= ivbep_cbox_hw_config,
	.get_constraint		= ivbep_cbox_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
1425 | 1426 | ||
/*
 * IVB-EP Cbox uncore PMU type.  Up to 15 Cbox instances; the actual
 * count is clamped to the package core count in ivbep_uncore_cpu_init().
 */
static struct intel_uncore_type ivbep_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 15,
	.perf_ctr_bits		= 44,
	.event_ctl		= SNBEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= SNBEP_C0_MSR_PMON_CTR0,
	.event_mask		= IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= SNBEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,	/* the shared box filter register */
	.constraints		= snbep_uncore_cbox_constraints,
	.ops			= &ivbep_uncore_cbox_ops,
	.format_group		= &ivbep_uncore_cbox_format_group,
};
1441 | 1442 | ||
/* IVB-EP PCU PMU callbacks: common MSR ops plus SNB-EP PCU filter handling. */
static struct intel_uncore_ops ivbep_uncore_pcu_ops = {
	IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= snbep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};
1448 | 1449 | ||
/* IVB-EP power control unit (PCU) uncore PMU type: one box, four counters. */
static struct intel_uncore_type ivbep_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= SNBEP_PCU_MSR_PMON_CTL0,
	.event_mask		= IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_pcu_ops,
	.format_group		= &ivbep_uncore_pcu_format_group,
};
1462 | 1463 | ||
/* MSR-based IVB-EP uncore PMUs, NULL-terminated for the generic iterator. */
static struct intel_uncore_type *ivbep_msr_uncores[] = {
	&ivbep_uncore_ubox,
	&ivbep_uncore_cbox,
	&ivbep_uncore_pcu,
	NULL,
};
1469 | 1470 | ||
1470 | void ivbep_uncore_cpu_init(void) | 1471 | void ivbep_uncore_cpu_init(void) |
1471 | { | 1472 | { |
1472 | if (ivbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) | 1473 | if (ivbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) |
1473 | ivbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; | 1474 | ivbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; |
1474 | uncore_msr_uncores = ivbep_msr_uncores; | 1475 | uncore_msr_uncores = ivbep_msr_uncores; |
1475 | } | 1476 | } |
1476 | 1477 | ||
/* IVB-EP home agent (HA) uncore PMU type: two boxes, PCI-based access. */
static struct intel_uncore_type ivbep_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1484 | 1485 | ||
/*
 * IVB-EP memory controller (IMC) uncore PMU type: eight channel boxes,
 * each with four generic counters plus one fixed (DCLK) counter.
 */
static struct intel_uncore_type ivbep_uncore_imc = {
	.name		= "imc",
	.num_counters   = 4,
	.num_boxes	= 8,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= snbep_uncore_imc_events,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1496 | 1497 | ||
/*
 * Registers in IRP boxes are not properly aligned, so the per-counter
 * control and counter PCI config-space offsets are table-driven.
 */
static unsigned ivbep_uncore_irp_ctls[] = {0xd8, 0xdc, 0xe0, 0xe4};
static unsigned ivbep_uncore_irp_ctrs[] = {0xa0, 0xb0, 0xb8, 0xc0};
1500 | 1501 | ||
1501 | static void ivbep_uncore_irp_enable_event(struct intel_uncore_box *box, struct perf_event *event) | 1502 | static void ivbep_uncore_irp_enable_event(struct intel_uncore_box *box, struct perf_event *event) |
1502 | { | 1503 | { |
1503 | struct pci_dev *pdev = box->pci_dev; | 1504 | struct pci_dev *pdev = box->pci_dev; |
1504 | struct hw_perf_event *hwc = &event->hw; | 1505 | struct hw_perf_event *hwc = &event->hw; |
1505 | 1506 | ||
1506 | pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], | 1507 | pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], |
1507 | hwc->config | SNBEP_PMON_CTL_EN); | 1508 | hwc->config | SNBEP_PMON_CTL_EN); |
1508 | } | 1509 | } |
1509 | 1510 | ||
1510 | static void ivbep_uncore_irp_disable_event(struct intel_uncore_box *box, struct perf_event *event) | 1511 | static void ivbep_uncore_irp_disable_event(struct intel_uncore_box *box, struct perf_event *event) |
1511 | { | 1512 | { |
1512 | struct pci_dev *pdev = box->pci_dev; | 1513 | struct pci_dev *pdev = box->pci_dev; |
1513 | struct hw_perf_event *hwc = &event->hw; | 1514 | struct hw_perf_event *hwc = &event->hw; |
1514 | 1515 | ||
1515 | pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], hwc->config); | 1516 | pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], hwc->config); |
1516 | } | 1517 | } |
1517 | 1518 | ||
1518 | static u64 ivbep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event) | 1519 | static u64 ivbep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event) |
1519 | { | 1520 | { |
1520 | struct pci_dev *pdev = box->pci_dev; | 1521 | struct pci_dev *pdev = box->pci_dev; |
1521 | struct hw_perf_event *hwc = &event->hw; | 1522 | struct hw_perf_event *hwc = &event->hw; |
1522 | u64 count = 0; | 1523 | u64 count = 0; |
1523 | 1524 | ||
1524 | pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx], (u32 *)&count); | 1525 | pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx], (u32 *)&count); |
1525 | pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1); | 1526 | pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1); |
1526 | 1527 | ||
1527 | return count; | 1528 | return count; |
1528 | } | 1529 | } |
1529 | 1530 | ||
/* IRP PMU callbacks: IRP-specific event/counter access due to the
 * non-standard register layout; common SNB-EP box control otherwise. */
static struct intel_uncore_ops ivbep_uncore_irp_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= ivbep_uncore_irp_disable_event,
	.enable_event	= ivbep_uncore_irp_enable_event,
	.read_counter	= ivbep_uncore_irp_read_counter,
};
1538 | 1539 | ||
/* IVB-EP IRP (IIO ring port) uncore PMU type: single box, PCI-based. */
static struct intel_uncore_type ivbep_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= IVBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &ivbep_uncore_irp_ops,
	.format_group		= &ivbep_uncore_format_group,
};
1549 | 1550 | ||
/* QPI PMU callbacks: SNB-EP QPI match/mask (extra-reg) handling on top of
 * the common PCI box control. */
static struct intel_uncore_ops ivbep_uncore_qpi_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_qpi_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
	.hw_config	= snbep_qpi_hw_config,
	.get_constraint	= uncore_get_constraint,
	.put_constraint	= uncore_put_constraint,
};
1561 | 1562 | ||
/* IVB-EP QPI link-layer uncore PMU type: three port boxes, PCI-based. */
static struct intel_uncore_type ivbep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,	/* match/mask registers */
	.ops			= &ivbep_uncore_qpi_ops,
	.format_group		= &ivbep_uncore_qpi_format_group,
};
1575 | 1576 | ||
/* IVB-EP R2PCIe (ring-to-PCIe) uncore PMU type: single box, constrained events. */
static struct intel_uncore_type ivbep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r2pcie_constraints,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1584 | 1585 | ||
/* IVB-EP R3QPI (ring-to-QPI) uncore PMU type: two boxes, constrained events. */
static struct intel_uncore_type ivbep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 3,
	.num_boxes	= 2,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r3qpi_constraints,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1593 | 1594 | ||
/* Indexes into ivbep_pci_uncores[], also encoded into each PCI id's driver_data. */
enum {
	IVBEP_PCI_UNCORE_HA,
	IVBEP_PCI_UNCORE_IMC,
	IVBEP_PCI_UNCORE_IRP,
	IVBEP_PCI_UNCORE_QPI,
	IVBEP_PCI_UNCORE_R2PCIE,
	IVBEP_PCI_UNCORE_R3QPI,
};
1602 | 1603 | ||
/* PCI-based IVB-EP uncore PMUs, indexed by IVBEP_PCI_UNCORE_*, NULL-terminated. */
static struct intel_uncore_type *ivbep_pci_uncores[] = {
	[IVBEP_PCI_UNCORE_HA]	= &ivbep_uncore_ha,
	[IVBEP_PCI_UNCORE_IMC]	= &ivbep_uncore_imc,
	[IVBEP_PCI_UNCORE_IRP]	= &ivbep_uncore_irp,
	[IVBEP_PCI_UNCORE_QPI]	= &ivbep_uncore_qpi,
	[IVBEP_PCI_UNCORE_R2PCIE]	= &ivbep_uncore_r2pcie,
	[IVBEP_PCI_UNCORE_R3QPI]	= &ivbep_uncore_r3qpi,
	NULL,
};
1612 | 1613 | ||
/*
 * PCI device IDs of the IVB-EP uncore units.  driver_data encodes the
 * uncore type index and box/device index via UNCORE_PCI_DEV_DATA().
 */
static const struct pci_device_id ivbep_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe30),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe38),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb4),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb5),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb0),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 4 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb1),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef4),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef5),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef0),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 4 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef1),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe39),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe32),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe33),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3a),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe34),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe36),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe37),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3e),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */ /* was mislabeled "Port 0"; driver_data is PORT1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* end: all zeroes */ }
};
1698 | 1699 | ||
/* PCI driver shell binding the id table; probe/remove are handled by the
 * generic uncore PCI code. */
static struct pci_driver ivbep_uncore_pci_driver = {
	.name		= "ivbep_uncore",
	.id_table	= ivbep_uncore_pci_ids,
};
1703 | 1704 | ||
1704 | int ivbep_uncore_pci_init(void) | 1705 | int ivbep_uncore_pci_init(void) |
1705 | { | 1706 | { |
1706 | int ret = snbep_pci2phy_map_init(0x0e1e); | 1707 | int ret = snbep_pci2phy_map_init(0x0e1e); |
1707 | if (ret) | 1708 | if (ret) |
1708 | return ret; | 1709 | return ret; |
1709 | uncore_pci_uncores = ivbep_pci_uncores; | 1710 | uncore_pci_uncores = ivbep_pci_uncores; |
1710 | uncore_pci_driver = &ivbep_uncore_pci_driver; | 1711 | uncore_pci_driver = &ivbep_uncore_pci_driver; |
1711 | return 0; | 1712 | return 0; |
1712 | } | 1713 | } |
1713 | /* end of IvyTown uncore support */ | 1714 | /* end of IvyTown uncore support */ |
1714 | 1715 | ||
1715 | /* Haswell-EP uncore support */ | 1716 | /* Haswell-EP uncore support */ |
/* sysfs "format" attributes exposed for the HSW-EP Ubox PMU. */
static struct attribute *hswep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	&format_attr_filter_tid2.attr,
	&format_attr_filter_cid.attr,
	NULL,
};
1726 | 1727 | ||
/* Wraps the Ubox format attributes under the "format" sysfs group. */
static struct attribute_group hswep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_ubox_formats_attr,
};
1731 | 1732 | ||
1732 | static int hswep_ubox_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 1733 | static int hswep_ubox_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
1733 | { | 1734 | { |
1734 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | 1735 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; |
1735 | reg1->reg = HSWEP_U_MSR_PMON_FILTER; | 1736 | reg1->reg = HSWEP_U_MSR_PMON_FILTER; |
1736 | reg1->config = event->attr.config1 & HSWEP_U_MSR_PMON_BOX_FILTER_MASK; | 1737 | reg1->config = event->attr.config1 & HSWEP_U_MSR_PMON_BOX_FILTER_MASK; |
1737 | reg1->idx = 0; | 1738 | reg1->idx = 0; |
1738 | return 0; | 1739 | return 0; |
1739 | } | 1740 | } |
1740 | 1741 | ||
/* HSW-EP Ubox PMU callbacks: common SNB-EP MSR ops plus the Ubox filter setup. */
static struct intel_uncore_ops hswep_uncore_ubox_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= hswep_ubox_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};
1747 | 1748 | ||
/*
 * HSW-EP Ubox uncore PMU type: one box with two generic counters plus a
 * fixed UCLK counter, and a single shared filter register.
 */
static struct intel_uncore_type hswep_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 44,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.num_shared_regs	= 1,
	.ops			= &hswep_uncore_ubox_ops,
	.format_group		= &hswep_uncore_ubox_format_group,
};
1763 | 1764 | ||
/* sysfs "format" attributes exposed for the HSW-EP Cbox PMU,
 * including the Cbox filter fields (tid/link/state/nid/opc/nc/c6/isoc). */
static struct attribute *hswep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid3.attr,
	&format_attr_filter_link2.attr,
	&format_attr_filter_state3.attr,
	&format_attr_filter_nid2.attr,
	&format_attr_filter_opc2.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_c6.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};
1780 | 1781 | ||
1781 | static struct attribute_group hswep_uncore_cbox_format_group = { | 1782 | static struct attribute_group hswep_uncore_cbox_format_group = { |
1782 | .name = "format", | 1783 | .name = "format", |
1783 | .attrs = hswep_uncore_cbox_formats_attr, | 1784 | .attrs = hswep_uncore_cbox_formats_attr, |
1784 | }; | 1785 | }; |
1785 | 1786 | ||
1786 | static struct event_constraint hswep_uncore_cbox_constraints[] = { | 1787 | static struct event_constraint hswep_uncore_cbox_constraints[] = { |
1787 | UNCORE_EVENT_CONSTRAINT(0x01, 0x1), | 1788 | UNCORE_EVENT_CONSTRAINT(0x01, 0x1), |
1788 | UNCORE_EVENT_CONSTRAINT(0x09, 0x1), | 1789 | UNCORE_EVENT_CONSTRAINT(0x09, 0x1), |
1789 | UNCORE_EVENT_CONSTRAINT(0x11, 0x1), | 1790 | UNCORE_EVENT_CONSTRAINT(0x11, 0x1), |
1790 | UNCORE_EVENT_CONSTRAINT(0x36, 0x1), | 1791 | UNCORE_EVENT_CONSTRAINT(0x36, 0x1), |
1791 | UNCORE_EVENT_CONSTRAINT(0x38, 0x3), | 1792 | UNCORE_EVENT_CONSTRAINT(0x38, 0x3), |
1792 | UNCORE_EVENT_CONSTRAINT(0x3b, 0x1), | 1793 | UNCORE_EVENT_CONSTRAINT(0x3b, 0x1), |
1793 | UNCORE_EVENT_CONSTRAINT(0x3e, 0x1), | 1794 | UNCORE_EVENT_CONSTRAINT(0x3e, 0x1), |
1794 | EVENT_CONSTRAINT_END | 1795 | EVENT_CONSTRAINT_END |
1795 | }; | 1796 | }; |
1796 | 1797 | ||
1797 | static struct extra_reg hswep_uncore_cbox_extra_regs[] = { | 1798 | static struct extra_reg hswep_uncore_cbox_extra_regs[] = { |
1798 | SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN, | 1799 | SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN, |
1799 | SNBEP_CBO_PMON_CTL_TID_EN, 0x1), | 1800 | SNBEP_CBO_PMON_CTL_TID_EN, 0x1), |
1800 | SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4), | 1801 | SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4), |
1801 | SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4), | 1802 | SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4), |
1802 | SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4), | 1803 | SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4), |
1803 | SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4), | 1804 | SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4), |
1804 | SNBEP_CBO_EVENT_EXTRA_REG(0x2134, 0xffff, 0x4), | 1805 | SNBEP_CBO_EVENT_EXTRA_REG(0x2134, 0xffff, 0x4), |
1805 | SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x4), | 1806 | SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x4), |
1806 | SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8), | 1807 | SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8), |
1807 | SNBEP_CBO_EVENT_EXTRA_REG(0x4028, 0x40ff, 0x8), | 1808 | SNBEP_CBO_EVENT_EXTRA_REG(0x4028, 0x40ff, 0x8), |
1808 | SNBEP_CBO_EVENT_EXTRA_REG(0x4032, 0x40ff, 0x8), | 1809 | SNBEP_CBO_EVENT_EXTRA_REG(0x4032, 0x40ff, 0x8), |
1809 | SNBEP_CBO_EVENT_EXTRA_REG(0x4029, 0x40ff, 0x8), | 1810 | SNBEP_CBO_EVENT_EXTRA_REG(0x4029, 0x40ff, 0x8), |
1810 | SNBEP_CBO_EVENT_EXTRA_REG(0x4033, 0x40ff, 0x8), | 1811 | SNBEP_CBO_EVENT_EXTRA_REG(0x4033, 0x40ff, 0x8), |
1811 | SNBEP_CBO_EVENT_EXTRA_REG(0x402A, 0x40ff, 0x8), | 1812 | SNBEP_CBO_EVENT_EXTRA_REG(0x402A, 0x40ff, 0x8), |
1812 | SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x12), | 1813 | SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x12), |
1813 | SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10), | 1814 | SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10), |
1814 | SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18), | 1815 | SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18), |
1815 | SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8), | 1816 | SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8), |
1816 | SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8), | 1817 | SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8), |
1817 | SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8), | 1818 | SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8), |
1818 | SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18), | 1819 | SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18), |
1819 | SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8), | 1820 | SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8), |
1820 | SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10), | 1821 | SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10), |
1821 | SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10), | 1822 | SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10), |
1822 | SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10), | 1823 | SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10), |
1823 | SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10), | 1824 | SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10), |
1824 | SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10), | 1825 | SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10), |
1825 | SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10), | 1826 | SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10), |
1826 | SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18), | 1827 | SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18), |
1827 | SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8), | 1828 | SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8), |
1828 | SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8), | 1829 | SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8), |
1829 | SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18), | 1830 | SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18), |
1830 | SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8), | 1831 | SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8), |
1831 | SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10), | 1832 | SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10), |
1832 | SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10), | 1833 | SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10), |
1833 | SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10), | 1834 | SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10), |
1834 | SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10), | 1835 | SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10), |
1835 | SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8), | 1836 | SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8), |
1836 | EVENT_EXTRA_END | 1837 | EVENT_EXTRA_END |
1837 | }; | 1838 | }; |
1838 | 1839 | ||
1839 | static u64 hswep_cbox_filter_mask(int fields) | 1840 | static u64 hswep_cbox_filter_mask(int fields) |
1840 | { | 1841 | { |
1841 | u64 mask = 0; | 1842 | u64 mask = 0; |
1842 | if (fields & 0x1) | 1843 | if (fields & 0x1) |
1843 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_TID; | 1844 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_TID; |
1844 | if (fields & 0x2) | 1845 | if (fields & 0x2) |
1845 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK; | 1846 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK; |
1846 | if (fields & 0x4) | 1847 | if (fields & 0x4) |
1847 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE; | 1848 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE; |
1848 | if (fields & 0x8) | 1849 | if (fields & 0x8) |
1849 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NID; | 1850 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NID; |
1850 | if (fields & 0x10) { | 1851 | if (fields & 0x10) { |
1851 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC; | 1852 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC; |
1852 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NC; | 1853 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NC; |
1853 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_C6; | 1854 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_C6; |
1854 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC; | 1855 | mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC; |
1855 | } | 1856 | } |
1856 | return mask; | 1857 | return mask; |
1857 | } | 1858 | } |
1858 | 1859 | ||
1859 | static struct event_constraint * | 1860 | static struct event_constraint * |
1860 | hswep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event) | 1861 | hswep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event) |
1861 | { | 1862 | { |
1862 | return __snbep_cbox_get_constraint(box, event, hswep_cbox_filter_mask); | 1863 | return __snbep_cbox_get_constraint(box, event, hswep_cbox_filter_mask); |
1863 | } | 1864 | } |
1864 | 1865 | ||
1865 | static int hswep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 1866 | static int hswep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
1866 | { | 1867 | { |
1867 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; | 1868 | struct hw_perf_event_extra *reg1 = &event->hw.extra_reg; |
1868 | struct extra_reg *er; | 1869 | struct extra_reg *er; |
1869 | int idx = 0; | 1870 | int idx = 0; |
1870 | 1871 | ||
1871 | for (er = hswep_uncore_cbox_extra_regs; er->msr; er++) { | 1872 | for (er = hswep_uncore_cbox_extra_regs; er->msr; er++) { |
1872 | if (er->event != (event->hw.config & er->config_mask)) | 1873 | if (er->event != (event->hw.config & er->config_mask)) |
1873 | continue; | 1874 | continue; |
1874 | idx |= er->idx; | 1875 | idx |= er->idx; |
1875 | } | 1876 | } |
1876 | 1877 | ||
1877 | if (idx) { | 1878 | if (idx) { |
1878 | reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 + | 1879 | reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 + |
1879 | HSWEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; | 1880 | HSWEP_CBO_MSR_OFFSET * box->pmu->pmu_idx; |
1880 | reg1->config = event->attr.config1 & hswep_cbox_filter_mask(idx); | 1881 | reg1->config = event->attr.config1 & hswep_cbox_filter_mask(idx); |
1881 | reg1->idx = idx; | 1882 | reg1->idx = idx; |
1882 | } | 1883 | } |
1883 | return 0; | 1884 | return 0; |
1884 | } | 1885 | } |
1885 | 1886 | ||
1886 | static void hswep_cbox_enable_event(struct intel_uncore_box *box, | 1887 | static void hswep_cbox_enable_event(struct intel_uncore_box *box, |
1887 | struct perf_event *event) | 1888 | struct perf_event *event) |
1888 | { | 1889 | { |
1889 | struct hw_perf_event *hwc = &event->hw; | 1890 | struct hw_perf_event *hwc = &event->hw; |
1890 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 1891 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
1891 | 1892 | ||
1892 | if (reg1->idx != EXTRA_REG_NONE) { | 1893 | if (reg1->idx != EXTRA_REG_NONE) { |
1893 | u64 filter = uncore_shared_reg_config(box, 0); | 1894 | u64 filter = uncore_shared_reg_config(box, 0); |
1894 | wrmsrl(reg1->reg, filter & 0xffffffff); | 1895 | wrmsrl(reg1->reg, filter & 0xffffffff); |
1895 | wrmsrl(reg1->reg + 1, filter >> 32); | 1896 | wrmsrl(reg1->reg + 1, filter >> 32); |
1896 | } | 1897 | } |
1897 | 1898 | ||
1898 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); | 1899 | wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN); |
1899 | } | 1900 | } |
1900 | 1901 | ||
1901 | static struct intel_uncore_ops hswep_uncore_cbox_ops = { | 1902 | static struct intel_uncore_ops hswep_uncore_cbox_ops = { |
1902 | .init_box = snbep_uncore_msr_init_box, | 1903 | .init_box = snbep_uncore_msr_init_box, |
1903 | .disable_box = snbep_uncore_msr_disable_box, | 1904 | .disable_box = snbep_uncore_msr_disable_box, |
1904 | .enable_box = snbep_uncore_msr_enable_box, | 1905 | .enable_box = snbep_uncore_msr_enable_box, |
1905 | .disable_event = snbep_uncore_msr_disable_event, | 1906 | .disable_event = snbep_uncore_msr_disable_event, |
1906 | .enable_event = hswep_cbox_enable_event, | 1907 | .enable_event = hswep_cbox_enable_event, |
1907 | .read_counter = uncore_msr_read_counter, | 1908 | .read_counter = uncore_msr_read_counter, |
1908 | .hw_config = hswep_cbox_hw_config, | 1909 | .hw_config = hswep_cbox_hw_config, |
1909 | .get_constraint = hswep_cbox_get_constraint, | 1910 | .get_constraint = hswep_cbox_get_constraint, |
1910 | .put_constraint = snbep_cbox_put_constraint, | 1911 | .put_constraint = snbep_cbox_put_constraint, |
1911 | }; | 1912 | }; |
1912 | 1913 | ||
1913 | static struct intel_uncore_type hswep_uncore_cbox = { | 1914 | static struct intel_uncore_type hswep_uncore_cbox = { |
1914 | .name = "cbox", | 1915 | .name = "cbox", |
1915 | .num_counters = 4, | 1916 | .num_counters = 4, |
1916 | .num_boxes = 18, | 1917 | .num_boxes = 18, |
1917 | .perf_ctr_bits = 44, | 1918 | .perf_ctr_bits = 44, |
1918 | .event_ctl = HSWEP_C0_MSR_PMON_CTL0, | 1919 | .event_ctl = HSWEP_C0_MSR_PMON_CTL0, |
1919 | .perf_ctr = HSWEP_C0_MSR_PMON_CTR0, | 1920 | .perf_ctr = HSWEP_C0_MSR_PMON_CTR0, |
1920 | .event_mask = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK, | 1921 | .event_mask = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK, |
1921 | .box_ctl = HSWEP_C0_MSR_PMON_BOX_CTL, | 1922 | .box_ctl = HSWEP_C0_MSR_PMON_BOX_CTL, |
1922 | .msr_offset = HSWEP_CBO_MSR_OFFSET, | 1923 | .msr_offset = HSWEP_CBO_MSR_OFFSET, |
1923 | .num_shared_regs = 1, | 1924 | .num_shared_regs = 1, |
1924 | .constraints = hswep_uncore_cbox_constraints, | 1925 | .constraints = hswep_uncore_cbox_constraints, |
1925 | .ops = &hswep_uncore_cbox_ops, | 1926 | .ops = &hswep_uncore_cbox_ops, |
1926 | .format_group = &hswep_uncore_cbox_format_group, | 1927 | .format_group = &hswep_uncore_cbox_format_group, |
1927 | }; | 1928 | }; |
1928 | 1929 | ||
1929 | /* | 1930 | /* |
1930 | * Write SBOX Initialization register bit by bit to avoid spurious #GPs | 1931 | * Write SBOX Initialization register bit by bit to avoid spurious #GPs |
1931 | */ | 1932 | */ |
1932 | static void hswep_uncore_sbox_msr_init_box(struct intel_uncore_box *box) | 1933 | static void hswep_uncore_sbox_msr_init_box(struct intel_uncore_box *box) |
1933 | { | 1934 | { |
1934 | unsigned msr = uncore_msr_box_ctl(box); | 1935 | unsigned msr = uncore_msr_box_ctl(box); |
1935 | 1936 | ||
1936 | if (msr) { | 1937 | if (msr) { |
1937 | u64 init = SNBEP_PMON_BOX_CTL_INT; | 1938 | u64 init = SNBEP_PMON_BOX_CTL_INT; |
1938 | u64 flags = 0; | 1939 | u64 flags = 0; |
1939 | int i; | 1940 | int i; |
1940 | 1941 | ||
1941 | for_each_set_bit(i, (unsigned long *)&init, 64) { | 1942 | for_each_set_bit(i, (unsigned long *)&init, 64) { |
1942 | flags |= (1ULL << i); | 1943 | flags |= (1ULL << i); |
1943 | wrmsrl(msr, flags); | 1944 | wrmsrl(msr, flags); |
1944 | } | 1945 | } |
1945 | } | 1946 | } |
1946 | } | 1947 | } |
1947 | 1948 | ||
1948 | static struct intel_uncore_ops hswep_uncore_sbox_msr_ops = { | 1949 | static struct intel_uncore_ops hswep_uncore_sbox_msr_ops = { |
1949 | __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), | 1950 | __SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), |
1950 | .init_box = hswep_uncore_sbox_msr_init_box | 1951 | .init_box = hswep_uncore_sbox_msr_init_box |
1951 | }; | 1952 | }; |
1952 | 1953 | ||
1953 | static struct attribute *hswep_uncore_sbox_formats_attr[] = { | 1954 | static struct attribute *hswep_uncore_sbox_formats_attr[] = { |
1954 | &format_attr_event.attr, | 1955 | &format_attr_event.attr, |
1955 | &format_attr_umask.attr, | 1956 | &format_attr_umask.attr, |
1956 | &format_attr_edge.attr, | 1957 | &format_attr_edge.attr, |
1957 | &format_attr_tid_en.attr, | 1958 | &format_attr_tid_en.attr, |
1958 | &format_attr_inv.attr, | 1959 | &format_attr_inv.attr, |
1959 | &format_attr_thresh8.attr, | 1960 | &format_attr_thresh8.attr, |
1960 | NULL, | 1961 | NULL, |
1961 | }; | 1962 | }; |
1962 | 1963 | ||
1963 | static struct attribute_group hswep_uncore_sbox_format_group = { | 1964 | static struct attribute_group hswep_uncore_sbox_format_group = { |
1964 | .name = "format", | 1965 | .name = "format", |
1965 | .attrs = hswep_uncore_sbox_formats_attr, | 1966 | .attrs = hswep_uncore_sbox_formats_attr, |
1966 | }; | 1967 | }; |
1967 | 1968 | ||
1968 | static struct intel_uncore_type hswep_uncore_sbox = { | 1969 | static struct intel_uncore_type hswep_uncore_sbox = { |
1969 | .name = "sbox", | 1970 | .name = "sbox", |
1970 | .num_counters = 4, | 1971 | .num_counters = 4, |
1971 | .num_boxes = 4, | 1972 | .num_boxes = 4, |
1972 | .perf_ctr_bits = 44, | 1973 | .perf_ctr_bits = 44, |
1973 | .event_ctl = HSWEP_S0_MSR_PMON_CTL0, | 1974 | .event_ctl = HSWEP_S0_MSR_PMON_CTL0, |
1974 | .perf_ctr = HSWEP_S0_MSR_PMON_CTR0, | 1975 | .perf_ctr = HSWEP_S0_MSR_PMON_CTR0, |
1975 | .event_mask = HSWEP_S_MSR_PMON_RAW_EVENT_MASK, | 1976 | .event_mask = HSWEP_S_MSR_PMON_RAW_EVENT_MASK, |
1976 | .box_ctl = HSWEP_S0_MSR_PMON_BOX_CTL, | 1977 | .box_ctl = HSWEP_S0_MSR_PMON_BOX_CTL, |
1977 | .msr_offset = HSWEP_SBOX_MSR_OFFSET, | 1978 | .msr_offset = HSWEP_SBOX_MSR_OFFSET, |
1978 | .ops = &hswep_uncore_sbox_msr_ops, | 1979 | .ops = &hswep_uncore_sbox_msr_ops, |
1979 | .format_group = &hswep_uncore_sbox_format_group, | 1980 | .format_group = &hswep_uncore_sbox_format_group, |
1980 | }; | 1981 | }; |
1981 | 1982 | ||
1982 | static int hswep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event) | 1983 | static int hswep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event) |
1983 | { | 1984 | { |
1984 | struct hw_perf_event *hwc = &event->hw; | 1985 | struct hw_perf_event *hwc = &event->hw; |
1985 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; | 1986 | struct hw_perf_event_extra *reg1 = &hwc->extra_reg; |
1986 | int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK; | 1987 | int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK; |
1987 | 1988 | ||
1988 | if (ev_sel >= 0xb && ev_sel <= 0xe) { | 1989 | if (ev_sel >= 0xb && ev_sel <= 0xe) { |
1989 | reg1->reg = HSWEP_PCU_MSR_PMON_BOX_FILTER; | 1990 | reg1->reg = HSWEP_PCU_MSR_PMON_BOX_FILTER; |
1990 | reg1->idx = ev_sel - 0xb; | 1991 | reg1->idx = ev_sel - 0xb; |
1991 | reg1->config = event->attr.config1 & (0xff << reg1->idx); | 1992 | reg1->config = event->attr.config1 & (0xff << reg1->idx); |
1992 | } | 1993 | } |
1993 | return 0; | 1994 | return 0; |
1994 | } | 1995 | } |
1995 | 1996 | ||
1996 | static struct intel_uncore_ops hswep_uncore_pcu_ops = { | 1997 | static struct intel_uncore_ops hswep_uncore_pcu_ops = { |
1997 | SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), | 1998 | SNBEP_UNCORE_MSR_OPS_COMMON_INIT(), |
1998 | .hw_config = hswep_pcu_hw_config, | 1999 | .hw_config = hswep_pcu_hw_config, |
1999 | .get_constraint = snbep_pcu_get_constraint, | 2000 | .get_constraint = snbep_pcu_get_constraint, |
2000 | .put_constraint = snbep_pcu_put_constraint, | 2001 | .put_constraint = snbep_pcu_put_constraint, |
2001 | }; | 2002 | }; |
2002 | 2003 | ||
2003 | static struct intel_uncore_type hswep_uncore_pcu = { | 2004 | static struct intel_uncore_type hswep_uncore_pcu = { |
2004 | .name = "pcu", | 2005 | .name = "pcu", |
2005 | .num_counters = 4, | 2006 | .num_counters = 4, |
2006 | .num_boxes = 1, | 2007 | .num_boxes = 1, |
2007 | .perf_ctr_bits = 48, | 2008 | .perf_ctr_bits = 48, |
2008 | .perf_ctr = HSWEP_PCU_MSR_PMON_CTR0, | 2009 | .perf_ctr = HSWEP_PCU_MSR_PMON_CTR0, |
2009 | .event_ctl = HSWEP_PCU_MSR_PMON_CTL0, | 2010 | .event_ctl = HSWEP_PCU_MSR_PMON_CTL0, |
2010 | .event_mask = SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK, | 2011 | .event_mask = SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK, |
2011 | .box_ctl = HSWEP_PCU_MSR_PMON_BOX_CTL, | 2012 | .box_ctl = HSWEP_PCU_MSR_PMON_BOX_CTL, |
2012 | .num_shared_regs = 1, | 2013 | .num_shared_regs = 1, |
2013 | .ops = &hswep_uncore_pcu_ops, | 2014 | .ops = &hswep_uncore_pcu_ops, |
2014 | .format_group = &snbep_uncore_pcu_format_group, | 2015 | .format_group = &snbep_uncore_pcu_format_group, |
2015 | }; | 2016 | }; |
2016 | 2017 | ||
2017 | static struct intel_uncore_type *hswep_msr_uncores[] = { | 2018 | static struct intel_uncore_type *hswep_msr_uncores[] = { |
2018 | &hswep_uncore_ubox, | 2019 | &hswep_uncore_ubox, |
2019 | &hswep_uncore_cbox, | 2020 | &hswep_uncore_cbox, |
2020 | &hswep_uncore_sbox, | 2021 | &hswep_uncore_sbox, |
2021 | &hswep_uncore_pcu, | 2022 | &hswep_uncore_pcu, |
2022 | NULL, | 2023 | NULL, |
2023 | }; | 2024 | }; |
2024 | 2025 | ||
2025 | void hswep_uncore_cpu_init(void) | 2026 | void hswep_uncore_cpu_init(void) |
2026 | { | 2027 | { |
2027 | if (hswep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) | 2028 | if (hswep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores) |
2028 | hswep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; | 2029 | hswep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores; |
2030 | |||
2031 | /* Detect 6-8 core systems with only two SBOXes */ | ||
2032 | if (uncore_extra_pci_dev[0][HSWEP_PCI_PCU_3]) { | ||
2033 | u32 capid4; | ||
2034 | |||
2035 | pci_read_config_dword(uncore_extra_pci_dev[0][HSWEP_PCI_PCU_3], | ||
2036 | 0x94, &capid4); | ||
2037 | if (((capid4 >> 6) & 0x3) == 0) | ||
2038 | hswep_uncore_sbox.num_boxes = 2; | ||
2039 | } | ||
2040 | |||
2029 | uncore_msr_uncores = hswep_msr_uncores; | 2041 | uncore_msr_uncores = hswep_msr_uncores; |
2030 | } | 2042 | } |
2031 | 2043 | ||
2032 | static struct intel_uncore_type hswep_uncore_ha = { | 2044 | static struct intel_uncore_type hswep_uncore_ha = { |
2033 | .name = "ha", | 2045 | .name = "ha", |
2034 | .num_counters = 5, | 2046 | .num_counters = 5, |
2035 | .num_boxes = 2, | 2047 | .num_boxes = 2, |
2036 | .perf_ctr_bits = 48, | 2048 | .perf_ctr_bits = 48, |
2037 | SNBEP_UNCORE_PCI_COMMON_INIT(), | 2049 | SNBEP_UNCORE_PCI_COMMON_INIT(), |
2038 | }; | 2050 | }; |
2039 | 2051 | ||
2040 | static struct uncore_event_desc hswep_uncore_imc_events[] = { | 2052 | static struct uncore_event_desc hswep_uncore_imc_events[] = { |
2041 | INTEL_UNCORE_EVENT_DESC(clockticks, "event=0x00,umask=0x00"), | 2053 | INTEL_UNCORE_EVENT_DESC(clockticks, "event=0x00,umask=0x00"), |
2042 | INTEL_UNCORE_EVENT_DESC(cas_count_read, "event=0x04,umask=0x03"), | 2054 | INTEL_UNCORE_EVENT_DESC(cas_count_read, "event=0x04,umask=0x03"), |
2043 | INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"), | 2055 | INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"), |
2044 | INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"), | 2056 | INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"), |
2045 | INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"), | 2057 | INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"), |
2046 | INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"), | 2058 | INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"), |
2047 | INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"), | 2059 | INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"), |
2048 | { /* end: all zeroes */ }, | 2060 | { /* end: all zeroes */ }, |
2049 | }; | 2061 | }; |
2050 | 2062 | ||
2051 | static struct intel_uncore_type hswep_uncore_imc = { | 2063 | static struct intel_uncore_type hswep_uncore_imc = { |
2052 | .name = "imc", | 2064 | .name = "imc", |
2053 | .num_counters = 5, | 2065 | .num_counters = 5, |
2054 | .num_boxes = 8, | 2066 | .num_boxes = 8, |
2055 | .perf_ctr_bits = 48, | 2067 | .perf_ctr_bits = 48, |
2056 | .fixed_ctr_bits = 48, | 2068 | .fixed_ctr_bits = 48, |
2057 | .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR, | 2069 | .fixed_ctr = SNBEP_MC_CHy_PCI_PMON_FIXED_CTR, |
2058 | .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL, | 2070 | .fixed_ctl = SNBEP_MC_CHy_PCI_PMON_FIXED_CTL, |
2059 | .event_descs = hswep_uncore_imc_events, | 2071 | .event_descs = hswep_uncore_imc_events, |
2060 | SNBEP_UNCORE_PCI_COMMON_INIT(), | 2072 | SNBEP_UNCORE_PCI_COMMON_INIT(), |
2061 | }; | 2073 | }; |
2062 | 2074 | ||
/* PCI config-space offsets of the four IRP counters, indexed by hwc->idx. */
static unsigned hswep_uncore_irp_ctrs[] = {0xa0, 0xa8, 0xb0, 0xb8};
2064 | 2076 | ||
2065 | static u64 hswep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event) | 2077 | static u64 hswep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event) |
2066 | { | 2078 | { |
2067 | struct pci_dev *pdev = box->pci_dev; | 2079 | struct pci_dev *pdev = box->pci_dev; |
2068 | struct hw_perf_event *hwc = &event->hw; | 2080 | struct hw_perf_event *hwc = &event->hw; |
2069 | u64 count = 0; | 2081 | u64 count = 0; |
2070 | 2082 | ||
2071 | pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count); | 2083 | pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count); |
2072 | pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1); | 2084 | pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1); |
2073 | 2085 | ||
2074 | return count; | 2086 | return count; |
2075 | } | 2087 | } |
2076 | 2088 | ||
2077 | static struct intel_uncore_ops hswep_uncore_irp_ops = { | 2089 | static struct intel_uncore_ops hswep_uncore_irp_ops = { |
2078 | .init_box = snbep_uncore_pci_init_box, | 2090 | .init_box = snbep_uncore_pci_init_box, |
2079 | .disable_box = snbep_uncore_pci_disable_box, | 2091 | .disable_box = snbep_uncore_pci_disable_box, |
2080 | .enable_box = snbep_uncore_pci_enable_box, | 2092 | .enable_box = snbep_uncore_pci_enable_box, |
2081 | .disable_event = ivbep_uncore_irp_disable_event, | 2093 | .disable_event = ivbep_uncore_irp_disable_event, |
2082 | .enable_event = ivbep_uncore_irp_enable_event, | 2094 | .enable_event = ivbep_uncore_irp_enable_event, |
2083 | .read_counter = hswep_uncore_irp_read_counter, | 2095 | .read_counter = hswep_uncore_irp_read_counter, |
2084 | }; | 2096 | }; |
2085 | 2097 | ||
2086 | static struct intel_uncore_type hswep_uncore_irp = { | 2098 | static struct intel_uncore_type hswep_uncore_irp = { |
2087 | .name = "irp", | 2099 | .name = "irp", |
2088 | .num_counters = 4, | 2100 | .num_counters = 4, |
2089 | .num_boxes = 1, | 2101 | .num_boxes = 1, |
2090 | .perf_ctr_bits = 48, | 2102 | .perf_ctr_bits = 48, |
2091 | .event_mask = SNBEP_PMON_RAW_EVENT_MASK, | 2103 | .event_mask = SNBEP_PMON_RAW_EVENT_MASK, |
2092 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, | 2104 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, |
2093 | .ops = &hswep_uncore_irp_ops, | 2105 | .ops = &hswep_uncore_irp_ops, |
2094 | .format_group = &snbep_uncore_format_group, | 2106 | .format_group = &snbep_uncore_format_group, |
2095 | }; | 2107 | }; |
2096 | 2108 | ||
2097 | static struct intel_uncore_type hswep_uncore_qpi = { | 2109 | static struct intel_uncore_type hswep_uncore_qpi = { |
2098 | .name = "qpi", | 2110 | .name = "qpi", |
2099 | .num_counters = 5, | 2111 | .num_counters = 5, |
2100 | .num_boxes = 3, | 2112 | .num_boxes = 3, |
2101 | .perf_ctr_bits = 48, | 2113 | .perf_ctr_bits = 48, |
2102 | .perf_ctr = SNBEP_PCI_PMON_CTR0, | 2114 | .perf_ctr = SNBEP_PCI_PMON_CTR0, |
2103 | .event_ctl = SNBEP_PCI_PMON_CTL0, | 2115 | .event_ctl = SNBEP_PCI_PMON_CTL0, |
2104 | .event_mask = SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK, | 2116 | .event_mask = SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK, |
2105 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, | 2117 | .box_ctl = SNBEP_PCI_PMON_BOX_CTL, |
2106 | .num_shared_regs = 1, | 2118 | .num_shared_regs = 1, |
2107 | .ops = &snbep_uncore_qpi_ops, | 2119 | .ops = &snbep_uncore_qpi_ops, |
2108 | .format_group = &snbep_uncore_qpi_format_group, | 2120 | .format_group = &snbep_uncore_qpi_format_group, |
2109 | }; | 2121 | }; |
2110 | 2122 | ||
2111 | static struct event_constraint hswep_uncore_r2pcie_constraints[] = { | 2123 | static struct event_constraint hswep_uncore_r2pcie_constraints[] = { |
2112 | UNCORE_EVENT_CONSTRAINT(0x10, 0x3), | 2124 | UNCORE_EVENT_CONSTRAINT(0x10, 0x3), |
2113 | UNCORE_EVENT_CONSTRAINT(0x11, 0x3), | 2125 | UNCORE_EVENT_CONSTRAINT(0x11, 0x3), |
2114 | UNCORE_EVENT_CONSTRAINT(0x13, 0x1), | 2126 | UNCORE_EVENT_CONSTRAINT(0x13, 0x1), |
2115 | UNCORE_EVENT_CONSTRAINT(0x23, 0x1), | 2127 | UNCORE_EVENT_CONSTRAINT(0x23, 0x1), |
2116 | UNCORE_EVENT_CONSTRAINT(0x24, 0x1), | 2128 | UNCORE_EVENT_CONSTRAINT(0x24, 0x1), |
2117 | UNCORE_EVENT_CONSTRAINT(0x25, 0x1), | 2129 | UNCORE_EVENT_CONSTRAINT(0x25, 0x1), |
2118 | UNCORE_EVENT_CONSTRAINT(0x26, 0x3), | 2130 | UNCORE_EVENT_CONSTRAINT(0x26, 0x3), |
2119 | UNCORE_EVENT_CONSTRAINT(0x27, 0x1), | 2131 | UNCORE_EVENT_CONSTRAINT(0x27, 0x1), |
2120 | UNCORE_EVENT_CONSTRAINT(0x28, 0x3), | 2132 | UNCORE_EVENT_CONSTRAINT(0x28, 0x3), |
2121 | UNCORE_EVENT_CONSTRAINT(0x29, 0x3), | 2133 | UNCORE_EVENT_CONSTRAINT(0x29, 0x3), |
2122 | UNCORE_EVENT_CONSTRAINT(0x2a, 0x1), | 2134 | UNCORE_EVENT_CONSTRAINT(0x2a, 0x1), |
2123 | UNCORE_EVENT_CONSTRAINT(0x2b, 0x3), | 2135 | UNCORE_EVENT_CONSTRAINT(0x2b, 0x3), |
2124 | UNCORE_EVENT_CONSTRAINT(0x2c, 0x3), | 2136 | UNCORE_EVENT_CONSTRAINT(0x2c, 0x3), |
2125 | UNCORE_EVENT_CONSTRAINT(0x2d, 0x3), | 2137 | UNCORE_EVENT_CONSTRAINT(0x2d, 0x3), |
2126 | UNCORE_EVENT_CONSTRAINT(0x32, 0x3), | 2138 | UNCORE_EVENT_CONSTRAINT(0x32, 0x3), |
2127 | UNCORE_EVENT_CONSTRAINT(0x33, 0x3), | 2139 | UNCORE_EVENT_CONSTRAINT(0x33, 0x3), |
2128 | UNCORE_EVENT_CONSTRAINT(0x34, 0x3), | 2140 | UNCORE_EVENT_CONSTRAINT(0x34, 0x3), |
2129 | UNCORE_EVENT_CONSTRAINT(0x35, 0x3), | 2141 | UNCORE_EVENT_CONSTRAINT(0x35, 0x3), |
2130 | EVENT_CONSTRAINT_END | 2142 | EVENT_CONSTRAINT_END |
2131 | }; | 2143 | }; |
2132 | 2144 | ||
2133 | static struct intel_uncore_type hswep_uncore_r2pcie = { | 2145 | static struct intel_uncore_type hswep_uncore_r2pcie = { |
2134 | .name = "r2pcie", | 2146 | .name = "r2pcie", |
2135 | .num_counters = 4, | 2147 | .num_counters = 4, |
2136 | .num_boxes = 1, | 2148 | .num_boxes = 1, |
2137 | .perf_ctr_bits = 48, | 2149 | .perf_ctr_bits = 48, |
2138 | .constraints = hswep_uncore_r2pcie_constraints, | 2150 | .constraints = hswep_uncore_r2pcie_constraints, |
2139 | SNBEP_UNCORE_PCI_COMMON_INIT(), | 2151 | SNBEP_UNCORE_PCI_COMMON_INIT(), |
2140 | }; | 2152 | }; |
2141 | 2153 | ||
2142 | static struct event_constraint hswep_uncore_r3qpi_constraints[] = { | 2154 | static struct event_constraint hswep_uncore_r3qpi_constraints[] = { |
2143 | UNCORE_EVENT_CONSTRAINT(0x01, 0x3), | 2155 | UNCORE_EVENT_CONSTRAINT(0x01, 0x3), |
2144 | UNCORE_EVENT_CONSTRAINT(0x07, 0x7), | 2156 | UNCORE_EVENT_CONSTRAINT(0x07, 0x7), |
2145 | UNCORE_EVENT_CONSTRAINT(0x08, 0x7), | 2157 | UNCORE_EVENT_CONSTRAINT(0x08, 0x7), |
2146 | UNCORE_EVENT_CONSTRAINT(0x09, 0x7), | 2158 | UNCORE_EVENT_CONSTRAINT(0x09, 0x7), |
2147 | UNCORE_EVENT_CONSTRAINT(0x0a, 0x7), | 2159 | UNCORE_EVENT_CONSTRAINT(0x0a, 0x7), |
2148 | UNCORE_EVENT_CONSTRAINT(0x0e, 0x7), | 2160 | UNCORE_EVENT_CONSTRAINT(0x0e, 0x7), |
2149 | UNCORE_EVENT_CONSTRAINT(0x10, 0x3), | 2161 | UNCORE_EVENT_CONSTRAINT(0x10, 0x3), |
2150 | UNCORE_EVENT_CONSTRAINT(0x11, 0x3), | 2162 | UNCORE_EVENT_CONSTRAINT(0x11, 0x3), |
2151 | UNCORE_EVENT_CONSTRAINT(0x12, 0x3), | 2163 | UNCORE_EVENT_CONSTRAINT(0x12, 0x3), |
2152 | UNCORE_EVENT_CONSTRAINT(0x13, 0x1), | 2164 | UNCORE_EVENT_CONSTRAINT(0x13, 0x1), |
2153 | UNCORE_EVENT_CONSTRAINT(0x14, 0x3), | 2165 | UNCORE_EVENT_CONSTRAINT(0x14, 0x3), |
2154 | UNCORE_EVENT_CONSTRAINT(0x15, 0x3), | 2166 | UNCORE_EVENT_CONSTRAINT(0x15, 0x3), |
2155 | UNCORE_EVENT_CONSTRAINT(0x1f, 0x3), | 2167 | UNCORE_EVENT_CONSTRAINT(0x1f, 0x3), |
2156 | UNCORE_EVENT_CONSTRAINT(0x20, 0x3), | 2168 | UNCORE_EVENT_CONSTRAINT(0x20, 0x3), |
2157 | UNCORE_EVENT_CONSTRAINT(0x21, 0x3), | 2169 | UNCORE_EVENT_CONSTRAINT(0x21, 0x3), |
2158 | UNCORE_EVENT_CONSTRAINT(0x22, 0x3), | 2170 | UNCORE_EVENT_CONSTRAINT(0x22, 0x3), |
2159 | UNCORE_EVENT_CONSTRAINT(0x23, 0x3), | 2171 | UNCORE_EVENT_CONSTRAINT(0x23, 0x3), |
2160 | UNCORE_EVENT_CONSTRAINT(0x25, 0x3), | 2172 | UNCORE_EVENT_CONSTRAINT(0x25, 0x3), |
2161 | UNCORE_EVENT_CONSTRAINT(0x26, 0x3), | 2173 | UNCORE_EVENT_CONSTRAINT(0x26, 0x3), |
2162 | UNCORE_EVENT_CONSTRAINT(0x28, 0x3), | 2174 | UNCORE_EVENT_CONSTRAINT(0x28, 0x3), |
2163 | UNCORE_EVENT_CONSTRAINT(0x29, 0x3), | 2175 | UNCORE_EVENT_CONSTRAINT(0x29, 0x3), |
2164 | UNCORE_EVENT_CONSTRAINT(0x2c, 0x3), | 2176 | UNCORE_EVENT_CONSTRAINT(0x2c, 0x3), |
2165 | UNCORE_EVENT_CONSTRAINT(0x2d, 0x3), | 2177 | UNCORE_EVENT_CONSTRAINT(0x2d, 0x3), |
2166 | UNCORE_EVENT_CONSTRAINT(0x2e, 0x3), | 2178 | UNCORE_EVENT_CONSTRAINT(0x2e, 0x3), |
2167 | UNCORE_EVENT_CONSTRAINT(0x2f, 0x3), | 2179 | UNCORE_EVENT_CONSTRAINT(0x2f, 0x3), |
2168 | UNCORE_EVENT_CONSTRAINT(0x31, 0x3), | 2180 | UNCORE_EVENT_CONSTRAINT(0x31, 0x3), |
2169 | UNCORE_EVENT_CONSTRAINT(0x32, 0x3), | 2181 | UNCORE_EVENT_CONSTRAINT(0x32, 0x3), |
2170 | UNCORE_EVENT_CONSTRAINT(0x33, 0x3), | 2182 | UNCORE_EVENT_CONSTRAINT(0x33, 0x3), |
2171 | UNCORE_EVENT_CONSTRAINT(0x34, 0x3), | 2183 | UNCORE_EVENT_CONSTRAINT(0x34, 0x3), |
2172 | UNCORE_EVENT_CONSTRAINT(0x36, 0x3), | 2184 | UNCORE_EVENT_CONSTRAINT(0x36, 0x3), |
2173 | UNCORE_EVENT_CONSTRAINT(0x37, 0x3), | 2185 | UNCORE_EVENT_CONSTRAINT(0x37, 0x3), |
2174 | UNCORE_EVENT_CONSTRAINT(0x38, 0x3), | 2186 | UNCORE_EVENT_CONSTRAINT(0x38, 0x3), |
2175 | UNCORE_EVENT_CONSTRAINT(0x39, 0x3), | 2187 | UNCORE_EVENT_CONSTRAINT(0x39, 0x3), |
2176 | EVENT_CONSTRAINT_END | 2188 | EVENT_CONSTRAINT_END |
2177 | }; | 2189 | }; |
2178 | 2190 | ||
/*
 * Haswell-EP R3QPI uncore PMU type (PCI-attached).
 * Three boxes, four 44-bit counters each, with the per-event counter
 * restrictions listed in hswep_uncore_r3qpi_constraints.
 */
static struct intel_uncore_type hswep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 4,
	.num_boxes	= 3,
	.perf_ctr_bits	= 44,
	.constraints	= hswep_uncore_r3qpi_constraints,
	/* fills in the common SNB-EP style PCI register offsets/ops */
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2187 | 2199 | ||
/* Indices into hswep_pci_uncores[]; also encoded into each PCI
 * device table entry's driver_data via UNCORE_PCI_DEV_DATA(). */
enum {
	HSWEP_PCI_UNCORE_HA,
	HSWEP_PCI_UNCORE_IMC,
	HSWEP_PCI_UNCORE_IRP,
	HSWEP_PCI_UNCORE_QPI,
	HSWEP_PCI_UNCORE_R2PCIE,
	HSWEP_PCI_UNCORE_R3QPI,
};
2196 | 2208 | ||
/* All PCI-based Haswell-EP uncore PMU types, indexed by the
 * HSWEP_PCI_UNCORE_* enum; NULL-terminated for the generic iterator. */
static struct intel_uncore_type *hswep_pci_uncores[] = {
	[HSWEP_PCI_UNCORE_HA]	= &hswep_uncore_ha,
	[HSWEP_PCI_UNCORE_IMC]	= &hswep_uncore_imc,
	[HSWEP_PCI_UNCORE_IRP]	= &hswep_uncore_irp,
	[HSWEP_PCI_UNCORE_QPI]	= &hswep_uncore_qpi,
	[HSWEP_PCI_UNCORE_R2PCIE] = &hswep_uncore_r2pcie,
	[HSWEP_PCI_UNCORE_R3QPI]	= &hswep_uncore_r3qpi,
	NULL,
};
2206 | 2218 | ||
2207 | static DEFINE_PCI_DEVICE_TABLE(hswep_uncore_pci_ids) = { | 2219 | static DEFINE_PCI_DEVICE_TABLE(hswep_uncore_pci_ids) = { |
2208 | { /* Home Agent 0 */ | 2220 | { /* Home Agent 0 */ |
2209 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f30), | 2221 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f30), |
2210 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 0), | 2222 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 0), |
2211 | }, | 2223 | }, |
2212 | { /* Home Agent 1 */ | 2224 | { /* Home Agent 1 */ |
2213 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f38), | 2225 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f38), |
2214 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 1), | 2226 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 1), |
2215 | }, | 2227 | }, |
2216 | { /* MC0 Channel 0 */ | 2228 | { /* MC0 Channel 0 */ |
2217 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb0), | 2229 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb0), |
2218 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 0), | 2230 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 0), |
2219 | }, | 2231 | }, |
2220 | { /* MC0 Channel 1 */ | 2232 | { /* MC0 Channel 1 */ |
2221 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb1), | 2233 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb1), |
2222 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 1), | 2234 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 1), |
2223 | }, | 2235 | }, |
2224 | { /* MC0 Channel 2 */ | 2236 | { /* MC0 Channel 2 */ |
2225 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb4), | 2237 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb4), |
2226 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 2), | 2238 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 2), |
2227 | }, | 2239 | }, |
2228 | { /* MC0 Channel 3 */ | 2240 | { /* MC0 Channel 3 */ |
2229 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb5), | 2241 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb5), |
2230 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 3), | 2242 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 3), |
2231 | }, | 2243 | }, |
2232 | { /* MC1 Channel 0 */ | 2244 | { /* MC1 Channel 0 */ |
2233 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd0), | 2245 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd0), |
2234 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 4), | 2246 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 4), |
2235 | }, | 2247 | }, |
2236 | { /* MC1 Channel 1 */ | 2248 | { /* MC1 Channel 1 */ |
2237 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd1), | 2249 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd1), |
2238 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 5), | 2250 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 5), |
2239 | }, | 2251 | }, |
2240 | { /* MC1 Channel 2 */ | 2252 | { /* MC1 Channel 2 */ |
2241 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd4), | 2253 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd4), |
2242 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 6), | 2254 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 6), |
2243 | }, | 2255 | }, |
2244 | { /* MC1 Channel 3 */ | 2256 | { /* MC1 Channel 3 */ |
2245 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd5), | 2257 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd5), |
2246 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 7), | 2258 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 7), |
2247 | }, | 2259 | }, |
2248 | { /* IRP */ | 2260 | { /* IRP */ |
2249 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f39), | 2261 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f39), |
2250 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IRP, 0), | 2262 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IRP, 0), |
2251 | }, | 2263 | }, |
2252 | { /* QPI0 Port 0 */ | 2264 | { /* QPI0 Port 0 */ |
2253 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f32), | 2265 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f32), |
2254 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 0), | 2266 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 0), |
2255 | }, | 2267 | }, |
2256 | { /* QPI0 Port 1 */ | 2268 | { /* QPI0 Port 1 */ |
2257 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f33), | 2269 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f33), |
2258 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 1), | 2270 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 1), |
2259 | }, | 2271 | }, |
2260 | { /* QPI1 Port 2 */ | 2272 | { /* QPI1 Port 2 */ |
2261 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3a), | 2273 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3a), |
2262 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 2), | 2274 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 2), |
2263 | }, | 2275 | }, |
2264 | { /* R2PCIe */ | 2276 | { /* R2PCIe */ |
2265 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f34), | 2277 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f34), |
2266 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R2PCIE, 0), | 2278 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R2PCIE, 0), |
2267 | }, | 2279 | }, |
2268 | { /* R3QPI0 Link 0 */ | 2280 | { /* R3QPI0 Link 0 */ |
2269 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f36), | 2281 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f36), |
2270 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 0), | 2282 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 0), |
2271 | }, | 2283 | }, |
2272 | { /* R3QPI0 Link 1 */ | 2284 | { /* R3QPI0 Link 1 */ |
2273 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f37), | 2285 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f37), |
2274 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 1), | 2286 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 1), |
2275 | }, | 2287 | }, |
2276 | { /* R3QPI1 Link 2 */ | 2288 | { /* R3QPI1 Link 2 */ |
2277 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3e), | 2289 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3e), |
2278 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 2), | 2290 | .driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 2), |
2279 | }, | 2291 | }, |
2280 | { /* QPI Port 0 filter */ | 2292 | { /* QPI Port 0 filter */ |
2281 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f86), | 2293 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f86), |
2282 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, | 2294 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, |
2283 | SNBEP_PCI_QPI_PORT0_FILTER), | 2295 | SNBEP_PCI_QPI_PORT0_FILTER), |
2284 | }, | 2296 | }, |
2285 | { /* QPI Port 1 filter */ | 2297 | { /* QPI Port 1 filter */ |
2286 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f96), | 2298 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f96), |
2287 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, | 2299 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, |
2288 | SNBEP_PCI_QPI_PORT1_FILTER), | 2300 | SNBEP_PCI_QPI_PORT1_FILTER), |
2301 | }, | ||
2302 | { /* PCU.3 (for Capability registers) */ | ||
2303 | PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fc0), | ||
2304 | .driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV, | ||
2305 | HSWEP_PCI_PCU_3), | ||
2289 | }, | 2306 | }, |
2290 | { /* end: all zeroes */ } | 2307 | { /* end: all zeroes */ } |
2291 | }; | 2308 | }; |
2292 | 2309 | ||
/* PCI driver stub binding the Haswell-EP uncore device ID table;
 * probe/remove are handled by the generic uncore PCI code. */
static struct pci_driver hswep_uncore_pci_driver = {
	.name		= "hswep_uncore",
	.id_table	= hswep_uncore_pci_ids,
};
2297 | 2314 | ||
2298 | int hswep_uncore_pci_init(void) | 2315 | int hswep_uncore_pci_init(void) |
2299 | { | 2316 | { |
2300 | int ret = snbep_pci2phy_map_init(0x2f1e); | 2317 | int ret = snbep_pci2phy_map_init(0x2f1e); |
2301 | if (ret) | 2318 | if (ret) |
2302 | return ret; | 2319 | return ret; |
2303 | uncore_pci_uncores = hswep_pci_uncores; | 2320 | uncore_pci_uncores = hswep_pci_uncores; |
2304 | uncore_pci_driver = &hswep_uncore_pci_driver; | 2321 | uncore_pci_driver = &hswep_uncore_pci_driver; |
2305 | return 0; | 2322 | return 0; |
2306 | } | 2323 | } |
2307 | /* end of Haswell-EP uncore support */ | 2324 | /* end of Haswell-EP uncore support */ |
2308 | 2325 |