  	/* arch/sparc/kernel/spiterrs.S: Spitfire (UltraSPARC-I/II)
  	 * error trap handlers.
  	 */
  	/* We need to carefully read the error status, ACK the errors,
  	 * prevent recursive traps, and pass the information on to C
  	 * code for logging.
  	 *
  	 * We pass the AFAR in as-is, and we encode the status
  	 * information as described in asm-sparc64/sfafsr.h
  	 *
  	 * Register usage on this path:
  	 *   %g1 -- UDB error bit to test and ACK (UDBE_UE or UDBE_CE)
  	 *   %g4 -- accumulated status word (AFSR plus encoded extras)
  	 *   %g5 -- AFAR
  	 */
  	.type		__spitfire_access_error,#function
  __spitfire_access_error:
  	/* Disable ESTATE error reporting so that we do not take
  	 * recursive traps and RED state the processor.
  	 */
  	stxa		%g0, [%g0] ASI_ESTATE_ERROR_EN
  	membar		#Sync			! make the disable take effect
  
  	mov		UDBE_UE, %g1		! UE path: test/ACK the UDB UE bits
  	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR
  
  	/* __spitfire_cee_trap branches here with AFSR in %g4 and
  	 * UDBE_CE in %g1.  It only clears ESTATE_ERR_CE in the ESTATE
  	 * Error Enable register.
  	 */
  __spitfire_cee_trap_continue:
  	ldxa		[%g0] ASI_AFAR, %g5	! Get AFAR
  
  	/* Encode the trap type into the status word. */
  	rdpr		%tt, %g3
  	and		%g3, 0x1ff, %g3		! Paranoia
  	sllx		%g3, SFSTAT_TRAP_TYPE_SHIFT, %g3
  	or		%g4, %g3, %g4
  	/* Record whether we trapped at TL > 1.  The 'mov' below does
  	 * not touch the condition codes, so the compare result is
  	 * still live at the branch.
  	 */
  	rdpr		%tl, %g3
  	cmp		%g3, 1
  	mov		1, %g3
  	bleu		%xcc, 1f
  	 sllx		%g3, SFSTAT_TL_GT_ONE_SHIFT, %g3	! delay slot
  
  	or		%g4, %g3, %g4		! TL > 1: set the status bit
  
  	/* Read in the UDB error register state, clearing the sticky
  	 * error bits as-needed.  We only clear them if the UE bit is
  	 * set.  Likewise, __spitfire_cee_trap below will only do so
  	 * if the CE bit is set.
  	 *
  	 * NOTE: UltraSparc-I/II have high and low UDB error
  	 *       registers, corresponding to the two UDB units
  	 *       present on those chips.  UltraSparc-IIi only
  	 *       has a single UDB, called "SDB" in the manual.
  	 *       For IIi the upper UDB register always reads
  	 *       as zero so for our purposes things will just
  	 *       work with the checks below.
  	 */
  1:	ldxa		[%g0] ASI_UDBH_ERROR_R, %g3
  	and		%g3, 0x3ff, %g7		! Paranoia
  	sllx		%g7, SFSTAT_UDBH_SHIFT, %g7
  	or		%g4, %g7, %g4		! record UDB-high state
  	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
  	be,pn		%xcc, 1f
  	 nop
  	stxa		%g3, [%g0] ASI_UDB_ERROR_W	! ACK the sticky bit(s)
  	membar		#Sync
  
  1:	mov		0x18, %g3		! 0x18: low-UDB register address
  	ldxa		[%g3] ASI_UDBL_ERROR_R, %g3
  	and		%g3, 0x3ff, %g7		! Paranoia
  	sllx		%g7, SFSTAT_UDBL_SHIFT, %g7
  	or		%g4, %g7, %g4		! record UDB-low state
  	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
  	be,pn		%xcc, 1f
  	 nop
  	mov		0x18, %g7
  	stxa		%g3, [%g7] ASI_UDB_ERROR_W	! ACK the sticky bit(s)
  	membar		#Sync
  
  1:	/* Ok, now that we've latched the error state, clear the
  	 * sticky bits in the AFSR.
  	 */
  	stxa		%g4, [%g0] ASI_AFSR
  	membar		#Sync
  
  	/* TL <= 1 takes the normal trap-entry path below; the 'rdpr
  	 * %pil' does not disturb the compare result.
  	 */
  	rdpr		%tl, %g2
  	cmp		%g2, 1
  	rdpr		%pil, %g2
  	bleu,pt		%xcc, 1f
  	 wrpr		%g0, PIL_NORMAL_MAX, %pil	! delay slot: block normal IRQs
  
  	/* TL > 1: build the trap frame via etraptl1. */
  	ba,pt		%xcc, etraptl1
  	 rd		%pc, %g7		! delay slot: %g7 = etrap return PC
  
  	ba,pt		%xcc, 2f
  	 nop
  
  	/* TL <= 1: build the trap frame via etrap_irq. */
  1:	ba,pt		%xcc, etrap_irq
  	 rd		%pc, %g7		! delay slot: %g7 = etrap return PC
  
  2:
  #ifdef CONFIG_TRACE_IRQFLAGS
  	call	trace_hardirqs_off
  	 nop
  #endif
  	/* Call spitfire_access_error(regs, status, afar).
  	 * NOTE(review): assumes the trap-entry code saved %g4/%g5
  	 * into %l4/%l5 -- confirm against etrap.S.
  	 */
  	mov		%l4, %o1		! arg1: encoded status word
  	mov		%l5, %o2		! arg2: AFAR
  	call		spitfire_access_error
  	 add		%sp, PTREGS_OFF, %o0	! delay slot; arg0: pt_regs
  	ba,pt		%xcc, rtrap
  	 nop
  	.size		__spitfire_access_error,.-__spitfire_access_error
  
  	/* This is the trap handler entry point for ECC correctable
  	 * errors.  They are corrected, but we listen for the trap so
  	 * that the event can be logged.
  	 *
  	 * Disrupting errors are either:
  	 * 1) single-bit ECC errors during UDB reads to system
  	 *    memory
  	 * 2) data parity errors during write-back events
  	 *
  	 * As far as I can make out from the manual, the CEE trap is
  	 * only for correctable errors during memory read accesses by
  	 * the front-end of the processor.
  	 *
  	 * The code below is only for trap level 1 CEE events, as it
  	 * is the only situation where we can safely record and log.
  	 * For trap level >1 we just clear the CE bit in the AFSR and
  	 * return.
  	 *
  	 * This is just like __spitfire_access_error above, but it
  	 * specifically handles correctable errors.  If an
  	 * uncorrectable error is indicated in the AFSR we will branch
  	 * directly above to __spitfire_access_error to handle it
  	 * instead.  Uncorrectable therefore takes priority over
  	 * correctable, and the error logging C code will notice this
  	 * case by inspecting the trap type.
  	 */
  	.type		__spitfire_cee_trap,#function
  __spitfire_cee_trap:
  	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR
  	mov		1, %g3
  	sllx		%g3, SFAFSR_UE_SHIFT, %g3
  	andcc		%g4, %g3, %g0		! Check for UE
  	bne,pn		%xcc, __spitfire_access_error	! UE wins over CE
  	 nop
  
  	/* Ok, in this case we only have a correctable error.
  	 * Indicate we only wish to capture that state in register
  	 * %g1, and we only disable CE error reporting unlike UE
  	 * handling which disables all errors.
  	 */
  	ldxa		[%g0] ASI_ESTATE_ERROR_EN, %g3
  	andn		%g3, ESTATE_ERR_CE, %g3		! clear only the CE enable
  	stxa		%g3, [%g0] ASI_ESTATE_ERROR_EN
  	membar		#Sync
  
  	/* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */
  	ba,pt		%xcc, __spitfire_cee_trap_continue
  	 mov		UDBE_CE, %g1		! delay slot
  	.size		__spitfire_cee_trap,.-__spitfire_cee_trap
  
  	/* Data access exception taken at TL > 1.  Read and clear the
  	 * D-MMU SFSR/SFAR, special-case faults raised inside the
  	 * register-window spill/fill handlers (trap types 0x80-0xff),
  	 * and otherwise hand off to C for reporting.
  	 */
  	.type		__spitfire_data_access_exception_tl1,#function
  __spitfire_data_access_exception_tl1:
  	rdpr		%pstate, %g4
  	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate	! wrpr XORs operands: toggle MG/AG
  	mov		TLB_SFSR, %g3
  	mov		DMMU_SFAR, %g5
  	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
  	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
  	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
  	membar		#Sync
  	rdpr		%tt, %g3
  	cmp		%g3, 0x80		! first win spill/fill trap
  	blu,pn		%xcc, 1f
  	 cmp		%g3, 0xff		! last win spill/fill trap
  	bgu,pn		%xcc, 1f
  	 nop
  	/* The fault hit inside a window spill/fill handler. */
  	ba,pt		%xcc, winfix_dax
  	 rdpr		%tpc, %g3		! delay slot: %g3 = trap PC
  1:	sethi		%hi(109f), %g7
  	ba,pt		%xcc, etraptl1
  109:	 or		%g7, %lo(109b), %g7	! delay slot: etrap return address
  	mov		%l4, %o1		! arg1: SFSR (NOTE(review): assumes etrap saved %g4 here)
  	mov		%l5, %o2		! arg2: SFAR (likewise from %g5)
  	call		spitfire_data_access_exception_tl1
  	 add		%sp, PTREGS_OFF, %o0	! delay slot; arg0: pt_regs
  	ba,pt		%xcc, rtrap
  	 nop
  	.size		__spitfire_data_access_exception_tl1,.-__spitfire_data_access_exception_tl1
  
  	/* Data access exception at TL <= 1: capture and clear the
  	 * D-MMU SFSR/SFAR, then report via C.
  	 */
  	.type		__spitfire_data_access_exception,#function
  __spitfire_data_access_exception:
  	rdpr		%pstate, %g4
  	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate	! wrpr XORs operands: toggle MG/AG
  	mov		TLB_SFSR, %g3
  	mov		DMMU_SFAR, %g5
  	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
  	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
  	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
  	membar		#Sync
  	sethi		%hi(109f), %g7
  	ba,pt		%xcc, etrap
  109:	 or		%g7, %lo(109b), %g7	! delay slot: etrap return address
  	mov		%l4, %o1		! arg1: SFSR (NOTE(review): assumes etrap saved %g4 here)
  	mov		%l5, %o2		! arg2: SFAR (likewise from %g5)
  	call		spitfire_data_access_exception
  	 add		%sp, PTREGS_OFF, %o0	! delay slot; arg0: pt_regs
  	ba,pt		%xcc, rtrap
  	 nop
  	.size		__spitfire_data_access_exception,.-__spitfire_data_access_exception
  
  	/* Instruction access exception at TL > 1: capture and clear
  	 * the I-MMU SFSR, then report via C.  The I-MMU has no fault
  	 * address register, so %tpc stands in for the fault address.
  	 */
  	.type		__spitfire_insn_access_exception_tl1,#function
  __spitfire_insn_access_exception_tl1:
  	rdpr		%pstate, %g4
  	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate	! wrpr XORs operands: toggle MG/AG
  	mov		TLB_SFSR, %g3
  	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
  	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
  	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
  	membar		#Sync
  	sethi		%hi(109f), %g7
  	ba,pt		%xcc, etraptl1
  109:	 or		%g7, %lo(109b), %g7	! delay slot: etrap return address
  	mov		%l4, %o1		! arg1: SFSR (NOTE(review): assumes etrap saved %g4 here)
  	mov		%l5, %o2		! arg2: fault PC (likewise from %g5)
  	call		spitfire_insn_access_exception_tl1
  	 add		%sp, PTREGS_OFF, %o0	! delay slot; arg0: pt_regs
  	ba,pt		%xcc, rtrap
  	 nop
  	.size		__spitfire_insn_access_exception_tl1,.-__spitfire_insn_access_exception_tl1
  
  	/* Instruction access exception at TL <= 1: capture and clear
  	 * the I-MMU SFSR, then report via C.  The I-MMU has no fault
  	 * address register, so %tpc stands in for the fault address.
  	 */
  	.type		__spitfire_insn_access_exception,#function
  __spitfire_insn_access_exception:
  	rdpr		%pstate, %g4
  	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate	! wrpr XORs operands: toggle MG/AG
  	mov		TLB_SFSR, %g3
  	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
  	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
  	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
  	membar		#Sync
  	sethi		%hi(109f), %g7
  	ba,pt		%xcc, etrap
  109:	 or		%g7, %lo(109b), %g7	! delay slot: etrap return address
  	mov		%l4, %o1		! arg1: SFSR (NOTE(review): assumes etrap saved %g4 here)
  	mov		%l5, %o2		! arg2: fault PC (likewise from %g5)
  	call		spitfire_insn_access_exception
  	 add		%sp, PTREGS_OFF, %o0	! delay slot; arg0: pt_regs
  	ba,pt		%xcc, rtrap
  	 nop
  	.size		__spitfire_insn_access_exception,.-__spitfire_insn_access_exception